require "BuildClientOptionParser"
require "BuildComm"
+
+
#option parsing
begin
option = option_parse
exit 0
end
-# if "--os" is not specified, use host os type
+
+# check HOST OS
+if not Utils.check_host_OS() then
+ puts "Error: Your host OS is not supported!"
+ exit 1
+end
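+
+# A minimal sketch (comment only) of what a host-OS check like Utils.check_host_OS
+# could boil down to. The OS names are taken from the supported OS list used
+# elsewhere in this change (ubuntu-32/64, windows-32/64, macos-64); the real
+# Utils implementation may differ.
+#
+#   def check_host_OS_sketch
+#     ["ubuntu-32", "ubuntu-64", "windows-32", "windows-64", "macos-64"].include? Utils::HOST_OS
+#   end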
+
+def query_system_info(ip, port)
+ # HOST SYSTEM INFO
+ puts "* SYSTEM INFO *"
+ client = BuildCommClient.create( ip, port, nil, 0 )
+ if not client.nil? then
+ client.send "QUERY|SYSTEM"
+ result0 = client.receive_data()
+ if result0.nil? then
+ client.terminate
+ exit 1
+ end
+ result0 = result0[0].split(",").map { |x| x.strip }
+ puts "HOST-OS: #{result0[0]}"
+ puts "MAX_WORKING_JOBS: #{result0[1]}"
+ client.terminate
+ else
+ puts "Connection to server failed!"
+ exit 1
+ end
+
+ # FTP INFO
+ puts ""
+ puts "* FTP *"
+ client = BuildCommClient.create( ip, port )
+ if not client.nil? then
+ client.send "QUERY|FTP"
+ result0 = client.receive_data()
+ if result0.nil? then
+ client.terminate
+ exit 1
+ end
+ result0 = result0[0].split(",").map { |x| x.strip }
+ puts "FTP_ADDR: #{result0[0]}"
+ puts "FTP_USERNAME: #{result0[1]}"
+ client.terminate
+ else
+ puts "Connection to server failed!"
+ exit 1
+ end
+
+ # SUPPORTED OS INFO
+ puts ""
+ puts "* SUPPORTED OS LIST *"
+ client = BuildCommClient.create( ip, port )
+ if not client.nil? then
+ client.send "QUERY|OS"
+ result0 = client.receive_data()
+ if not result0.nil? then
+ for item in result0
+ puts "#{item.strip}"
+ end
+ end
+ client.terminate
+ else
+ puts "Connection to server failed!"
+ exit 1
+ end
+end
+
+
+def query_project_list(ip, port)
+ puts "* PROJECT(S) *"
+ client = BuildCommClient.create( ip, port, nil, 0 )
+ if not client.nil? then
+ client.send "QUERY|PROJECT"
+ result1 = client.receive_data()
+ if result1.nil? then
+ client.terminate
+ exit 1
+ end
+ for item in result1
+ tok = item.split(",").map { |x| x.strip }
+ type = (tok[0]=="G" ? "NORMAL":"REMOTE")
+ printf("%-25s %s\n",tok[1],type)
+ end
+ else
+ puts "Connection to server failed!"
+ exit 1
+ end
+end
+
+
+def query_job_list(ip, port)
+ puts "* JOB(S) *"
+ client = BuildCommClient.create( ip, port, nil, 0 )
+ if not client.nil? then
+ client.send "QUERY|JOB"
+ result1 = client.receive_data()
+ if result1.nil? then
+ client.terminate
+ exit 1
+ end
+ for item in result1
+ tok = item.split(",").map { |x| x.strip }
+ if tok[3].nil? then
+ puts "#{tok[1]} #{tok[0]} #{tok[2]}"
+ else
+ puts "#{tok[1]} #{tok[0]} #{tok[2]} (#{tok[3]})"
+ end
+ end
+ else
+ puts "Connection to server failed!"
+ exit 1
+ end
+end
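+
+# Illustrative usage (comment only): given a "<ip>:<port>" address parsed with
+# Utils.parse_server_addr, the three query helpers above can be called
+# independently; this is what the new "query-system", "query-project" and
+# "query-job" subcommands below rely on. The address below is a placeholder.
+#
+#   addr = Utils.parse_server_addr("127.0.0.1:2222")
+#   query_system_info( addr[0], addr[1] )
+#   query_project_list( addr[0], addr[1] )
+#   query_job_list( addr[0], addr[1] )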
+
+
+# if "--os" is not specified, use pe
if option[:os].nil? then
- option[:os] = Utils::HOST_OS
-else
- if not option[:os] =~ /^(linux|windows|darwin|all)$/ then
- puts "We have no plan to Buld OS \"#{option[:os]}\" \n please check your option OS "
- exit 1
- end
+ option[:os] = "default"
end
if option[:domain].nil? then
+ puts "Warn: Build server IP address is not specified. 127.0.0.1 will be used"
option[:domain] = "127.0.0.1"
end
-if option[:port].nil? then
- option[:port] = 2222
-end
-
begin
case option[:cmd]
when "build"
puts " <ip>:<port>"
exit 1
end
- client = BuildCommClient.create( result[0], result[1], 0 )
+ client = BuildCommClient.create( result[0], result[1], nil, 0 )
if not client.nil? then
- client.send "BUILD,GIT,#{option[:project]},#{option[:passwd]},#{option[:os]},#{option[:async]}"
+ client.send "BUILD|GIT|#{option[:project]}|#{option[:passwd]}|#{option[:os]}|#{option[:async]}"
client.print_stream
client.terminate
else
puts " <ip>:<port>"
exit 1
end
- client = BuildCommClient.create( result[0], result[1], 0 )
+ client = BuildCommClient.create( result[0], result[1], nil, 0 )
if not client.nil? then
- client.send "RESOLVE,GIT,#{option[:project]},#{option[:passwd]},#{option[:os]},#{option[:async]}"
+ client.send "RESOLVE|GIT|#{option[:project]}|#{option[:passwd]}|#{option[:os]}|#{option[:async]}"
client.print_stream
client.terminate
end
puts " <ip>:<port>"
exit 1
end
- # SYSTEM INFO
- client = BuildCommClient.create( result[0], result[1], 0 )
- if not client.nil? then
- client.send "QUERY,SYSTEM"
- result0 = client.receive_data()
- if result0.nil? then
- client.terminate
- exit(-1)
- end
- result0 = result0[0].split(",").map { |x| x.strip }
- puts "HOST-OS: #{result0[0]}"
- puts "MAX_WORKING_JOBS: #{result0[1]}"
- client.terminate
- else
- puts "Connection to server failed!"
- exit 1
- end
-
- # FTP INFO
- puts ""
- client = BuildCommClient.create( result[0], result[1], 0 )
- if not client.nil? then
- client.send "QUERY,FTP"
- result0 = client.receive_data()
- if result0.nil? then
- client.terminate
- exit(-1)
- end
- puts "* FTP *"
- result0 = result0[0].split(",").map { |x| x.strip }
- puts "FTP_ADDR: #{result0[0]}"
- puts "FTP_USERNAME: #{result0[1]}"
- client.terminate
- else
- puts "Connection to server failed!"
- exit 1
- end
- # PROJECT INFO
+ query_system_info( result[0], result[1] )
puts ""
- client = BuildCommClient.create( result[0], result[1], 0 )
- if not client.nil? then
- client.send "QUERY,PROJECT"
- result1 = client.receive_data()
- if result1.nil? then
- client.terminate
- exit(-1)
- end
- puts "* PROJECT(S) *"
- for item in result1
- tok = item.split(",").map { |x| x.strip }
- type = (tok[0]=="G" ? "NORMAL":"REMOTE")
- printf("%-25s %s\n",tok[1],type)
- end
- else
- puts "Connection to server failed!"
- exit 1
- end
-
- # JOB INFO
+ query_project_list( result[0], result[1])
puts ""
- client = BuildCommClient.create( result[0], result[1], 0 )
- if not client.nil? then
- client.send "QUERY,JOB"
- result1 = client.receive_data()
- if result1.nil? then
- client.terminate
- exit(-1)
- end
- puts "* JOB(S) *"
- for item in result1
- tok = item.split(",").map { |x| x.strip }
- puts "#{tok[1]} #{tok[0]} #{tok[2]}"
- end
- else
- puts "Connection to server failed!"
- exit 1
- end
+ query_job_list( result[0], result[1])
+
+ when "query-system"
+ result = Utils.parse_server_addr(option[:domain])
+ if result.nil? then
+ puts "Server address is incorrect. (#{option[:domain]})"
+ puts "Tune as following format."
+ puts " <ip>:<port>"
+ exit 1
+ end
+
+ query_system_info( result[0], result[1] )
+
+ when "query-project"
+ result = Utils.parse_server_addr(option[:domain])
+ if result.nil? then
+ puts "Server address is incorrect. (#{option[:domain]})"
+ puts "Tune as following format."
+ puts " <ip>:<port>"
+ exit 1
+ end
+
+ query_project_list( result[0], result[1])
+
+ when "query-job"
+ result = Utils.parse_server_addr(option[:domain])
+ if result.nil? then
+ puts "Server address is incorrect. (#{option[:domain]})"
+ puts "Tune as following format."
+ puts " <ip>:<port>"
+ exit 1
+ end
+
+ query_job_list( result[0], result[1] )
when "cancel"
result = Utils.parse_server_addr(option[:domain])
exit 1
end
if not option[:job].nil? then
- client = BuildCommClient.create( result[0], result[1], 0 )
+ client = BuildCommClient.create( result[0], result[1], nil, 0 )
if not client.nil? then
- client.send "CANCEL,#{option[:job]},#{option[:passwd]}"
+ client.send "CANCEL|#{option[:job]}|#{option[:passwd]}"
result1 = client.receive_data()
if result1.nil? then
client.terminate
passwd = ftp_result[3]
# upload
- client = BuildCommClient.create( bs_ip, bs_port, 0 )
+ client = BuildCommClient.create( bs_ip, bs_port, nil, 0 )
if client.nil? then
puts "Can't access server #{bs_ip}:#{bs_port}"
exit(-1)
end
# register
- client = BuildCommClient.create( bs_ip, bs_port, 0 )
+ client = BuildCommClient.create( bs_ip, bs_port, nil, 0 )
if client.nil? then
puts "Can't access server #{bs_ip}:#{bs_port}"
exit(-1)
end
- client.send("REGISTER,BINARY,#{File.basename(option[:package])},#{option[:passwd]}")
+ client.send("REGISTER|BINARY|#{File.basename(option[:package])}|#{option[:passwd]}")
client.print_stream
client.terminate
end
# FTP INFO
- client = BuildCommClient.create( result[0], result[1], 0 )
+ client = BuildCommClient.create( result[0], result[1], nil, 0 )
if client.nil? then
puts "Can't access server #{result[0]}:#{result[1]}"
exit(-1)
end
- client.send "QUERY,FTP"
+ client.send "QUERY|FTP"
result0 = client.receive_data()
if result0.nil? then
client.terminate
passwd = result0[2]
client.terminate
- client = BuildCommClient.create( result[0], result[1], 0 )
+ client = BuildCommClient.create( result[0], result[1], nil, 0 )
if client.nil? then
puts "Can't access server #{result[0]}:#{result[1]}"
exit(-1)
end
# FTP INFO
- client = BuildCommClient.create( result[0], result[1], 0 )
+ client = BuildCommClient.create( result[0], result[1], nil, 0 )
if client.nil? then
puts "Can't access server #{result[0]}:#{result[1]}"
exit(-1)
end
- client.send "QUERY,FTP"
+ client.send "QUERY|FTP"
result0 = client.receive_data()
if result0.nil? then
client.terminate
client.terminate
# download
- client = BuildCommClient.create( result[0], result[1], 0 )
+ client = BuildCommClient.create( result[0], result[1], nil, 0 )
if client.nil? then
puts "Can't access server #{result[0]}:#{result[1]}"
exit(-1)
end
file_name = option[:file]
- client.send("DOWNLOAD,#{file_name}")
+ client.send("DOWNLOAD|#{file_name}")
result = client.receive_file(ip, username, passwd, "./#{file_name}")
client.terminate
if not result then
exit 0
end
+# check HOST OS
+if not Utils.check_host_OS() then
+ puts "Error: Your host OS is not supported!"
+ exit 1
+end
-# if "--os" is not specified, use host os type
+# if "--os" is not specified, set it as default
if option[:os].nil? then
- option[:os] = "linux,windows"
+ option[:os] = "default"
end
begin
BuildServerController.add_binary_project( option[:name], option[:pid],
option[:package], option[:passwd], option[:os] )
end
+ when "add-os"
+ BuildServerController.add_target_os( option[:name], option[:os] )
when "fullbuild"
BuildServerController.build_all_projects( option[:name] )
when "register"
build()
{
echo "build"
+ echo "----> ${TARGET_OS}"
}
# install
install()
{
- BIN_DIR=$SRCDIR/package/dibs.package.${BUILD_TARGET_OS}/data/dev_tools/dibs/
- DOC_DIR=$SRCDIR/package/dibs.package.${BUILD_TARGET_OS}/data/dev_tools/dibs/doc
+ BIN_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/
+ DOC_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/doc
mkdir -p $BIN_DIR
mkdir -p $DOC_DIR
cp -f $SRCDIR/pkg-* $BIN_DIR/
cp -f $SRCDIR/NOTICE $DOC_DIR/
cp -f $SRCDIR/doc/* $DOC_DIR/
}
-
-
-$1
-echo "$1 success"
-
--- /dev/null
+#!/bin/sh -xe
+# clean
+clean()
+{
+ rm -rf $SRCDIR/*.zip
+ rm -rf $SRCDIR/*.tar.gz
+}
+
+# build
+build()
+{
+ echo "build"
+}
+
+# install
+install()
+{
+ BIN_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/
+ DOC_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/doc
+ mkdir -p $BIN_DIR
+ mkdir -p $DOC_DIR
+ cp -f $SRCDIR/pkg-* $BIN_DIR/
+ cp -f $SRCDIR/build-* $BIN_DIR/
+ cp -rf $SRCDIR/src $BIN_DIR/
+ cp -f $SRCDIR/AUTHORS $DOC_DIR/
+ cp -f $SRCDIR/LICENSE $DOC_DIR/
+ cp -f $SRCDIR/NOTICE $DOC_DIR/
+ cp -f $SRCDIR/doc/* $DOC_DIR/
+}
--- /dev/null
+#!/bin/sh -xe
+# clean
+clean()
+{
+ rm -rf $SRCDIR/*.zip
+ rm -rf $SRCDIR/*.tar.gz
+}
+
+# build
+build()
+{
+ echo "build"
+}
+
+# install
+install()
+{
+ BIN_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/
+ DOC_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/doc
+ mkdir -p $BIN_DIR
+ mkdir -p $DOC_DIR
+ cp -f $SRCDIR/pkg-* $BIN_DIR/
+ cp -f $SRCDIR/build-* $BIN_DIR/
+ cp -rf $SRCDIR/src $BIN_DIR/
+ cp -f $SRCDIR/AUTHORS $DOC_DIR/
+ cp -f $SRCDIR/LICENSE $DOC_DIR/
+ cp -f $SRCDIR/NOTICE $DOC_DIR/
+ cp -f $SRCDIR/doc/* $DOC_DIR/
+}
-Package : dibs
-Version : 0.99.11
-OS : linux
-Build-host-os : linux
Source : dibs
-Maintainer : taejun ha<taejun.ha@samsung.com>, jiil hyoun <jiil.hyoun@samsung.com>, <donghyouk.yang@samsung.com>, donghee yang< donghee.yang@samsung.com >
+Version : 0.99.18
+Maintainer : taejun ha<taejun.ha@samsung.com>, jiil hyoun <jiil.hyoun@samsung.com>, donghyuk yang <donghyouk.yang@samsung.com>, donghee yang <donghee.yang@samsung.com>, sungmin kim <dev.sungmin.kim@samsung.com>
+
+Package : dibs
+OS : ubuntu-32, windows-32, windows-64, macos-64
+Build-host-os : ubuntu-32
Description : Distribute Intelligent Build System
Package : dibs
-Version : 0.99.11
-OS : windows
-Build-host-os : linux
-Source : dibs
-Maintainer : taejun ha<taejun.ha@samsung.com>, jiil hyoun <jiil.hyoun@samsung.com>, <donghyouk.yang@samsung.com>, donghee yang< donghee.yang@samsung.com >
+OS : ubuntu-64
+Build-host-os : ubuntu-64
Description : Distribute Intelligent Build System
--- /dev/null
+Include: pkginfo.manifest
+
+Package : dibs
+OS : ubuntu-32, windows-32, macos-64, ubuntu-64, windows-64
+Build-host-os : windows-32, macos-64, ubuntu-64, windows-64
+Description : Distribute Intelligent Build System
#generate server when local package server is not set
+# check HOST OS
+if not Utils.check_host_OS() then
+ puts "Error: Your host OS is not supported!"
+ exit 1
+end
+
# if "--os" is not specified, use host os type
if option[:os].nil? then
option[:os] = Utils::HOST_OS
-else
- if not option[:os] =~ /^(linux|windows|darwin)$/ then
- puts "We have no plan to Buld OS \"#{option[:os]}\" \n please check your option OS "
- exit 1
- end
end
path = Dir.pwd
end
#build project
-if not builder.build( Utils::WORKING_DIR, option[:os], option[:clean], [], false) then
+if not builder.build( Utils::WORKING_DIR, option[:os], option[:clean], [], true) then
puts "Build Failed!"
+ exit 1
else
puts "Build Succeeded!"
+ exit 0
end
#generate server when local package server is not set
+# check HOST OS
+if not Utils.check_host_OS() then
+ puts "Error: Your host OS is not supported!"
+ exit 1
+end
+
begin
builder = Builder.get("default")
rescue
exit 0
end
+# check HOST OS
+if not Utils.check_host_OS() then
+ puts "Error: Your host OS is not supported!"
+ exit 1
+end
#if "--os" is not specfied, use host os type
if option[:os].nil? then
- system_type = `uname -s`
- case system_type.strip
- when "Linux" then
- option[:os] = "linux"
- when /MINGW32.*/ then
- option[:os] = "windows"
- when "Darwin" then
- option[:os] = "darwin"
- else
- raise RuntimeError, "Unknown OS type : #{system_type}"
- end
+ option[:os] = Utils::HOST_OS
end
case option[:cmd]
when "update" then
client = Client.new( option[:url], nil, nil )
- client.update()
+ #client.update()
when "clean" then
client = Client.new( nil, option[:loc], nil )
client.clean(option[:f])
when "download" then
client = Client.new( option[:url], option[:loc], nil )
- if not option[:url].nil? then
- client.update()
- end
+ #if not option[:url].nil? then
+ # client.update()
+ #end
file_loc = client.download( option[:pkg], option[:os], option[:t] )
when "install" then
client = Client.new( option[:url], option[:loc], nil )
- if not option[:url].nil? then
- client.update()
- end
+ #if not option[:url].nil? then
+ # client.update()
+ #end
client.install( option[:pkg], option[:os], option[:t], option[:f] )
when "install-file" then
client = Client.new( nil, option[:loc], nil )
client.uninstall( option[:pkg], option[:t] )
when "upgrade" then
client = Client.new( option[:url], option[:loc], nil )
- if not option[:url].nil? then
- client.update()
- end
+ #if not option[:url].nil? then
+ # client.update()
+ #end
client.upgrade( option[:os], option[:t] )
when "check-upgrade" then
client = Client.new( option[:url], option[:loc], nil )
- if not option[:url].nil? then
- client.update()
- end
+ #if not option[:url].nil? then
+ # client.update()
+ #end
client.check_upgrade( option[:os] )
when "show-rpkg" then
client = Client.new( option[:url], nil, nil )
- if not option[:url].nil? then
- client.update()
- end
+ #if not option[:url].nil? then
+ # client.update()
+ #end
puts client.show_pkg_info( option[:pkg], option[:os] )
when "list-rpkg" then
client = Client.new( option[:url], nil, nil )
- if not option[:url].nil? then
- client.update()
- end
+ #if not option[:url].nil? then
+ # client.update()
+ #end
result = client.show_pkg_list( option[:os] )
if not result.nil? and not result.empty? then
result.each do |i|
exit 0
end
+# check HOST OS
+if not Utils.check_host_OS() then
+ puts "Error: Your host OS is not supported!"
+ exit 1
+end
+
begin
if option[:cmd].eql? "list" then
if option[:id].empty? then
when "create"
server.create( option[:id], option[:dist], option[:url], option[:loc] )
when "register"
- server.register( option[:pkgs], option[:dist], option[:gensnap], option[:test] )
+ server.register( option[:pkgs], option[:dist], option[:gensnap], option[:test], false )
when "gen-snapshot"
server.generate_snapshot( option[:snaps][0], option[:dist], option[:bsnap] )
when "sync"
server.sync( option[:dist], option[:force] )
when "add-dist"
server.add_distribution( option[:dist], option[:url], option[:clone] )
+ when "add-os"
+ server.add_os( option[:dist], option[:os] )
when "remove"
if not option[:force] then
puts "Do you want to really? then input \"YES\""
new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3')
pkg_name = new_name.split(",")[0]
os = new_name.split(",")[2]
-
+
# check file name
if @pkg_name != pkg_name then return nil end
+ # check os name
+ if not @server.supported_os_list.include? os then return nil end
+
# check package info
file_path = "#{@server.incoming_path}/#{filename}"
if not File.exist? file_path then return nil end
if not Utils.extract_a_file(file_path, "pkginfo.manifest", pkginfo_dir) then
return nil
end
- pkginfo =PackageManifest.new("#{pkginfo_dir}/pkginfo.manifest")
+ begin
+ pkginfo = PackageManifest.new("#{pkginfo_dir}/pkginfo.manifest")
+ rescue => e
+ puts e.message
+ return nil
+ end
pkgs = pkginfo.get_target_packages(os)
if pkgs.count != 1 then return nil end
if pkgs[0].package_name != @pkg_name then return nil end
- return RegisterPackageJob.new( "#{@server.incoming_path}/#{filename}", @server )
+ new_job = RegisterPackageJob.new( "#{@server.incoming_path}/#{filename}", self, @server )
+
+ return new_job
end
raise ArgumentError, "Usage: build-cli query -d <server address>"
end
+ when "query-system" then
+ if options[:domain].nil? or options[:domain].empty? then
+ raise ArgumentError, "Usage: build-cli query-system -d <server address>"
+ end
+
+ when "query-project" then
+ if options[:domain].nil? or options[:domain].empty? then
+ raise ArgumentError, "Usage: build-cli query-project -d <server address>"
+ end
+
+ when "query-job" then
+ if options[:domain].nil? or options[:domain].empty? then
+ raise ArgumentError, "Usage: build-cli query-job -d <server address>"
+ end
+
when "cancel" then
if options[:job].nil? or options[:job].empty? or
options[:domain].nil? or options[:domain].empty? then
banner = "Requiest service to build-server command-line tool." + "\n" \
+ "\n" + "Usage: build-cli <SUBCOMMAND> [OPTS] or build-cli -h" + "\n" \
+ "\n" + "Subcommands:" + "\n" \
+ + "\t" + "build Build and create package." + "\n" \
+ "\t" + "resolve Request change to resolve-status for build-conflict." + "\n" \
+ "\t" + "query Query information about build-server." + "\n" \
+ + "\t" + "query-system Query system information about build-server." + "\n" \
+ + "\t" + "query-project Query project information about build-server." + "\n" \
+ + "\t" + "query-job Query job information about build-server." + "\n" \
+ "\t" + "cancel Cancel a building project." + "\n" \
+ "\t" + "register Register the package to the build-server." + "\n" \
+ "\n" + "Subcommand usage:" + "\n" \
+ "\t" + "build-cli build -N <project name> -d <server address> [-o <os>] [-w <password>] [--async]" + "\n" \
+ "\t" + "build-cli resolve -N <project name> -d <server address> [-o <os>] [-w <password>] [--async]" + "\n" \
+ "\t" + "build-cli query -d <server address>" + "\n" \
+ + "\t" + "build-cli query-system -d <server address>" + "\n" \
+ + "\t" + "build-cli query-project -d <server address>" + "\n" \
+ + "\t" + "build-cli query-job -d <server address>" + "\n" \
+ "\t" + "build-cli cancel -j <job number> -d <server address> [-w <password>] " + "\n" \
+ "\t" + "build-cli register -P <file name> -d <server address> -t <ftp server url> [-w <password>] " + "\n" \
+ "\n" + "Options:" + "\n"
optparse = OptionParser.new(nil, 32, ' '*8) do|opts|
+
# Set a banner, displayed at the top
# of the help screen.
end
options[:os] = nil
- opts.on( '-o', '--os <operating system>', 'target operating system: linux/windows/darwin' ) do|os|
+ opts.on( '-o', '--os <operating system>', 'target operating system: ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64' ) do |os|
options[:os] = os
end
cmd = ARGV[0]
if cmd.eql? "build" or cmd.eql? "resolve" or
- cmd.eql? "query" or cmd.eql? "cancel" or
+ cmd.eql? "query" or cmd.eql? "query-system" or
+ cmd.eql? "query-project" or cmd.eql? "query-job" or
+ cmd.eql? "cancel" or
cmd.eql? "register" or
cmd =~ /(-v)|(--version)/ or
cmd =~ /(help)|(-h)|(--help)/ then
require "fileTransfer"
require "net/ftp"
+ATTEMPTS = ["first", "second", "third"]
+
class BuildCommServer
- VERSION = "1.3.0"
+ VERSION = "1.4.0"
- def initialize(port, log, ftp_url=nil)
- @port = port
- # checking port is available
- if port_open? @port then
- raise "Port \"#{@port}\" is already in use."
- end
+ private_class_method :new
+ def initialize(port, log, ftp_url)
+ @port = port
@log = log
@ftp_url = ftp_url
@tcp_server = TCPServer.open( port )
end
+ def self.create(port, log, ftp_url=nil)
+ # checking port is available
+ if port_open? port then
+ raise "Port \"#{@port}\" is already in use."
+ end
+
+ if log.nil? then
+ log = Log.new(nil)
+ end
+
+ return new(port, log, ftp_url)
+ end
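+
+ # Usage sketch (comment only): with "new" now private, callers go through the
+ # factory, which raises if the port is already in use and falls back to
+ # Log.new(nil) when no logger is given. The port and logger below are placeholders.
+ #
+ #   comm_server = BuildCommServer.create( 2222, my_log )
+ #   comm_server.wait_for_connection( quit_loop )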
+
# wait for connection and handle request
def wait_for_connection(quit_loop)
begin
if not File.exist? src_file then
- puts "[E] \"#{src_file}\" file does not exist"
+ @log.error "\"#{src_file}\" file does not exist"
req.puts "ERROR"
return false
end
req.puts "READY"
+ @log.info "Ready to upload file"
while l = req.gets()
tok = l.split(",").map { |x| x.strip }
cmd = tok[0].strip
- puts "[I] Received \"#{cmd}\" message.."
if cmd == "FTP" then
if tok.count < 5 then
- puts "[E] Received wrong REQ : #{l.strip}"
+ @log.error "Server received wrong REQ : #{l.strip}"
req.puts "ERROR"
return false
end
port = tok[2].strip
username = tok[3].strip
passwd = tok[4].strip
- puts "[I] FTP info from client : [#{ip}, #{port}, #{username}, #{passwd}]"
+ @log.info "Server received ftp server infomation from client : [#{ip}, #{port}]"
else
url_contents = Utils.parse_ftpserver_url(@ftp_url)
ip = url_contents[0]
end
# upload to ftp server
- ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file)
+ FileTransfer.set_logger(@log)
+ ftp_filepath = nil
+ for attempt in ATTEMPTS
+ ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file)
+ if not ftp_filepath.nil? then break
+ else @log.info "The #{attempt} upload attempt on server failed" end
+ end
+ if ftp_filepath.nil? then
+ req.puts "ERROR"
+ return false
+ else @log.info "Server is the #{attempt} successful attempt to upload file: [#{File.basename(src_file)}]" end
req.puts "UPLOADED,#{ftp_filepath}"
- puts "[I] Uploaded file to FTP server: [#{File.basename(src_file)}]"
elsif cmd == "SUCC" then
+ @log.info "Client downloaded file successfully"
FileTransfer.cleandir(ip, port, username, passwd, ftp_filepath)
- puts "[I] Clean temporary dir on FTP server: #{ftp_filepath}"
+ @log.info "Cleaned temporary dir on FTP server: #{ftp_filepath}"
break
- end
+ elsif cmd == "ERROR" then
+ @log.error "Client failed to download file"
+ return false
+ end
end
rescue => e
- puts "[E] Connection closed"
- raise e
+ puts "[BuildCommServer] Exception"
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ return false
end
return true
while l = req.gets()
tok = l.split(",").map { |x| x.strip }
cmd = tok[0].strip
- puts "[I] Received \"#{cmd}\" message.."
if cmd == "UPLOADED" then
+ @log.info "Client uploaded file to ftp server successful"
if tok.count < 6 then
- puts "[E] Received wrong REQ : #{l.strip}"
+ @log.error "Server received wrong REQ : #{l.strip}"
req.puts "ERROR"
return false
end
port = tok[2].strip
username = tok[4].strip
passwd = tok[5].strip
- puts "[I] FTP info from client: [#{ip}, #{port}, #{username}, #{passwd}]"
+ @log.info "Client sent ftp server infomations [#{ip}, #{port}]"
else
url_contents = Utils.parse_ftpserver_url(@ftp_url)
ip = url_contents[0]
end
# download from ftp server
- FileTransfer.getfile(ip, port, username, passwd, filepath, dst_file)
- puts "[I] Received file from FTP server sucessfully.."
+ FileTransfer.set_logger(@log)
+ dst_filepath = nil
+ for attempt in ATTEMPTS
+ dst_filepath = FileTransfer.getfile(ip, port, username, passwd, filepath, dst_file)
+ if not dst_filepath.nil? then break
+ else "Server is the #{attempt} download attempt fails" end
+ end
+ if dst_filepath.nil? then
+ req.puts "ERROR"
+ return false
+ else @log.info " Server is the #{attempt} successful attempt to download" end
req.puts "SUCC"
break
- end
- end
+ elsif cmd == "ERROR" then
+ @log.error "Client failed to upload the file"
+ return false
+ end
+ end
rescue => e
- puts "[E] Connectio is closed"
- req.puts "ERROR"
- raise e
+ puts "[BuildCommServer] Exception"
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ return false
end
return true
end
end
- private
- def port_open?( port )
+ def self.port_open?( port )
Timeout::timeout(1) do
begin
TCPSocket.new("127.0.0.1",port).close
class BuildCommClient
- VERSION = "1.3.0"
+ VERSION = "1.4.0"
private_class_method :new
- def initialize(socket)
+ def initialize(socket, log)
+ @log = log
@socket = socket
end
# create
# if sec is 0 or nil, no timeout is set
- def self.create(ip, port, sec = 5)
+ def self.create(ip, port, log = nil, sec = 5)
# open socket
socket = nil
begin
if socket.nil? then
return nil
end
+
+ if log.nil? then
+ log = Log.new(nil)
+ end
- return new(socket)
+ return new(socket, log)
end
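+
+ # Usage sketch (comment only), matching how callers in this change use the new
+ # signature: pass a logger (or nil to get a default Log.new(nil)) and a timeout
+ # in seconds, where 0 or nil disables the timeout. The address and port below
+ # are placeholders.
+ #
+ #   client = BuildCommClient.create( "127.0.0.1", 2222, nil, 0 )
+ #   if not client.nil? then
+ #     client.send "QUERY|SYSTEM"
+ #     client.terminate
+ #   end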
begin
l = @socket.gets()
if l.nil? then
- puts "[E] Connection refused"
+ @log.error "[BuildCommClient] Connection refused"
return false
end
# check protocol
if not protocol_matched? l.strip then
- puts "[E] Comm. Protocol version is mismatched! #{VERSION}"
+ @log.error "[BuildCommClient] Comm. Protocol version is mismatched! #{VERSION}"
return false
end
+ FileTransfer.set_logger(@log)
+
# 1. If "READY" is received, upload src file to FTP server
# After uploading it, send "UPLOADED,ip,file_path,username,passwd"
# 2. If "SUCC" is received, remove the file on FTP server
while line = @socket.gets()
- puts "[I] Recevied \"#{line.strip}\" message.."
if line.strip == "READY" then
- ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file)
+ @log.info "Server is ready to receive file"
+ ftp_filepath = nil
+ for attempt in ATTEMPTS
+ ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file)
+ if not ftp_filepath.nil? then break
+ else @log.info "The #{attempt} upload attempt on client failed" end
+ end
+ if ftp_filepath.nil? then
+ send "ERROR"
+ return false
+ else @log.info "Client is the #{attempt} successful attempt to upload file" end
send "UPLOADED,#{ip},#{port},#{ftp_filepath},#{username},#{passwd}"
- puts "[I] Uploaded file to FTP server : #{File.basename(src_file)}"
elsif line.strip == "SUCC" then
+ @log.info "Server downloaded file sucessfully"
FileTransfer.cleandir(ip, port, username, passwd, ftp_filepath)
- puts "[I] Clean FTP temporary dir : #{ftp_filepath}"
+ @log.info "Client cleaned temporary dir on ftp server: #{ftp_filepath}"
elsif line.strip == "ERROR" then
- puts "[E] FTP failed to put file"
+ @log.error "Server failed to download the file. Please check server log"
return false
elsif line.strip == "=END" then
break
end
end
rescue => e
- puts "[E] Connection closed"
- raise e
+ puts "[BuildCommClient] Exception"
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ return false
end
return true
l = @socket.gets()
if l.nil? then
- puts "[E] Connection refused"
+ @log.error "[BuildCommClient] Connection refused"
return false
end
# check protocol
if not protocol_matched? l.strip then
- puts "[E] Comm. Protocol version is mismatched! #{VERSION}"
+ @log.error "[BuildCommClient] Comm. Protocol version is mismatched! #{VERSION}"
return false
end
+ FileTransfer.set_logger(@log)
+
# 1. If "READY" is received, send "FTP,ip,port,username,passwd"
# 2. if "UPLOADED,ftp_file_path" is received,
# Download the file
# 3. If "SUCC" is received, remove the file on FTP server
while line = @socket.gets()
cmd = line.split(",")[0].strip
- puts "[I] Received \"#{cmd}\" message"
+ #@log.info "[BuildCommClient] Received \"#{cmd}\" message from BuildCommServer"
if cmd == "READY" then
send "FTP,#{ip},#{port},#{username},#{passwd}"
- puts "[I] Send FTP info to server : #{ip}, #{port}, #{username}, #{passwd}"
+ @log.info "Client sent ftp server infomation to server : [#{ip}, #{port}]"
elsif cmd == "UPLOADED" then
tok = line.split(",")
if tok.length < 2 then
- puts "[ERROR] Received wrong REQ : #{line.strip}"
+ @log.error "Client received wrong REQ : #{line.strip}"
return false
end
ftp_filepath = tok[1].strip
- FileTransfer.getfile(ip, port, username, passwd, ftp_filepath, dst_file)
+ @log.info "Server uploaded file sucessfully"
+ FileTransfer.set_logger(@log)
+ dst_filepath = nil
+ for attempt in ATTEMPTS
+ dst_filepath = FileTransfer.getfile(ip, port, username, passwd, ftp_filepath, dst_file)
+ if not dst_filepath.nil? then break
+ else "Client is the #{attempt} download attempt fails" end
+ end
+ if dst_filepath.nil? then
+ send "ERROR"
+ return false
+ else @log.info "Client is the #{attempt} successful attempt to download" end
send "SUCC"
- puts "[I] Received file from FTP server sucessfully.."
elsif cmd == "ERROR" then
- puts "[E] Failed to receive file from server"
+ @log.error "Server failed to upload file. Check server log"
return false
elsif cmd == "=END" then
break
end
end
- rescue
- puts "[E] Connection closed"
- return false
+ rescue => e
+ puts "[BuildCommServer] Exception"
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ return false
end
return true
require "BuildServer.rb"
require "JobLog.rb"
require "mail.rb"
+require "utils.rb"
+require "ReverseBuildChecker.rb"
class BuildJob
attr_accessor :status, :pkginfo, :log, :source_path
attr_accessor :pkgsvr_client, :thread
attr_accessor :rev_fail_projects, :rev_success_jobs
- attr_accessor :pending_ancestor, :build_dep_prjs
- attr_accessor :is_fullbuild_job
+ attr_accessor :pending_ancestor, :cancel_state
# initialize
def initialize (id, project, os, server)
@type = "BUILD"
@status = "JUST_CREATED"
+ @cancel_state = "NONE"
@resolve = false
@host_os = Utils::HOST_OS
@pkgserver_url = @server.pkgserver_url
#for cancel operation
@pending_ancestor = nil # for cancel pending job
@remote_id = nil # for cancel remote_working job
- @build_dep_prjs = [] # for cacnel pending job
+ @build_dep_prjs = nil # for canceling pending jobs
# for resolving build-break
- @rev_fail_projects = []
- @rev_success_jobs = []
+ @rev_fail_projects = [] # list of [project,os]
+ @rev_success_jobs = [] # list of job
# remote build
@remote_server = nil
@is_rev_build_check_job = false
@is_remote_job = false
@is_internal_job = false
- @is_fullbuild_job = false
@external_pkgs = []
+ @force_rebuild = false
end
@buildroot_dir = "#{@server.path}/jobs/#{@parent.id}/buildroot"
end
+ # get parent
+ def get_parent_job()
+ return @parent
+ end
+
+
+ def is_sub_job?
+ return (not @parent.nil?) ? true : false
+ end
+
+
+ def get_sub_jobs
+ return []
+ end
+
# set reverse build check job
def set_rev_build_check_job( parent )
end
+ # set force rebuild
+ # This makes the project build even if a package of
+ # the same version already exists on the pkg-server
+ def set_force_rebuild(value)
+ @force_rebuild = value
+ end
+
+
# set logger
def set_logger( logger )
@log = logger
#cancel
def cancel()
+ # cancel log print
+ if not @log.nil? then
+ @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER)
+ end
+
case @status
- when "WORKING" then
- @status = "CANCELED"
- @thread.terminate
- if not @log.nil? then
- @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER)
- end
- terminate
when "REMOTE_WORKING" then
- @status = "CANCELED"
- client = BuildCommClient.create( @server.ip, @server.port )
+ client = BuildCommClient.create( @server.ip, @server.port, @log )
if not client.nil? then
- client.send "CANCEL,#{@remote_id}"
+ client.send "CANCEL|#{@remote_id}|#{self.get_project.passwd}"
result1 = client.receive_data()
if result1.nil? then
- client.terminate
- exit(-1)
+ @log.info( "cancel operation failed [connection error] !!", Log::LV_USER)
+ else
+ @log.info(result1, Log::LV_USER)
end
- end
- when "WAITING" , "INITIALIZING" , "JUST_CREATED" then
- @status = "CANCELED"
- if not @log.nil? then
- @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER)
+ client.terminate
end
when "PENDING" then
- @server.cancel_lock.synchronize{
- @thread.terminate
- @status = "CANCELED"
- }
if @pending_ancestor.nil? then
#resolve pending job
- pending_descendants = @server.jobmgr.jobs.select{|j| (not j.pending_ancestor.nil?) and "#{j.pending_ancestor.id}" == "#{@id}"}
+ pending_descendants = @server.jobmgr.jobs.select do |j|
+ (not j.pending_ancestor.nil?) and "#{j.pending_ancestor.id}" == "#{@id}"
+ end
pending_descendants.each do |pd|
- pd.cancel
+ pd.cancel_state = "INIT"
end
else
- @pending_ancestor.rev_success_jobs.delete self if @pending_ancestor.rev_success_jobs.include? self
- @pending_ancestor.rev_fail_projects.push @project if not @pending_ancestor.rev_fail_projects.include? @project
- pending_descendants = @server.jobmgr.jobs.select{|j| (not j.pending_ancestor.nil?) and "#{j.pending_ancestor.id}" == "#{@pending_ancestor.id}"}
- pending_descendants.select{|j| j.build_dep_prjs.include? @project}.each do |d|
- @pending_ancestor.rev_success_jobs.delete d if @pending_ancestor.rev_success_jobs.include? d
- @pending_ancestor.rev_fail_projects.push d.get_project if not @pending_ancestor.rev_fail_projects.include? d.get_project
- d.thread.terminate
+ # remove myself from the reverse-success job list if present
+ # and add myself to the rev_fail_projects list if not already there
+ @pending_ancestor.remove_rev_success_job(self)
+ @pending_ancestor.add_rev_fail_project( @project, @os )
+
+ # for each sub job that depends on my project, remove it from the
+ # reverse-success list and add its [project, os] to the rev_fail list
+ p_sub_jobs = @server.jobmgr.jobs.select do |j|
+ ( not j.pending_ancestor.nil? and
+ "#{j.pending_ancestor.id}" == "#{@pending_ancestor.id}" and
+ j.is_build_dependent_project(@project, @os) )
+ end
+ p_sub_jobs.each do |d|
+ @pending_ancestor.remove_rev_success_job(d)
+ @pending_ancestor.add_rev_fail_project( d.get_project, d.os )
+
+ if not d.thread.nil? then d.thread.terminate end
d.status = "WAITING"
end
end
- if not @log.nil? then
- @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER)
- end
- terminate
+ when "WORKING", "WAITING" , "INITIALIZING" , "JUST_CREATED" then
+ #just log
else # ERROR | FINISHED | RESOLVED
# do nothing
end
# check building is possible
def can_be_built_on?(host_os)
+ if @pkginfo.nil? then return false end
+
for pkg in @pkginfo.packages
- if pkg.os == @os and pkg.build_host_os.include? host_os then
+ if pkg.os_list.include? @os and pkg.build_host_os.include? host_os then
return true
end
end
-
return false
end
+ def get_packages()
+ return @pkginfo.packages
+ end
+
+
+ def get_build_dependencies(target_os)
+ return @pkginfo.get_build_dependencies(target_os)
+ end
+
+
+ def get_source_dependencies(target_os,host_os)
+ return @pkginfo.get_source_dependencies(target_os,host_os)
+ end
+
+
+ def is_compatible_with?(o)
+ if type != o.type then return false end
+
+ my_project = get_project()
+ other_project = o.get_project()
+
+ # check project name
+ if my_project.nil? or other_project.nil? or
+ my_project.name != other_project.name then
+ return false
+ end
+
+ # check version
+ if @pkginfo.nil? or o.pkginfo.nil? or
+ not (Version.new(@pkginfo.get_version()) == Version.new(o.pkginfo.get_version())) then
+ return false
+ end
+
+ # check compat os
+ compatable_packages = {} # | packag, os_list |
+ @pkginfo.get_target_packages(@os).each do |p|
+ if not p.os_list.include?(o.os) then return false end
+ end
+
+ return true
+ end
+
+
def has_build_dependency?(other_job)
if has_same_packages?(other_job) or
def has_same_packages?( wjob )
- # if job type is different, always true
- if @type != wjob.type then return true end
# same package must have same os
if not @os.eql? wjob.os then
end
# check package name
- for pkg in @pkginfo.packages
- for wpkg in wjob.pkginfo.packages
+ for pkg in get_packages()
+ for wpkg in wjob.get_packages()
if pkg.package_name == wpkg.package_name then
#puts "Removed from candiated... A == B"
return true
def does_depend_on?( wjob )
- # if job type is different, always true
- if @type != wjob.type then return true end
# compare build dependency
- for dep in @pkginfo.get_build_dependencies(@os)
- for wpkg in wjob.pkginfo.packages
+ for dep in get_build_dependencies(@os)
+ for wpkg in wjob.get_packages()
# dep packages of my job must have same name and target os
# with packages in working job
if dep.package_name == wpkg.package_name and
end
end
- # if both are full build jobs, install dependency must be checked
- if @is_fullbuild_job and wjob.is_fullbuild_job and
- @os == wjob.os then
-
- for dep in @pkginfo.get_install_dependencies(@os)
- for wpkg in wjob.pkginfo.packages
- if dep.package_name == wpkg.package_name then
- return true
- end
- end
- end
- end
-
return false
end
def does_depended_by?( wjob )
- # if job type is different, always true
- if @type != wjob.type then return true end
- for pkg in @pkginfo.packages
- for dep in wjob.pkginfo.get_build_dependencies(wjob.os)
+ for pkg in get_packages()
+ for dep in wjob.get_build_dependencies(wjob.os)
# dep package of working job must have same name and target os
# with packages in my job
if dep.package_name == pkg.package_name and
end
end
+
+ # remove job from reverse success job
+ def remove_rev_success_job( job )
+ @rev_success_jobs.delete job if @rev_success_jobs.include? job
+ end
+
+
+ # check [project,os] is in reverse fail project list
+ def is_rev_fail_project( prj, os )
+ # check the project already exist
+ for p in @rev_fail_projects
+ if p[0] == prj and p[1] == os then
+ return true
+ end
+ end
+
+ return false
+ end
+
+
+ # add [project,os] to reverse fail project list
+ def add_rev_fail_project( prj, os )
+ # check the project already exist
+ for p in @rev_fail_projects
+ if p[0] == prj and p[1] == os then
+ return
+ end
+ end
+ # if not, add it
+ @rev_fail_projects.push [prj,os]
+ end
+
+
+ # remove [project,os] from reverse fail project list
+ def remove_rev_fail_project( prj, os )
+ remove_list = []
+
+ # check project and os name
+ for p in @rev_fail_projects
+ if p[0] == prj and p[1] == os then
+ remove_list.push p
+ end
+ end
+
+ # remove
+ for r in remove_list
+ @rev_fail_projects.delete r
+ end
+ end
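+
+ # Illustrative usage (comment only): @rev_fail_projects holds [project, os]
+ # pairs, so the helpers above are used like the following, where prj and the
+ # OS name are placeholders.
+ #
+ #   add_rev_fail_project( prj, "ubuntu-32" )
+ #   is_rev_fail_project( prj, "ubuntu-32" )      # => true
+ #   remove_rev_fail_project( prj, "ubuntu-32" )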
+
+
+ # get project that my job is dependent on
+ def get_build_dependent_projects()
+ if @build_dep_prjs.nil? then
+ deps = @pkginfo.get_build_dependencies(@os)
+ pkgs = deps.map{|x|
+ # if "os" is not specified, use my "os"
+ if x.target_os_list.nil? or x.target_os_list.empty? then
+ os = @os
+ else
+ os = x.target_os_list[0]
+ end
+
+ # package as item
+ @pkgsvr_client.get_pkg_from_list(x.package_name, os)
+ }
+ prjs = @server.prjmgr.get_projects_from_pkgs(pkgs)
+ @build_dep_prjs = prjs
+ end
+
+ return @build_dep_prjs
+ end
+
+
+ # check if the project is my dependent project
+ def is_build_dependent_project( prj, os )
+ dep_list = get_build_dependent_projects()
+ for dep in dep_list
+ if dep[0] == prj and dep[1] == os then
+ return true
+ end
+ end
+
+ return false
+ end
+
#
# PROTECTED METHODS
#
@log.info( "New Job #{@id} is started", Log::LV_USER)
# checking build dependency
- if not @is_rev_build_check_job and not @is_remote_job and
- not @is_fullbuild_job and
+ if not @is_remote_job and
not check_build_dependency() then
@status = "ERROR"
return false
# clean build
if not build() then
@status = "ERROR"
- return
+ return false
end
# upload
- if not @is_rev_build_check_job and not @is_internal_job and
+ if not @is_rev_build_check_job and not @is_internal_job and
+ @parent.nil? and
not upload() then
@status = "ERROR"
- return
+ return false
end
# copy result files to outgoing path
if @is_internal_job then
copy_result_files(@server.outgoing_path)
- elsif @is_fullbuild_job then
+ elsif not @parent.nil? and not @is_rev_build_check_job then
copy_result_files(@parent.source_path)
end
# INFO. don't change this string
@log.info( "Job is completed!", Log::LV_USER)
@status = "FINISHED"
+ return true
end
# check if local package version is greater than server
- def check_package_version()
+ def check_package_version( source_info )
@log.info( "Checking package version ...", Log::LV_USER)
- # package update
- @pkgsvr_client.update
+ # check if version is same and source_info is different
+ ver_local = @pkginfo.packages[0].version
+ old_source_info = @project.get_source_info( ver_local )
+ if not old_source_info.nil? and old_source_info != source_info then
+ @log.error( "Source code has been changed without increasing version!", Log::LV_USER)
+ @log.error( " * Version : #{ver_local}", Log::LV_USER)
+ @log.error( " * Before : #{old_source_info}", Log::LV_USER)
+ @log.error( " * Current : #{source_info}", Log::LV_USER)
+
+ return false
+ end
+ # compare with package version in package server
for pkg in @pkginfo.packages
- ver_local = pkg.version
- #ver_svr = @pkgsvr_client.get_package_version( pkg.package_name, @os )
- ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version")
- if not ver_svr.nil? and Version.new(ver_local) <= Version.new(ver_svr) then
- @log.error( "Version must be increased : #{ver_local} <= #{ver_svr}", Log::LV_USER)
- return false
+ # check all supported os
+ for os in @server.supported_os_list
+ ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, os, "version")
+ # ignore if package does not exist
+ if ver_svr.nil? then next end
+
+ # compare version
+ if Version.new(ver_local) < Version.new(ver_svr) or
+ ( not @force_rebuild and Version.new(ver_local) == Version.new(ver_svr) ) then
+ @log.error( "Version must be increased : #{ver_local} <= #{ver_svr}", Log::LV_USER)
+ return false
+ end
end
end
# build dependency version
+ # make sure that package server has all dependency packages of job
def check_build_dependency()
@log.info( "Checking build dependency ...", Log::LV_USER)
@pkgsvr_client.update
for dep in @pkginfo.get_build_dependencies( @os )
- #ver_svr = @pkgsvr_client.get_package_version( dep.package_name, @os )
- if dep.target_os_list.count != 0 then
- dep_target_os = dep.target_os_list[0]
- else
- dep_target_os = @os
+ # if a parent exists, search the parent source path first;
+ # if not found, check the package server
+ ver_svr = nil
+ if not @parent.nil? then
+ local_pkg = get_local_package_of_dependency( dep, @parent.source_path )
+ if not local_pkg.nil? then
+ ver_svr = Utils.get_version_from_package_file( local_pkg )
+ else
+ ver_svr = nil
+ end
end
- ver_svr = @pkgsvr_client.get_attr_from_pkg( dep.package_name, dep_target_os, "version")
+ if not ver_svr.nil? then next end
- if ver_svr.nil?
+ if not remote_package_of_dependency_exist?(dep) then
@log.error( "The package \"#{dep.package_name}\" for build-dependency is not found", Log::LV_USER)
return false
end
-
- if not dep.match? ver_svr
- @log.error( "Version for build-dependency in not matched : server version => #{ver_svr}", Log::LV_USER)
- return false
- end
end
return true
# this process must be skip if it is sub-job
if not @is_rev_build_check_job and not @is_internal_job then
@server.cancel_lock.synchronize{
- @pending_ancestor = get_pending_job()
+ @pending_ancestor = get_pending_ancestor_job()
}
end
# return pending job that wait for me
- def get_pending_job()
+ def get_pending_ancestor_job()
@server.jobmgr.get_pending_jobs.each do |job|
- if job.rev_fail_projects.include? @project then
+ if job.is_rev_fail_project(@project,@os) then
return job
end
end
end
+ def check_compatable_packages
+ compatable_packages = {} # | packag, os_list |
+ @pkginfo.get_target_packages(@os).each do |p|
+ # if the package targets only one os, it must be built
+ if p.os_list.count > 1 then
+ compat_found = false
+ p.os_list.each do |o|
+ # check whether this package already exists in the package server for another os
+ ver_svr = @pkgsvr_client.get_attr_from_pkg( p.package_name, o, "version")
+ if not ver_svr.nil? and p.version.eql? ver_svr then
+ # get package file name
+ if compatable_packages[p.package_name].nil? then
+ compatable_packages[p.package_name] = [o]
+ else
+ compatable_packages[p.package_name].push o
+ end
+ compat_found = true
+ end
+ end
+ if not compat_found then return {} end
+ else
+ return {}
+ end
+ end
+ return compatable_packages
+ end
+
+
def build_normal()
@log.info( "Started to build this job...", Log::LV_USER)
builder.log.close
builder.log = @log
- # if sub job(reverse-build), install parent-pkgs and not clean
+ # if this is a sub job, install dependency packages from the parent's source path and skip clean build
use_clean = true
local_pkgs = []
local_pkgs += @external_pkgs
- if @is_rev_build_check_job and not @parent.nil? then
- use_clean = false
- # get local packages to install
- src_path = @parent.source_path
- ver = @parent.pkginfo.get_version()
- for pkg in @parent.pkginfo.get_target_packages(@os)
- local_pkgs.push "#{src_path}/#{pkg.package_name}_#{ver}_#{@os}.zip"
- end
- end
- if @is_fullbuild_job and not @parent.nil? then
+ if not @parent.nil? then
use_clean = false
# get local packages to install
src_path = @parent.source_path
deps = @pkginfo.get_build_dependencies(@os)
for dep in deps
- binpkgs = Dir.glob("#{src_path}/#{dep.package_name}_*_#{@os}.zip")
- if binpkgs.count > 0 then
- local_pkgs.push binpkgs[0]
- end
+ pkg = get_local_package_of_dependency( dep, src_path )
+ if not pkg.nil? then local_pkgs.push pkg end
end
end
- # build
- if @is_remote_job then
- result = builder.build(@project.name, @project.passwd, @source_path, @os,
- use_clean, @is_rev_build_check_job, @git_commit, local_pkgs)
+ # compatible os support
+ comp_pkgs = check_compatable_packages
+ if comp_pkgs.size > 0 and not @is_rev_build_check_job then
+ # bring package from server for reverse check
+ comp_pkgs.each do |pkg_name,os_list|
+ loc = @pkgsvr_client.download(pkg_name, os_list[0], false)
+ loc.each do |location|
+ ext = File.extname(location)
+ base_package_name = File.basename( location, "#{os_list[0]}#{ext}" )
+ FileUtils.mv location, "#{@source_path}/#{base_package_name}#{@os}#{ext}"
+ end
+ end
else
- result = builder.build(@source_path, @os, use_clean, local_pkgs, false )
+ # build
+ if @is_remote_job then
+ result = builder.build(@project.name, @project.passwd, @source_path, @os,
+ use_clean, @is_rev_build_check_job, @git_commit, local_pkgs)
+ else
+ result = builder.build(@source_path, @os, use_clean, local_pkgs, false )
+ end
+ if not result then
+ @log.error( "Building job failed", Log::LV_USER)
+ return false
+ end
end
- if not result then
- @log.error( "Building job failed", Log::LV_USER)
- return false
- end
# check reverse dependency if not sub job
if not @is_rev_build_check_job and not @is_internal_job and
- not check_reverse_build( [], true ).empty? then
+ not ReverseBuildChecker.check( self, true ).empty? then
@log.error( "Reverse-build-check failed!" )
return false
end
end
rev_pkgs.uniq!
rev_projects = @server.prjmgr.get_projects_from_pkgs(rev_pkgs)
- rev_projects -= ignored_projects
+ for ip in ignored_projects
+ for rp in rev_projects
+ if rp[0] == ip[0] and rp[1] == ip[1] then
+ rev_projects.delete rp
+ end
+ end
+ end
+
+ # create reverse build job
+ rev_build_jobs = []
+ for p in rev_projects
+ prj = p[0]
+ os = p[1]
+ version = p[2]
- # build rev-dep project as sub-job
- for prj in rev_projects
+ # check project type
if prj.type != "GIT" then next end
- for os in prj.os_list
- # check version
- version = nil
- for pkg in rev_pkgs
- if os != pkg.os then next end
- if prj.include_package?(pkg.package_name, pkg.version, os) then
- version = pkg.version
+
+ # if this is a sub job, any job that already exists in the parent job must be ignored
+ if is_sub_job? then
+ job_found = false
+ for job in @parent.get_sub_jobs
+ sprj = job.get_project()
+ if sprj.name == prj.name and job.os == os then
+ job_found = true
break
end
end
- if version.nil? then next end
- # create sub jobs for checking
- new_job = prj.create_new_job_from_version(os, version)
- new_job.set_rev_build_check_job(self)
- @log.info( " * Checking reverse-build ... #{prj.name}(#{new_job.id})", Log::LV_USER)
- result = new_job.init()
+ if job_found then next end
+ end
+
+ # create job
+ new_job = prj.create_new_job_from_version(os, version)
+ new_job.set_rev_build_check_job(self)
+
+ rev_build_jobs.push new_job
+ end
+
+ # reverse build
+ if rev_build_jobs.count > 0 then
+ rev_prjs_txt = rev_build_jobs.map {|j| "#{j.get_project().name}(#{j.os})"}.join(", ")
+ @log.info( " * Will check reverse-build for projects: #{rev_prjs_txt}", Log::LV_USER)
+ end
+ for new_job in rev_build_jobs
+ @log.info( " * Checking reverse-build ... #{new_job.get_project().name}(#{new_job.id})", Log::LV_USER)
+ # job init
+ result = new_job.init()
+ # if init succeeded, try to execute
+ if result then
+ # check available server
rserver = @server.get_available_server( new_job )
if rserver != nil and rserver != @server then
new_job.set_remote_job( rserver )
end
- if result then
- new_job.execute(true)
- if new_job.status == "ERROR" then result = false end
- end
+ # execute
+ new_job.execute(true)
+ if new_job.status == "ERROR" then result = false end
+ end
- # check result
- if not result then
- failed_projects.push prj
- if exit_on_error then
- return failed_projects
- end
+ # check result
+ if not result then
+ failed_projects.push [new_job.get_project(), new_job.os]
+ if exit_on_error then
+ return failed_projects
end
end
end
end
# check reverse dependency
- @rev_fail_projects = check_reverse_build([], false)
+ @rev_fail_projects = ReverseBuildChecker.check(self, false)
if @rev_fail_projects.empty? then
# if there is no problem, it's OK
return true
# pending
@status = "PENDING"
@log.info( "Entered the PENDING state ...", Log::LV_USER)
- while @status == "PENDING"
+ old_msg = ""
+ while @status == "PENDING"
+ new_msg = @rev_fail_projects.map {|p| "#{p[0].name}(#{p[1]})"}.join(", ")
+ if old_msg != new_msg then
+ @log.error( " * Waiting for building next projects: #{new_msg}", Log::LV_USER)
+ old_msg = new_msg
+ end
sleep 1
end
# resolve other pending job
def resolve()
- deps = @pkginfo.get_build_dependencies(@os)
- pkgs = deps.map{|x|@pkgsvr_client.get_pkg_from_list(x.package_name, @os)}
- prjs = @server.prjmgr.get_projects_from_pkgs(pkgs)
- @build_dep_prjs = prjs
- old = ""
- while not (@pending_ancestor.rev_fail_projects & prjs).empty?
+
+ # wait until other build-dependent projects are resolved
+ old_msg = ""
+ wait_prjs = @pending_ancestor.rev_fail_projects.select {|p| is_build_dependent_project(p[0], p[1])}
+ @log.info("Checking build dependency before RESOLVE", Log::LV_USER)
+ while not wait_prjs.empty?
@status = "PENDING"
- new = (@pending_ancestor.rev_fail_projects & prjs).map{|x|x.name}.join("\" and \"")
- if new != old then
- @log.info("waiting \"#{new}\" projects", Log::LV_USER)
- old = new
+ new_msg = wait_prjs.map {|p| "#{p[0].name}(#{p[1]})"}.join(", ")
+ if new_msg != old_msg then
+ @log.info(" * Waiting for building next projects: #{new_msg}", Log::LV_USER)
+ old_msg = new_msg
end
sleep 1
+ wait_prjs = @pending_ancestor.rev_fail_projects.select {|p| is_build_dependent_project(p[0], p[1])}
end
+
+ # return back to "WORKING"
@status = "WORKING"
@log.info( "Started to build this job and resolve other pending job...", Log::LV_USER)
return false
end
- # get ignored projects will not be built in reverse build check
- # the projects are failed projects of pending jobs
- # and they must be built by other jobs
- ignored_projects = @pending_ancestor.rev_fail_projects
-
- # check reverse dependecy
- @pending_ancestor.rev_fail_projects = (@pending_ancestor.rev_fail_projects + check_reverse_build(ignored_projects, false)).uniq
+ # check reverse dependency and update the parent's rev_fail_projects list
+ new_fail_projects = ReverseBuildChecker.check(self, false)
+ for p in new_fail_projects
+ @pending_ancestor.add_rev_fail_project(p[0], p[1])
+ end
# update the status of pending job
@status = "PENDING"
- @pending_ancestor.rev_fail_projects.delete @project
+ @pending_ancestor.remove_rev_fail_project(@project, @os)
@pending_ancestor.rev_success_jobs.push self
if @pending_ancestor.rev_fail_projects.empty? then
@pending_ancestor.status = "RESOLVED"
end
else
@log.info( "Entered the PENDING state ...", Log::LV_USER)
- while @status == "PENDING"
- sleep 1
- end
+ old_msg = ""
+ while @status == "PENDING"
+ new_msg = @pending_ancestor.rev_fail_projects.map {|p| "#{p[0].name}(#{p[1]})"}.join(", ")
+
+ if new_msg != old_msg then
+ @log.info(" * Waiting for building next projects: #{new_msg}", Log::LV_USER)
+ old_msg = new_msg
+ end
+
+ sleep 1
+ end
end
return true
# upload
u_client = Client.new( @server.pkgserver_url, nil, @log )
- u_client.update
snapshot = u_client.upload( @server.pkgserver_addr, @server.pkgserver_port, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd, binpkg_path_list)
if snapshot.nil? then
return true
end
+
+ def get_local_package_of_dependency( dep, src_path )
+ # use my os if the dependency does not specify a target os
+ if dep.target_os_list.count != 0 then
+ dep_target_os = dep.target_os_list[0]
+ else
+ dep_target_os = @os
+ end
+
+ # search
+ binpkgs = Dir.glob("#{src_path}/#{dep.package_name}_*_#{dep_target_os}.zip")
+ if binpkgs.count > 0 then
+ pkg = binpkgs[0]
+ version = Utils.get_version_from_package_file(pkg)
+ if dep.match? version then
+ return pkg
+ else
+ return nil
+ end
+ else
+ return nil
+ end
+ end
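+
+ # Illustrative usage (comment only): local packages are matched by the
+ # "<name>_<version>_<os>.zip" naming convention used in the glob above, and the
+ # caller typically reads the version back from the file name, e.g.
+ #
+ #   dep_pkg = get_local_package_of_dependency( dep, @parent.source_path )
+ #   version = Utils.get_version_from_package_file( dep_pkg ) unless dep_pkg.nil?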
+
+
+ def remote_package_of_dependency_exist?(dep)
+ # use my os if the dependency does not specify a target os
+ if dep.target_os_list.count != 0 then
+ dep_target_os = dep.target_os_list[0]
+ else
+ dep_target_os = @os
+ end
+
+ # search
+ ver_svr = @pkgsvr_client.get_attr_from_pkg( dep.package_name, dep_target_os, "version")
+ if ver_svr.nil? then return false end
+ if not dep.match? ver_svr then return false end
+
+ return true
+ end
+
+
end
attr_accessor :prjmgr
attr_accessor :incoming_path, :outgoing_path
attr_accessor :cancel_lock
+ attr_accessor :supported_os_list
+
CONFIG_ROOT = "#{Utils::HOME}/.build_tools/build_server"
HOST_OS = Utils::HOST_OS
@incoming_path = "#{@path}/incoming"
@outgoing_path = "#{@path}/outgoing"
@cancel_lock = Mutex.new
+ @supported_os_list = []
end
end
+ # add new target OS.
+ # If it already exists, return false; otherwise return true
+ def add_target_os( os_name )
+
+ # if it already exists, return false
+ for os in @supported_os_list
+ if os.eql? os_name then
+ return false
+ end
+ end
+
+ # add it into list
+ @supported_os_list.push os_name
+
+ return true
+ end
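+
+ # Usage sketch (comment only): duplicates are rejected; "server" below stands
+ # for a BuildServer instance.
+ #
+ #   server.add_target_os( "ubuntu-32" )   # => true, added to @supported_os_list
+ #   server.add_target_os( "ubuntu-32" )   # => false, already present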
+
+
# get remote server
def get_available_server ( job )
candidates = []
# calculate empty rooms
- # if fullbuild job, his parent should be excluded
+ # if this is a sub job, its parent should be excluded
local_empty_rooms = @jobmgr.get_number_of_empty_room
- if job.is_fullbuild_job then
+ if job.is_sub_job? then
local_empty_rooms += 1
end
end
# get available servers
- # but, job must not be "REGISTER" job
- if job.type != "REGISTER" then
+ # but the job must not be a "REGISTER" or "MULTIBUILD" job
+ if job.type != "REGISTER" and job.type != "MULTIBUILD" then
for server in @friend_servers
if ( server.status == "RUNNING" and server.can_build?( job ) and
not server.has_waiting_jobs and
# set default
@@instance_map[id].git_server_url="gerrithost:"
- if Utils::HOST_OS == "windows" then
+ if Utils.is_windows_like_os(Utils::HOST_OS) then
@@instance_map[id].git_bin_path="/c/Program\\ Files/Git/bin/git.exe"
else
@@instance_map[id].git_bin_path="/usr/bin/git"
# send request
stop_ok = false
- if client.send "STOP,#{server.password}" then
+ if client.send "STOP|#{server.password}" then
# receive & print
mismatched = false
result = client.read_lines do |l|
end
+ # add supported target os
+ def self.add_target_os( id, os_name )
+ # TODO: check os format
+ if os_name == "default" then
+ puts "Cannot use \"default\" as target OS name!"
+ return false
+ end
+
+ # get server
+ server = get_server(id)
+
+ # add
+ if server.add_target_os( os_name ) then
+
+ # write config
+ server_dir = "#{BuildServer::CONFIG_ROOT}/#{server.id}"
+ f = File.open( "#{server_dir}/supported_os_list", "a" )
+ f.puts "#{os_name}"
+ f.close
+
+ puts "Target OS is added successfully!"
+
+ return true
+ else
+ puts "Target OS already exists in list!"
+ return false
+ end
+ end
+
+
# build git repository and upload
def self.build_git( id, repository, commit, os, url, resolve )
if client.nil? then return false end
# send request
- client.send "BUILD,GIT,#{repository},#{commit},#{os}"
+ client.send "BUILD|GIT|#{repository}|#{commit}|#{os}"
# receive & print
client.print_stream
if client.nil? then return false end
# send request
- client.send "RESOLVE,GIT,#{repository},#{commit},#{os}"
+ client.send "RESOLVE|GIT|#{repository}|#{commit}|#{os}"
 # receive & print
client.print_stream
if client.nil? then return false end
# send request
- client.send "BUILD,LOCAL,#{local_path},#{os}"
+ client.send "BUILD|LOCAL|#{local_path}|#{os}"
 # receive & print
client.print_stream
if client.nil? then return false end
# send request
- client.send "RESOLVE,LOCAL,#{local_path},#{os}"
+ client.send "RESOLVE|LOCAL|#{local_path}|#{os}"
 # receive & print
client.print_stream
# add project
def self.add_project( id, project_name, git_repos, git_branch, remote_server_id, passwd, os_string )
+ # get server
server = get_server(id)
- # get supported os
- if os_string.nil? then
- os_list = ["linux", "windows"]
+ # get supported os for project.
+ # if not specified, all supported os of the server will be used
+ if os_string == "default" then
+ os_list = server.supported_os_list
else
os_list = os_string.strip.split(",")
end
+ # check OS name
+ for os in os_list
+ if not server.supported_os_list.include? os then
+ puts "Unsupported OS name \"#{os}\" is used!"
+ puts "Check the following supported OS list:"
+ for s_os in server.supported_os_list
+ puts " * #{s_os}"
+ end
+
+ return false
+ end
+ end
+
# add
if not git_repos.nil? and not git_branch.nil? then
result = server.prjmgr.add_git_project( project_name, git_repos, git_branch, passwd, os_list )
# add binary project
def self.add_binary_project( id, project_name, pkg_name, passwd, os_string )
+ # get server
server = get_server(id)
- # get supported os
+ # get supported os for project.
+ # if not specified, all supported os of the server will be used
if os_string.nil? then
- os_list = ["linux", "windows"]
+ os_list = server.supported_os_list
else
os_list = os_string.strip.split(",")
end
# send request
fullbuild_ok = false
- if client.send "FULLBUILD,#{server.password}" then
+ if client.send "FULLBUILD|#{server.password}" then
 # receive & print
mismatched = false
result = client.read_lines do |l|
file_path = File.expand_path(file_path)
# send request
success = false
- if client.send "REGISTER,BINARY-LOCAL,#{file_path},#{server.password}" then
+ if client.send "REGISTER|BINARY-LOCAL|#{file_path}|#{server.password}" then
 # receive & print
mismatched = false
result = client.read_lines do |l|
end
end
+ # check supported os
+ if File.exist? "#{server_dir}/supported_os_list" then
+ File.open( "#{server_dir}/supported_os_list", "r" ) do |f|
+ f.each_line do |l|
+ os_name = l.strip
+ obj.add_target_os( os_name )
+ end
+ end
+ end
+
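The supported_os_list file consumed here is the same one that add_target_os appends to: a plain text file under the server's config directory with one OS name per line, for example (names are hypothetical):

    ubuntu-32
    windows-32

Each line is stripped and fed back through add_target_os, so duplicate lines are simply ignored.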
# set git server url
obj.git_server_url = git_server_url
raise ArgumentError, "Usage: build-svr add-prj -n <server name> -N <project name> [-g <git repository>] [-b <git branch>] [-P <package name>] [-w <password>] [-o <os list>]"
end
+ when "add-os"
+ if options[:name].nil? or options[:name].empty? or
+ options[:os].nil? or options[:os].empty? then
+ raise ArgumentError, "Usage: build-svr add-os -n <server name> -o <os>"
+ end
+
when "fullbuild"
if options[:name].nil? or options[:name].empty? then
raise ArgumentError, "Usage: build-svr fullbuild -n <server name>"
+ "\t" + "build-svr stop -n <server name>" + "\n" \
+ "\t" + "build-svr add-svr -n <server name> -d <friend server address>" + "\n" \
+ "\t" + "build-svr add-prj -n <server name> -N <project name> [-g <git repository>] [-b <git branch>] [-P <package name>] [-w <password>] [-o <os list>]" + "\n" \
+ + "\t" + "build-svr add-os -n <server name> -o <os>" + "\n" \
+ "\t" + "build-svr register -n <server name> -P <package file>" + "\n" \
+ "\t" + "build-svr fullbuild -n <server name>" + "\n" \
+ "\n" + "Options:" + "\n"
cmd = ARGV[0]
if cmd.eql? "create" or cmd.eql? "remove" or cmd.eql? "start" or
- cmd.eql? "stop" or cmd.eql? "add-svr" or cmd.eql? "fullbuild" or
- cmd.eql? "add-prj" or cmd.eql? "register" or
+ cmd.eql? "stop" or cmd.eql? "add-svr" or
+ cmd.eql? "add-prj" or cmd.eql? "add-os" or
+ cmd.eql? "fullbuild" or cmd.eql? "register" or
cmd =~ /(-v)|(--version)/ or
cmd =~ /(help)|(-h)|(--help)/ then
require "BuildJob.rb"
require "utils.rb"
-# mutax for git operation
-$git_mutex = Mutex.new
-
class GitBuildJob < BuildJob
@log.info( "Initializing job...", Log::LV_USER)
- $git_mutex.synchronize {
- # git clone
- if not git_cmd("clone #{@git_repos} temp", @job_root) then
- @log.error( "Failed on \"git clone #{@git_repos}\"", Log::LV_USER)
- @status = "ERROR"
- return false
- end
-
- if @git_commit.nil? then
- # git reset
- if not git_cmd("reset --hard origin/#{@git_branch}", @source_path) then
- @log.error( "Failed on \"git reset --hard origin/#{@git_branch}\"", Log::LV_USER)
- @status = "ERROR"
- return false
- end
-
- # get git commit-id
- commit_id = ""
- result_line = git_cmd_return("log -1",@source_path)
- if result_line != nil then
- result_line.each do |l|
- if l.start_with?("commit ") then
- commit_id = l.split(" ")[1].strip
- end
- end
- end
- @git_commit = commit_id
- else
- # git reset
- if not git_cmd("reset --hard #{@git_commit}", @source_path) then
- @log.error( "Failed on \"git reset --hard #{@git_commit}\"", Log::LV_USER)
- @status = "ERROR"
- return false
- end
- end
- }
+ # download source code
+ @git_commit = @project.get_source_code(@git_repos, @git_branch, @git_commit, @source_path, @log)
+ if @git_commit.nil? then
+ @status = "ERROR"
+ return false
+ end
# check pkginfo.manifest
if not File.exist? "#{@source_path}/package/pkginfo.manifest"
- @log.error( "package/pkginfo.manifest doest not exist", Log::LV_USER)
+ @log.error( "package/pkginfo.manifest does not exist", Log::LV_USER)
@status = "ERROR"
return false
end
# set up pkg info
+ begin
@pkginfo = PackageManifest.new("#{@source_path}/package/pkginfo.manifest")
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ return false
+ end
# set up pkgsvr_client
@pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log)
- @pkgsvr_client.update
-
+
# checking version if not reverse-build job
if not @is_rev_build_check_job and not @is_remote_job and
- not check_package_version() then
+ not check_package_version(@git_commit) then
- # if fullbuild job, its OK
- if @is_fullbuild_job then
- @status = "FINISHED"
- return true
- else
- @status = "ERROR"
- return false
- end
+ @status = "ERROR"
+ return false
end
# check availabiltiy
end
- def git_cmd(cmd, working_dir)
- build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}"
- ret = Utils.execute_shell_with_log(build_command,@log)
-
- return ret
- end
-
-
- def git_cmd_return(cmd, working_dir)
- build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}"
- ret = Utils.execute_shell_return(build_command)
-
- return ret
- end
-
end
=end
require 'fileutils'
+require "thread"
$LOAD_PATH.unshift File.dirname(__FILE__)
require "CommonProject.rb"
require "GitBuildJob.rb"
require "Version.rb"
require "PackageManifest.rb"
+# mutex for git operation
+$git_mutex = Mutex.new
+
class GitBuildProject < CommonProject
attr_accessor :repository, :branch
# create new job
+ # if this project does not support the os, return nil
def create_new_job( os )
- return GitBuildJob.new( self, os, @server )
+ if @os_list.include? os then
+ return GitBuildJob.new( self, os, @server )
+ else
+ return nil
+ end
end
# add package info
def add_package_info( version, path )
+ begin
pkginfo =PackageManifest.new(path)
+ rescue => e
+ puts e.message
+ return
+ end
@package_infos[version] = pkginfo
end
if version.nil? or @package_infos[version].nil? then return false end
+ # check supported os
+ if not os.nil? and not @os_list.include? os then return false end
+
# check name and version
pkginfo=@package_infos[version]
pkg_list = os.nil? ? pkginfo.packages : pkginfo.get_target_packages(os)
return false
end
+
+
+ # download source code to "source_path" and return its commit-id
+ def get_source_code( git_repos, git_branch, git_commit, source_path, log )
+ $git_mutex.synchronize {
+ # check git directory
+ git_path = "#{@server.path}/projects/#{@name}/cache/git"
+ cache_path = "#{@server.path}/projects/#{@name}/cache"
+ if not File.exist? cache_path then
+ FileUtils.mkdir_p cache_path
+ end
+
+ # check branch name
+ if File.exist? git_path then
+ current_branch = git_cmd_return( "branch", git_path)[0].split(" ")[1].strip
+ if current_branch != git_branch then
+ log.warn( "Branch name is changed.", Log::LV_USER)
+ FileUtils.rm_rf git_path
+ end
+ end
+
+ # git pull operation
+ if File.exist? git_path and not git_cmd("pull", git_path,log) then
+ log.warn( "Failed on \"git pull\"", Log::LV_USER)
+ FileUtils.rm_rf git_path
+ end
+
+ # if no git, clone it
+ if not File.exist? git_path then
+ # if "git pull" failed, try to "git clone"
+ if not git_cmd("clone #{git_repos} git", cache_path, log) then
+ log.error( "Failed on \"git clone #{git_repos}\"", Log::LV_USER)
+ return nil
+ end
+ # git checkout
+ if not git_cmd("checkout #{git_branch}", git_path, log) then
+ log.error( "Failed on \"git checkout #{git_branch}\"", Log::LV_USER)
+ return nil
+ end
+ end
+
+ if git_commit.nil? then
+ # get git commit-id
+ commit_id = ""
+ result_line = git_cmd_return("log -1", git_path)
+ if result_line != nil then
+ result_line.each do |l|
+ if l.start_with?("commit ") then
+ commit_id = l.split(" ")[1].strip
+ end
+ end
+ end
+
+ git_commit = commit_id
+ else
+ # git reset
+ if not git_cmd("reset --hard #{git_commit}", git_path, log) then
+ log.error( "Failed on \"git reset --hard #{git_commit}\"", Log::LV_USER)
+ return nil
+ end
+ end
+
+ # copy to source path
+ FileUtils.cp_r(git_path, source_path)
+ }
+
+ return git_commit
+ end
+
+
+ def git_cmd(cmd, working_dir, log)
+ build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}"
+ ret = Utils.execute_shell_with_log(build_command,log)
+
+ return ret
+ end
+
+
+ def git_cmd_return(cmd, working_dir)
+ build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}"
+ ret = Utils.execute_shell_return(build_command)
+
+ return ret
+ end
end
=end
require 'fileutils'
+require 'thread'
$LOAD_PATH.unshift File.dirname(__FILE__)
$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server"
require "SocketJobRequestListener.rb"
require "packageServer.rb"
class JobManager
- attr_accessor :max_working_jobs, :jobs
+ attr_accessor :max_working_jobs, :jobs, :internal_jobs
# initialize
def initialize( parent )
@parent = parent
@jobs = []
@internal_jobs = []
+ @reverse_build_jobs = []
@max_working_jobs=2
@new_job_index = 0
+ @internal_job_schedule = Mutex.new
end
def create_new_register_job( file_path )
- return RegisterPackageJob.new( file_path, @parent )
+ return RegisterPackageJob.new( file_path, nil, @parent )
end
# add a normal job
@jobs.push( new_job )
end
-
- # add internal job (reverse-build or full-build)
+ # add an internal job (a sub job of a multi-build job)
def add_internal_job( new_job )
@parent.log.info "Added new job \"#{new_job.id}\""
- new_job.status = "JUST_CREATED"
@internal_jobs.push( new_job )
end
+ # add a reverse-build check job
+ def add_reverse_build_job( new_job )
+ @parent.log.info "Added new job \"#{new_job.id}\""
+ @reverse_build_jobs.push( new_job )
+ end
+
+ # stop internal job selection
+ def stop_internal_job_schedule()
+ @internal_job_schedule.lock
+ end
+
+
+ # resume internal job selection
+ def resume_internal_job_schedule()
+ @internal_job_schedule.unlock
+ end
 # initialize a normal job
def initialize_job ( job )
job.status = "INITIALIZING"
Thread.new {
- # init
+ # init
if not job.init or job.status == "ERROR" then
- job.status = "ERROR"
+ if job.cancel_state == "NONE" then job.status = "ERROR" end
@parent.log.info "Adding the job \"#{job.id}\" is canceled"
job.terminate()
Thread.current.exit
end
end
+ def cancel_job( job)
+ job.cancel_state = "WORKING"
+ Thread.new {
+ # thread terminate
+ if not job.thread.nil? then
+ #terminate job thread
+ job.thread.terminate
+ job.thread = nil
+ job.terminate
+ end
+
+ # cancel the job
+ job.cancel
+
+ # cancel finished
+ job.status = "CANCELED"
+ }
+ end
# handle
def handle()
+ # handle jobs requested to be canceled
+ (@jobs + @internal_jobs + @reverse_build_jobs).select{|j| j.cancel_state == "INIT" }.each do |job|
+ cancel_job( job )
+ end
+
+ # for reverse build jobs
+ for job in @reverse_build_jobs
+ # if "ERROR", "FINISHED", "CANCELED" remove it from list
+ if job.status == "ERROR"
+ @parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
+ @reverse_build_jobs.delete job
+ elsif job.status == "FINISHED"
+ @reverse_build_jobs.delete job
+ elsif job.status == "CANCELED"
+ @reverse_build_jobs.delete job
+ end
+
+ # if "JUST_CREATED", initialize it
+ if job.status == "JUST_CREATED" then
+ initialize_job( job )
+ end
+ end
# for internal jobs
for job in @internal_jobs
end
# check the connection if job is not asynchronous job
- if ( job.status == "WAITING" or job.status == "REMOTE_WORKING") and
+ if ( job.status == "WAITING" or job.status == "REMOTE_WORKING" or job.status == "PENDING") and
not job.is_asynchronous_job? and
not job.is_connected? then
end
end
- # check internal job first, if not exist, get new available job
- if @internal_jobs.count > 0 then
- job = get_available_internal_job
- else
- job = get_available_job
- end
+ # reverse build job -> internal job -> normal job
+ job = get_available_job
 # if no available job exists, continue
if not job.nil? then
 # select the job with no build-dependency problem
def get_available_job
- return get_available_job_in_list(@jobs)
- end
-
+ # check reverse build job first
+ selected_job = nil
+ for job in @reverse_build_jobs
+ if job.status == "WAITING" then
+ selected_job = job
+ break
+ end
+ end
+ if not selected_job.nil? then return selected_job end
- # select the job whith no build-dependency problem
- def get_available_internal_job
- return get_available_job_in_list(@internal_jobs)
+ # if no reverse build job exist!
+ if not @internal_job_schedule.locked? and @internal_jobs.count > 0 then
+ return get_available_job_in_list(@internal_jobs, true)
+ else
+ return get_available_job_in_list(@jobs, false)
+ end
end
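In short, job selection now follows a fixed priority: any WAITING reverse-build-check job first, then internal (multi-build sub) jobs while the internal schedule is not locked, then normal jobs. A simplified stand-alone sketch of that ordering (illustration only; the real path also applies the dependency checks in get_available_job_in_list below):

    # Simplified illustration of the selection order, not the actual code.
    def pick_next(reverse_jobs, internal_jobs, normal_jobs, internal_locked)
      rev = reverse_jobs.find { |j| j.status == "WAITING" }
      return rev if not rev.nil?
      if not internal_locked and internal_jobs.count > 0 then
        internal_jobs.first
      else
        normal_jobs.first
      end
    end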
-
# return "max_working_jobs_cnt - current_working_jobs_cnt"
def get_number_of_empty_room
working_cnt = 0
- for job in @jobs + @internal_jobs
+ for job in @jobs + @internal_jobs + @reverse_build_jobs
if job.status == "WORKING" then
working_cnt = working_cnt + 1
end
# check there are working jobs
def has_working_jobs
working_cnt = 0
- for job in @jobs + @internal_jobs
+ for job in @jobs + @internal_jobs + @reverse_build_jobs
if job.status == "WORKING" then
return true
end
# check there are waiting jobs
def has_waiting_jobs
waiting_cnt = 0
- for job in @jobs + @internal_jobs
+ for job in @jobs + @internal_jobs + @reverse_build_jobs
if job.status == "WAITING" then
return true
end
def get_working_jobs
result = []
- for job in @jobs + @internal_jobs
+ for job in @jobs + @internal_jobs + @reverse_build_jobs
if job.status == "WORKING" then
result.push job
end
def get_waiting_jobs
result = []
- for job in @jobs + @internal_jobs
+ for job in @jobs + @internal_jobs + @reverse_build_jobs
if job.status == "WAITING" then
result.push job
end
def get_remote_jobs
result = []
- for job in @jobs + @internal_jobs
+ for job in @jobs + @internal_jobs + @reverse_build_jobs
if job.status == "REMOTE_WORKING" then
result.push job
end
protected
 # select the job with no build-dependency problem
- def get_available_job_in_list( jobs )
+ # if "check_dep_wait" is true, it will check the build dependency
+ # among items of "WAIT" status in list
+ def get_available_job_in_list( jobs, check_dep_wait=false )
 # gather the jobs to check build dependencies against
- all_working_jobs = []
- full_build_jobs = []
- uninit_fjob_exist = false #uninitialized full build job exist flag
+ check_dep_jobs = []
for job in jobs
- if job.status == "WORKING" or job.status == "REMOTE_WORKING" then
- all_working_jobs.push job
- end
- if job.is_fullbuild_job then
- if job.status == "WAITING" then
- full_build_jobs.push job
- end
- if job.status == "JUST_CREATED" or job.status == "INITIALIZING" then
- uninit_fjob_exist = true
- end
+ if job.status == "WORKING" or job.status == "REMOTE_WORKING" or job.status == "PENDING" then
+ check_dep_jobs.push job
+ elsif ( check_dep_wait and job.status == "WAITING") then
+ check_dep_jobs.push job
end
end
# for waiting jobs
for job in jobs
if job.status != "WAITING" then next end
-
+
# check build dependency against working job
pre_jobs = []
- for wjob in all_working_jobs
- if job.has_build_dependency?( wjob ) then
- pre_jobs.push wjob
+ for cjob in check_dep_jobs
+ if job == cjob then next end
+ if (cjob.status == "WORKING" or cjob.status == "REMOTE_WORKING" or cjob.status == "PENDING" ) and
+ (job.has_build_dependency?( cjob ) or job.is_compatible_with?( cjob)) then
+ pre_jobs.push cjob
+ elsif check_dep_wait and cjob.status == "WAITING" and
+ (job.does_depend_on? cjob or
+ (job.id > cjob.id and job.is_compatible_with? cjob) ) then
+ pre_jobs.push cjob
end
end
- # if full build job, do something special
- if job.is_fullbuild_job then
- # wait all full build jobs are ready
- if uninit_fjob_exist then next end
- # if full build job, check build-dep among full-build jobs
- for fjob in full_build_jobs
- if job == fjob then next end
- if job.does_depend_on?( fjob ) then
- pre_jobs.push fjob
- end
- end
- end
-
 # if pre-requisite jobs have changed, notify the user
is_changed = false
if pre_jobs.count != job.pre_jobs.count then
for bjob in pre_jobs
if bjob.type == "BUILD" then
job.log.info( " * #{bjob.id} #{bjob.pkginfo.packages[0].source}", Log::LV_USER)
- elsif bjob.type == "FULLBUILD" then
- job.log.info( " * #{bjob.id} (Full Build Job)", Log::LV_USER)
+ elsif bjob.type == "MULTIBUILD" then
+ job.log.info( " * #{bjob.id} (Multi Build Job)", Log::LV_USER)
end
end
end
# no pre-requisite jobs, return its job
if job.pre_jobs.count == 0 then
- pre_jobs.push fjob
return job
end
end
return nil
end
+
end
# check pkginfo.manifest
if not File.exist? "#{@source_path}/package/pkginfo.manifest"
- @log.error( "#{@source_path}/package/pkginfo.manifest doest not exist", Log::LV_USER)
+ @log.error( "#{@source_path}/package/pkginfo.manifest does not exist", Log::LV_USER)
@status = "ERROR"
return false
end
# set pkginfo
- @pkginfo = PackageManifest.new("#{@source_path}/package/pkginfo.manifest")
+ begin
+ @pkginfo = PackageManifest.new("#{@source_path}/package/pkginfo.manifest")
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ @status = "ERROR"
+ return false
+ end
# set up pkgsvr_client
@pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log)
- @pkgsvr_client.update
return true
end
--- /dev/null
+=begin
+
+ MultiBuildJob.rb
+
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
+
+Contact:
+Taejun Ha <taejun.ha@samsung.com>
+Jiil Hyoun <jiil.hyoun@samsung.com>
+Donghyuk Yang <donghyuk.yang@samsung.com>
+DongHee Yang <donghee.yang@samsung.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Contributors:
+- S-Core Co., Ltd
+=end
+
+require "fileutils"
+$LOAD_PATH.unshift File.dirname(__FILE__)
+$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common"
+$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/builder"
+$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server"
+require "client.rb"
+require "PackageManifest.rb"
+require "Version.rb"
+require "Builder.rb"
+require "RemoteBuilder.rb"
+require "BuildServer.rb"
+require "JobLog.rb"
+require "mail.rb"
+
+class MultiBuildJob
+
+ attr_accessor :id, :server, :pre_jobs, :os, :type
+ attr_accessor :status, :log, :source_path, :cancel_state
+ attr_accessor :pkgsvr_client, :thread, :sub_jobs
+
+ # initialize
+ def initialize (server)
+ @server = server
+ @id = server.jobmgr.get_new_job_id()
+ @log = nil
+ @type = "MULTIBUILD"
+ @os = "Unknown"
+
+ @status = "JUST_CREATED"
+ @host_os = Utils::HOST_OS
+ @pkgserver_url = @server.pkgserver_url
+ @job_root = "#{@server.path}/jobs/#{@id}"
+ @source_path = @job_root+"/temp"
+ @job_working_dir=@job_root+"/works"
+ @buildroot_dir = "#{@job_root}/buildroot"
+ @pre_jobs = [] #pre-requisite jobs
+ @cancel_state = "NONE"
+
+ # children
+ @sub_jobs = []
+ end
+
+
+ # execute
+ def execute(sync=false)
+ @log.info( "Invoking a thread for MULTI-BUILD Job #{@id}", Log::LV_USER)
+ if @status == "ERROR" then return end
+ @thread = Thread.new {
+ # main
+ thread_main()
+
+ # close
+ terminate()
+ }
+
+ if sync then
+ @thread.join
+ end
+
+ return true
+ end
+
+ # cancel
+ def cancel()
+ @sub_jobs.select{|x| x.cancel_state == "NONE"}.each do |sub|
+ sub.cancel_state = "INIT"
+ end
+ if not @log.nil? then
+ @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER)
+ end
+ end
+
+ #
+ def init
+ # mkdir
+ if not File.exist? @job_root then
+ FileUtils.mkdir_p @job_root
+ end
+
+ # create logger
+ if @log.nil? then
+ @log = JobLog.new(self, nil )
+ end
+
+ @log.info( "Initializing job...", Log::LV_USER)
+
+ # create source path
+ if not File.exist? @source_path then
+ FileUtils.mkdir_p @source_path
+ end
+
+ # initialize all sub jobs (they are queued as internal jobs later, in thread_main)
+ for job in @sub_jobs
+ # initialize job
+ if not job.init or job.status == "ERROR" then
+ job.status = "ERROR"
+ @log.info( "Failed to initialize sub-job \"#{job.get_project().name}\" for #{job.os}. (#{job.id})", Log::LV_USER)
+ job.terminate()
+ end
+
+ if job.status != "ERROR" then
+ job.status = "WAITING"
+ else
+ job.status = "ERROR"
+ @status = "ERROR"
+ break
+ end
+ end
+ if @status == "ERROR" then
+ return false
+ end
+
+
+ # set up pkgsvr_client
+ @pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log)
+
+ return true
+ end
+
+
+ #terminate
+ def terminate()
+ # report error
+ if @status == "ERROR" then
+ @log.error( "Job is stopped by ERROR" , Log::LV_USER)
+ @server.cleaner.clean_afterwards(@id)
+ else
+ # clean up
+ @server.cleaner.clean(@id)
+ end
+
+ # close logger
+ @log.close
+ end
+
+
+ def is_sub_job?
+ return false
+ end
+
+
+ def get_sub_jobs()
+ return @sub_jobs
+ end
+
+
+ # check building is possible
+ def can_be_built_on?(host_os)
+ return true
+ end
+
+
+ def get_packages()
+ packages = []
+ for job in @sub_jobs
+ packages = packages + job.get_packages()
+ end
+ packages.uniq!
+
+ return packages
+ end
+
+
+ def get_build_dependencies(target_os)
+ deps = []
+ for job in @sub_jobs
+ deps = deps + job.get_build_dependencies(target_os)
+ end
+ deps.uniq!
+
+ return deps
+ end
+
+
+ def get_source_dependencies(target_os, host_os)
+ deps = []
+ for job in @sub_jobs
+ deps = deps + job.get_source_dependencies(target_os,host_os)
+ end
+ deps.uniq!
+
+ return deps
+ end
+
+
+ def is_compatible_with?(o)
+ return false
+ end
+
+ def has_build_dependency?(other_job)
+
+ if has_same_packages?(other_job) or
+ does_depend_on?(other_job) or
+ does_depended_by?(other_job) then
+
+ return true
+ else
+ return false
+ end
+ end
+
+
+ def has_same_packages?( wjob )
+
+ # same package must have same os
+ if not @os.eql? wjob.os then
+ return false
+ end
+
+ # check package name
+ for pkg in get_packages
+ for wpkg in wjob.get_packages()
+ if pkg.package_name == wpkg.package_name then
+ #puts "Removed from candiated... A == B"
+ return true
+ end
+ end
+ end
+
+ return false
+ end
+
+
+ def does_depend_on?( wjob )
+
+ # compare build dependency
+ for dep in get_build_dependencies(@os)
+ for wpkg in wjob.get_packages()
+ # dep packages of my job must have same name and target os
+ # with packages in working job
+ if dep.package_name == wpkg.package_name and
+ dep.target_os_list.include? wjob.os then
+ #puts "Removed from candiated... A -> B"
+ return true
+ end
+ end
+ end
+
+ return false
+ end
+
+
+ def does_depended_by?( wjob )
+
+ for pkg in get_packages()
+ for dep in wjob.get_build_dependencies(wjob.os)
+ # dep package of working job must have same name and target os
+ # with packages in my job
+ if dep.package_name == pkg.package_name and
+ dep.target_os_list.include? @os then
+ #puts "Checking... A <- B"
+ return true
+ end
+ end
+ end
+ return false
+ end
+
+
+ def is_connected?
+ return true
+ end
+
+
+ # return whether the job is an asynchronous job
+ def is_asynchronous_job?
+ return false
+ end
+
+ # set logger
+ def set_logger( logger )
+ @log = logger
+ end
+
+
+ # add sub job
+ def add_sub_job( job )
+ @sub_jobs.push job
+ # this makes the sub-job share the parent's build-root
+ job.set_parent_job( self )
+ end
+
+
+ #
+ # PROTECTED METHODS
+ #
+ protected
+
+
+ # main module
+ def thread_main
+ @log.info( "New Job #{@id} is started", Log::LV_USER)
+
+ # initialize status map
+ job_status_map = {}
+ for job in @sub_jobs
+ job_status_map[job.id] = job.status
+ end
+
+ # add to internal job
+ @server.jobmgr.stop_internal_job_schedule()
+ for job in @sub_jobs
+ # init finished, add internal_jobs
+ @server.jobmgr.add_internal_job(job)
+ @log.info( "Added new job \"#{job.get_project().name}\" for #{job.os}! (#{job.id})",
+ Log::LV_USER)
+ if not @server.job_log_url.empty? then
+ @log.info( " * Log URL : #{@server.job_log_url}/#{job.id}/log", Log::LV_USER)
+ end
+ end
+ @server.jobmgr.resume_internal_job_schedule()
+
+ # show job status changes
+ all_jobs_finished = false
+ error_exist = false
+ while not all_jobs_finished and not error_exist
+ all_jobs_finished = true
+ for job in @sub_jobs
+
+ # check if the status changed; if so, print it
+ if job_status_map[ job.id ] != job.status then
+ @log.info("Sub-Job \"#{job.get_project().name}\" for #{job.os} has entered \"#{job.status}\" state. (#{job.id})", Log::LV_USER)
+ job_status_map[ job.id ] = job.status
+ end
+ if job.status != "ERROR" and job.status != "FINISHED" and job.cancel_state == "NONE" then
+ all_jobs_finished = false
+ end
+ if job.status == "ERROR" then
+ error_exist = true
+ break
+ end
+ end
+ sleep 1
+ end
+
+ if @cancel_state != "NONE" then
+ @status = "CANCELED"
+ if not @log.nil? then
+ @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER)
+ end
+ else
+ # check error
+ if error_exist then
+ @sub_jobs.each do |sub|
+ if sub.status != "ERROR" and sub.status != "FINISHED" and sub.cancel_state == "NONE" then
+ sub.cancel_state = "INIT"
+ end
+ end
+ @status = "ERROR"
+ return
+ end
+
+ # upload
+ if not upload() then
+ @status = "ERROR"
+ return
+ end
+
+ # INFO. don't change this string
+ @log.info( "Job is completed!", Log::LV_USER)
+ @status = "FINISHED"
+ end
+ end
+
+
+ def upload()
+ @log.info( "Uploading ...", Log::LV_USER)
+
+ # get package path list
+ binpkg_path_list = Dir.glob("#{@source_path}/*_*_*.zip")
+ srcpkg_path_list = Dir.glob("#{@source_path}/*.tar.gz")
+
+ # upload
+ u_client = Client.new( @server.pkgserver_url, nil, @log )
+ snapshot = u_client.upload( @server.pkgserver_addr, @server.pkgserver_port, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd, binpkg_path_list)
+
+ if snapshot.nil? then
+ @log.info( "Upload failed...", Log::LV_USER)
+
+ return false
+ end
+
+ # update local
+ @log.info( "Upload succeeded. Sync local pkg-server again...", Log::LV_USER)
+ @pkgsvr_client.update
+ @log.info("Snapshot: #{snapshot}", Log::LV_USER)
+
+ return true
+ end
+
+end
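A MultiBuildJob is just a container for per-OS build jobs: the server creates the individual jobs first and then groups them, which is also how the project manager and the BUILD request handler below use it. A minimal sketch (project and OS names are placeholders):

    # Sketch only; "prjmgr"/"jobmgr" are the server's managers used elsewhere
    # in this change, and the project/OS names are hypothetical.
    sub_jobs = [ prjmgr.create_new_job( "my-project", "ubuntu-32" ),
                 prjmgr.create_new_job( "my-project", "windows-32" ) ].compact
    multi = prjmgr.create_new_multi_build_job( sub_jobs )
    jobmgr.add_job( multi )   # sub jobs become internal jobs once it starts running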
$LOAD_PATH.unshift File.dirname(__FILE__)
require "GitBuildProject.rb"
require "BinaryUploadProject.rb"
+require "MultiBuildJob.rb"
require "PackageManifest.rb"
require "package.rb"
-require "FullBuildJob.rb"
class ProjectManager
attr_accessor :projects
# create new job for project
+ # if cannot create, return nil
def create_new_job( name, os )
prj = get_project( name )
if prj.nil? then return nil end
end
- def create_new_jobs_for_all_os( name )
- result = []
+ # create new multi build job
+ def create_new_multi_build_job( sub_job_list )
+ result = MultiBuildJob.new( @server )
- prj = get_project( name )
- if prj.nil? then return nil end
+ for job in sub_job_list
+ result.add_sub_job( job )
+ end
+
+ return result
+ end
+
+
+ # create new full job
+ def create_new_full_build_job( )
+ # create multi job
+ result = MultiBuildJob.new( @server )
+
+ # create sub jobs
+ build_jobs = []
+ for prj in @projects
+ if prj.type != "GIT" then next end
+
+ for os in prj.os_list
+ if not @server.supported_os_list.include? os then next end
+
+ new_job = create_new_job( prj.name, os )
+ if new_job.nil? then next end
+
+ # This makes the project build even though a package of
+ # the same version already exists on the pkg-server
+ new_job.set_force_rebuild(true)
- for new_os in prj.os_list
- new_job = create_new_job(name, new_os)
- if not new_job.nil? then
- result.push new_job
- end
- end
+ # add to multi job
+ result.add_sub_job( new_job )
+ end
+ end
return result
end
# get project that includes specified pkg name and os
+ # will return [project,os,ver] list
def get_projects_from_pkgs(pkgs)
result = []
for prj in @projects
# check project provide target package
if prj.include_package?(name, ver, os) then
- result.push prj
- project_found = true
+ result.push [prj, os, ver]
break
end
-
- if project_found then break end
end
end
end
- def create_new_fullbuild_job()
- return FullBuildJob.new(@server)
- end
-
protected
# load and create project
passwd=""
repos="none"
branch="master"
- os_list = ["linux","windows"]
+ os_list = @server.supported_os_list
rserver_id=nil
pkg_name=nil
File.open( config_file, "r" ) do |f|
require "JobLog.rb"
require "mail.rb"
require "utils.rb"
+require "ReverseBuildChecker.rb"
class RegisterPackageJob
attr_accessor :id, :server, :pre_jobs, :os, :type
attr_accessor :status, :log, :source_path
- attr_accessor :pkgsvr_client, :thread
- attr_accessor :is_fullbuild_job
- attr_accessor :pkg_name, :pkginfo
+ attr_accessor :pkgsvr_client, :thread, :pkg_type
+ attr_accessor :pkg_name, :pkginfo, :cancel_state
# initialize
- def initialize( local_path, server, ftpurl=nil )
+ def initialize( local_path, project, server, ftpurl=nil )
@server = server
@id = server.jobmgr.get_new_job_id()
@log = nil
@source_path = @job_root+"/temp"
@job_working_dir=@job_root+"/works"
@buildroot_dir = "#{@job_root}/buildroot"
+ @cancel_state = "NONE"
@pre_jobs = []
@local_path=local_path
@pkg_type = "BINARY"
new_name = @filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3')
@pkg_name = new_name.split(",")[0]
+ @pkg_version = new_name.split(",")[1]
@os = new_name.split(",")[2]
else
@pkg_type = "ARCHIVE"
@pkg_name = @filename
end
@pkginfo = nil #This info is valid only for BINARY package
+ @project = project
end
+ def is_sub_job?
+ return false
+ end
+
+
+ def get_project()
+ return @project
+ end
+
# execute
def execute(sync=false)
@log.info( "Invoking a thread for REGISTER Job #{@id}", Log::LV_USER)
FileUtils.mkdir_p @job_root
end
+ if @cancel_state != "NONE" then return false end
+
# create logger
if @log.nil? then
@log = JobLog.new(self, nil )
end
+ if @cancel_state != "NONE" then return false end
+
@log.info( "Initializing job...", Log::LV_USER)
# create dummy source path
FileUtils.mkdir_p @source_path
end
+ if @cancel_state != "NONE" then return false end
+
# set up pkgsvr_client
@pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log)
- @pkgsvr_client.update
- # download from sites
- if not File.exist? @local_path then
-
- end
-
+ if @cancel_state != "NONE" then return false end
+
# copy package file to source path
if not File.exist? @local_path then
@log.error( "File not found!", Log::LV_USER)
FileUtils.cp(@local_path,"#{@source_path}/#{File.basename(@local_path)}")
end
+ if @cancel_state != "NONE" then return false end
+
+ # check if the os is supported by build server
+ if @pkg_type == "BINARY" and
+ not @server.supported_os_list.include? @os then
+ @log.error( "Unsupported OS \"#{@os}\" is used!", Log::LV_USER)
+ @status = "ERROR"
+ return false
+ end
+
+ if @cancel_state != "NONE" then return false end
+
# checking version if not reverse-build job
if @pkg_type == "BINARY" then
 # extract pkg file
return false
end
+ if @cancel_state != "NONE" then return false end
+
# set up pkg info
- @pkginfo = PackageManifest.new("#{@source_path}/pkginfo.manifest")
+ begin
+ @pkginfo = PackageManifest.new("#{@source_path}/pkginfo.manifest")
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ @status = "ERROR"
+ return false
+ end
+
+ if @cancel_state != "NONE" then return false end
if not check_package_version() then
@status = "ERROR"
end
end
+ if @cancel_state != "NONE" then return false end
+
return true
end
#cancel
def cancel()
- #TODO
+ if not @log.nil? then
+ @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER)
+ end
end
end
+ def get_packages()
+ if @pkg_type == "BINARY" then
+ return @pkginfo.packages
+ else
+ return []
+ end
+ end
+
+
+ def get_build_dependencies(target_os)
+ return []
+ end
+
+
+ def get_source_dependencies(target_os,host_os)
+ return []
+ end
+
+
+ def is_compatible_with?(o)
+ return false
+ end
+
+
def has_build_dependency?(other_job)
if has_same_packages?(other_job) or
does_depended_by?(other_job) then
def has_same_packages?( wjob )
- if @type == wjob.type and
- @pkg_name == wjob.pkg_name then
-
- return true
- else
- return false
+ if @type != wjob.type then return false end
+
+ case @pkg_type
+ when "BINARY"
+ if @pkg_name == wjob.pkg_name and
+ @os == wjob.os then
+ return true
+ end
+ when "ARCHIVE"
+ if @pkg_name == wjob.pkg_name then return true end
end
+
+ return false
end
+ # binary/archive package should not have build-dependencies
def does_depend_on?( wjob )
return false
end
def does_depended_by?( wjob )
if @pkg_type == "BINARY" then
- for dep in wjob.pkginfo.get_build_dependencies(wjob.os)
+ for dep in wjob.get_build_dependencies(wjob.os)
# dep package of working job must have same name and target os
# with packages in my job
if dep.package_name == @pkg_name and
end
end
else
- for dep in wjob.pkginfo.get_source_dependencies(wjob.os,@host_os)
+ for dep in wjob.get_source_dependencies(wjob.os,@host_os)
if dep.package_name == @pkg_name then
return true
end
@log.info( "New Job #{@id} is started", Log::LV_USER)
# clean build
- if not check_reverse_build() then
+ if not ReverseBuildChecker.check( self, true ).empty? then
+ @status = "ERROR"
@log.error( "Reverse-build-check failed!" )
- return false
+ return
+ end
+
+ # if this package supports compatible OSes, check them as well
+ if @pkg_type == "BINARY" and
+ @pkginfo.packages[0].os_list.count > 1 then
+
+ pkg = @pkginfo.packages[0]
+ for os in pkg.os_list
+ if @os == os then next end
+
+ # skip when the os does not exist in project's supported os list
+ if not @project.nil? and not @project.os_list.include? os then next end
+
+ # skip when there is higher version of the package
+ ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version")
+ if not ver_svr.nil? and
+ Version.new(@pkg_version) <= Version.new(ver_svr) then next end
+
+ # make new package file for compatible OS
+ newfile = "#{@pkg_name}_#{@pkg_version}_#{os}.zip"
+ @log.info( "Copying #{@filename} to #{newfile}" )
+ FileUtils.cp(@local_path,"#{@source_path}/#{newfile}")
+
+ # reverse check
+ if not ReverseBuildChecker.check( self, true, os ).empty? then
+ @status = "ERROR"
+ @log.error( "Reverse-build-check failed!" )
+ return
+ end
+ end
end
# upload
 # build projects that depend on me
# can ignore some projects
- def check_reverse_build()
+ def check_reverse_build( target_os )
@log.info( "Checking reverse build dependency ...", Log::LV_USER)
# get reverse-dependent projects
rev_pkgs = []
if @pkg_type == "BINARY" then
- rev_pkgs += @pkgsvr_client.get_reverse_build_dependent_packages(@pkg_name, @os)
+ rev_pkgs += @pkgsvr_client.get_reverse_build_dependent_packages(@pkg_name, target_os)
else
rev_pkgs += @pkgsvr_client.get_reverse_source_dependent_packages(@pkg_name)
end
rev_projects = @server.prjmgr.get_projects_from_pkgs(rev_pkgs)
- # build rev-dep project as sub-job
- for prj in rev_projects
+ # create reverse build job
+ rev_build_jobs = []
+ for p in rev_projects
+ prj = p[0]
+ os = p[1]
+ version = p[2]
+
if prj.type != "GIT" then next end
- for os in prj.os_list
- # check version
- version = nil
- for pkg in rev_pkgs
- if prj.include_package?(pkg.package_name, pkg.version, os) then
- version = pkg.version
- break
- end
- end
- if version.nil? then next end
- # create sub jobs for checking
- new_job = prj.create_new_job_from_version(os, version)
- new_job.set_rev_build_check_job(self)
- @log.info( " * Checking reverse-build ... #{prj.name}(#{new_job.id})", Log::LV_USER)
- result = new_job.init()
+ # create sub jobs for checking
+ new_job = prj.create_new_job_from_version(os, version)
+ new_job.set_rev_build_check_job(self)
+
+ rev_build_jobs.push new_job
+ end
+
+ # reverse build
+ if rev_build_jobs.count > 0 then
+ rev_prjs_txt = rev_build_jobs.map {|j| "#{j.get_project().name}(#{j.os})"}.join(", ")
+ @log.info( " * Will check reverse-build for next projects: #{rev_prjs_txt}", Log::LV_USER)
+ end
+ for new_job in rev_build_jobs
+ @log.info( " * Checking reverse-build ... #{new_job.get_project().name}(#{new_job.id})", Log::LV_USER)
+ # job init
+ result = new_job.init()
+ # if init is succeeded!, try to execute
+ if result then
+ # check available server
rserver = @server.get_available_server( new_job )
if rserver != nil and rserver != @server then
new_job.set_remote_job( rserver )
end
- if result then
- new_job.execute(true)
- if new_job.status == "ERROR" then result = false end
- end
+ # execute
+ new_job.execute(true)
+ if new_job.status == "ERROR" then result = false end
+ end
- # check result
- if not result then
- return false
- end
+ # check result
+ if not result then
+ return false
end
end
@log.info( "Uploading ...", Log::LV_USER)
# get package path list
- binpkg_path_list = [ "#{@source_path}/#{@filename}" ]
+ if @pkg_type == "ARCHIVE" then
+ binpkg_path_list = Dir.glob("#{@source_path}/#{@pkg_name}")
+ else
+ binpkg_path_list = Dir.glob("#{@source_path}/*_*_*.zip")
+ end
# upload
u_client = Client.new( @server.pkgserver_url, nil, @log )
- u_client.update
snapshot = u_client.upload( @server.pkgserver_addr, @server.pkgserver_port, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd, binpkg_path_list)
if snapshot.nil? then
=begin
-
+
RemoteBuildJob.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
#@status = "DISCONNECTED"
client = BuildCommClient.create( @ip, @port )
if client.nil? then return end
- if client.send("QUERY,SYSTEM") then
+ if client.send("QUERY|SYSTEM") then
result = client.read_lines do |l|
tok = l.split(",").map { |x| x.strip }
@host_os = tok[0]
@waiting_jobs = []
client = BuildCommClient.create( @ip, @port )
if client.nil? then return end
- if client.send("QUERY,JOB") then
+ if client.send("QUERY|JOB") then
result = client.read_lines do |l|
tok = l.split(",").map { |x| x.strip }
end
# send build request
+ @log.info( "Sending build request to remote server")
result, result_files = send_build_request(project_name, project_passwd, os, clean,
rev_build_check_job, srcinfo, local_pkgs)
if not result then
# receive binary package
for file_name in result_files
- @log.info( "Receiving file... : #{file_name}", Log::LV_USER )
+ @log.info( "Receiving file from remote server : #{file_name}", Log::LV_USER )
result = receive_binary_package( "#{source_path}/#{file_name}" )
if not result then
@log.error( "File transfering failed!", Log::LV_USER )
# before remote package
def send_binary_package(file_path)
# create client
- client = BuildCommClient.create( @addr, @port )
+ client = BuildCommClient.create( @addr, @port, @log )
if client.nil? then
@log.error( "Creating communication client failed!", Log::LV_USER)
return false
# upload file
result = true
file_name = file_path.split("/")[-1]
- if client.send("UPLOAD,#{file_name}") then
+ if client.send("UPLOAD") then
result=client.send_file( @ftp_addr, @ftp_port, @ftp_username, @ftp_passwd, file_path )
if not result then
@log.error( "File uploading failed...#{file_name}", Log::LV_USER)
def send_build_request( project_name, project_passwd, os, clean, rev_build_job, commit,local_pkgs)
result_files = []
- client = BuildCommClient.create( @addr, @port )
+ client = BuildCommClient.create( @addr, @port, @log )
if client.nil? then
@log.error( "Creating communication client failed!", Log::LV_USER)
return false, result_files
local_pkg_names = local_pkgs.map { |path| File.basename(path) }
# send
- # BUILD,GIT,project_name,os,pkgsvr_url,async,internal,commit,pkgs
+ # BUILD|GIT|project_name|os|pkgsvr_url|async|internal|commit|pkgs
result = true
commit=commit.nil? ? "":commit
pkg_list=local_pkg_names.join(",")
rev=rev_build_job ? "YES":"NO"
- if client.send("BUILD,GIT,#{project_name},#{project_passwd},#{os},NO,YES,#{rev},#{commit},#{pkg_list}") then
+ if client.send("BUILD|GIT|#{project_name}|#{project_passwd}|#{os}|NO|YES|#{rev}|#{commit}|#{pkg_list}") then
result = client.read_lines do |l|
# check build result
if l.include? "Job is stopped by ERROR" then
# receive binary package of remote server
def receive_binary_package(file_path)
# create client
- client = BuildCommClient.create( @addr, @port )
+ client = BuildCommClient.create( @addr, @port, @log )
if client.nil? then
@log.error( "Creating communication client failed!", Log::LV_USER)
return false
# download file
result = true
file_name = file_path.split("/")[-1]
- if client.send("DOWNLOAD,#{file_name}") then
+ if client.send("DOWNLOAD|#{file_name}") then
result=client.receive_file( @ftp_addr, @ftp_port, @ftp_username, @ftp_passwd, file_path )
if not result then
@log.error( "File downloading failed...#{file_name}", Log::LV_USER)
--- /dev/null
+=begin
+
+ ReverseBuildChecker.rb
+
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
+
+Contact:
+Taejun Ha <taejun.ha@samsung.com>
+Jiil Hyoun <jiil.hyoun@samsung.com>
+Donghyuk Yang <donghyuk.yang@samsung.com>
+DongHee Yang <donghee.yang@samsung.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Contributors:
+- S-Core Co., Ltd
+=end
+
+require "log"
+$LOAD_PATH.unshift File.dirname(__FILE__)
+$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common"
+$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server"
+require "utils.rb"
+require "client.rb"
+require "BuildServer.rb"
+require "JobLog.rb"
+require "PackageManifest.rb"
+require "BuildJob.rb"
+require "RegisterPackageJob.rb"
+
+class ReverseBuildChecker
+
+ # check
+ def ReverseBuildChecker.check( job, exit_on_error, override_os = nil )
+ log = job.log
+ job_os = (override_os.nil?) ? job.os : override_os
+
+ # start
+ log.info( "Checking reverse build dependency ...", Log::LV_USER)
+
+ # get target packages to be checked
+ bin_pkg_name_list = []
+ src_pkg_name_list = []
+ case job.type
+ when "BUILD"
+ for pkg in job.pkginfo.get_target_packages(job_os)
+ bin_pkg_name_list.push pkg.package_name
+ end
+ when "REGISTER"
+ if job.pkg_type == "BINARY" then
+ bin_pkg_name_list.push job.pkg_name
+ else
+ src_pkg_name_list.push job.pkg_name
+ end
+ end
+
+ # get reverse projects from build dependency
+ rev_pkgs = []
+ for pkg_name in bin_pkg_name_list
+ rev_pkgs += job.pkgsvr_client.get_reverse_build_dependent_packages(pkg_name, job_os)
+ end
+ for pkg_name in src_pkg_name_list
+ rev_pkgs += job.pkgsvr_client.get_reverse_source_dependent_packages(pkg_name)
+ end
+ rev_pkgs.uniq!
+ rev_projects = job.server.prjmgr.get_projects_from_pkgs(rev_pkgs)
+
+ # create reverse build job
+ rev_build_jobs = []
+ for p in rev_projects
+ rev_prj = p[0]
+ rev_os = p[1]
+ rev_ver = p[2]
+
+ # if not "GIT" project, ignore it
+ if rev_prj.type != "GIT" then next end
+
+ # if the job is in the resolve process, the unresolved projects
+ # of its pending ancestor must be excluded
+ if not job.pending_ancestor.nil? then
+ found = false
+ job.pending_ancestor.rev_fail_projects.each { |fp|
+ f_prj = fp[0]
+ f_os = fp[1]
+
+ if rev_prj == f_prj and rev_os == f_os then
+ found = true
+ break
+ end
+ }
+ if found then next end
+ end
+
+ # if this is a sub job, all other sibling jobs must be excluded
+ if job.is_sub_job? then
+ found = false
+ for sub_job in job.get_parent_job().get_sub_jobs
+ sub_prj = sub_job.get_project()
+ sub_os = sub_job.os
+ if rev_prj == sub_prj and rev_os == sub_os then
+ found = true
+ break
+ end
+ end
+ if found then next end
+ end
+
+ # create job
+ new_job = rev_prj.create_new_job_from_version( rev_os, rev_ver )
+ new_job.set_rev_build_check_job( job )
+
+ rev_build_jobs.push new_job
+ end
+
+ # reverse build
+ if rev_build_jobs.count > 0 then
+ rev_prjs_msg = rev_build_jobs.map {|j| "#{j.get_project().name}(#{j.os})"}.join(", ")
+ log.info( " * Will check reverse-build for projects: #{rev_prjs_msg}", Log::LV_USER)
+ end
+
+ # for all reverse job
+ for new_job in rev_build_jobs
+ # add to job manager
+ job.server.jobmgr.add_reverse_build_job(new_job)
+ log.info( " * Added new job for reverse-build ... #{rev_prj.name}(#{rev_os}) (#{new_job.id})", Log::LV_USER)
+ end
+
+ # wait for job finish
+ rev_build_finished = false
+ success_list = []
+ failure_list = []
+ cancel_other_jobs = false
+ while not rev_build_finished
+ rev_build_finished = true
+ for rev_job in rev_build_jobs
+ rev_prj = rev_job.get_project()
+ rev_os = rev_job.os
+
+ case rev_job.status
+ when "ERROR", "CANCELED"
+ # add fail list
+ if not is_project_included?(failure_list, rev_prj, rev_os) then
+ log.info( " * Reverse-build FAIL ... #{rev_prj.name}(#{rev_os}) (#{rev_job.id})", Log::LV_USER)
+ failure_list.push [ rev_prj, rev_os ]
+ end
+
+ if exit_on_error then
+ # cancel all other jobs
+ cancel_other_jobs = true
+
+ # exit
+ break
+ end
+ when "FINISHED"
+ # add success list
+ if not success_list.include? rev_job then
+ log.info( " * Reverse-build OK ... #{rev_prj.name}(#{rev_os}) (#{rev_job.id})", Log::LV_USER)
+ success_list.push rev_job
+ end
+ else
+ if exit_on_error and cancel_other_jobs then
+ rev_job.cancel_state = "INIT"
+ else
+ rev_build_finished = false
+ end
+ end
+ end
+
+ sleep 1
+ end
+
+ return failure_list
+ end
+
+
+ private
+ def self.is_project_included?( prj_list, prj, os )
+ for p in prj_list
+ if p[0] == prj and p[1] == os then return true end
+ end
+
+ return false
+ end
+end
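Callers treat the return value as a failure list: an empty array means every reverse-dependent project still builds against the new package. A minimal usage sketch, mirroring how RegisterPackageJob invokes it in this change:

    # "job" stands for any BUILD/REGISTER job object used in this change.
    failures = ReverseBuildChecker.check( job, true )   # exit_on_error = true
    if failures.empty? then
      # safe to upload
    else
      failures.each { |prj, os| job.log.error( "reverse build broke #{prj.name}(#{os})" ) }
      job.status = "ERROR"
    end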
begin
ftp_url = Utils.generate_ftp_url(@parent_server.ftp_addr, @parent_server.ftp_port,
@parent_server.ftp_username, @parent_server.ftp_passwd)
- @comm_server = BuildCommServer.new(@parent_server.port, @log, ftp_url)
+ @comm_server = BuildCommServer.create(@parent_server.port, @log, ftp_url)
rescue
@log.info "Server creation failed"
puts "Server creation failed"
# parse request
cmd = ""
- if req_line.split(",").count > 0 then
- cmd = req_line.split(",")[0].strip
+ if req_line.split("|").count > 0 then
+ cmd = req_line.split("|")[0].strip
end
case cmd
# "BUILD"
def handle_cmd_build( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 3 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
case tok[1]
- # BUILD,GIT,proj,os,url,async,internal
+ # BUILD|GIT|proj|os|url|async|internal
# "internal" means that this job requested by other server.
when "GIT"
# parse
- project_name=tok[2]
+ project_name_list=tok[2].split(",")
passwd=tok[3]
- os=tok[4]
+ os_list=tok[4].split(",")
async = (not tok[5].nil? and tok[5]=="YES" ? true:false)
is_internal = (not tok[6].nil? and tok[6]=="YES" ? true:false)
- # check project
- prj = check_project_exist(project_name, req)
- if prj.nil? then
- raise "Requested project does not exist!"
- end
+ # for all projects
+ new_job_list = []
+ for project_name in project_name_list
+ # check project
+ prj = check_project_exist(project_name, req)
+ if prj.nil? then
+ raise "Requested project does not exist!"
+ end
- # check passwd
- if not check_project_password(prj, passwd, req) then
- raise "Project's password is not matched!!"
- end
+ # check passwd
+ if not check_project_password(prj, passwd, req) then
+ raise "Project's password is not matched!!"
+ end
+
+ # check os
+ os_list = check_supported_os( os_list , req )
+ if os_list.nil? or os_list.empty? then
+ raise "Unsupported OS name is used!"
+ end
- # create new job
- @log.info "Received a request for building this project : #{tok[2]}"
- new_job_list = create_new_job( project_name, os, req )
- if new_job_list.empty? then
- raise "Creating build job failed : #{project_name}, #{os}"
+ # create new job
+ for os in os_list
+ new_job = create_new_job( project_name, os, req )
+ if new_job.nil? then
+ @log.info "Building project(#{project_name}, #{os}) is ignored."
+ next
+ end
+ new_job_list.push new_job
+ @log.info "Received a request for building this project : #{project_name}, #{os}"
+ end
end
- for new_job in new_job_list
- # set internal flags
+ # if a single job was created, handle its flags directly
+ if new_job_list.count == 1 then
+ # if internal, set the reverse-check flag, commit-id and external package files
if is_internal then
# set job type
new_job.set_internal_job()
pkg_file = tok[i]
end
end
+
+ # use the single job as-is
+ new_job = new_job_list[0]
+ elsif new_job_list.count > 1 then
+ new_job = @parent_server.prjmgr.create_new_multi_build_job( new_job_list )
+
+ else
+ BuildCommServer.send_begin(req)
+ req.puts "Error: There is no valid job to build!"
+ BuildCommServer.send_end(req)
+ raise "No valid jobs!"
end
-
+
# create logger and set
- if new_job_list.count == 1 then
- logger = JobLog.new( new_job_list[0], req )
- if not async then new_job.set_logger(logger) end
- else
- logger = JobLog.new( nil, req )
- end
+ logger = JobLog.new( new_job, req )
+ if not async then new_job.set_logger(logger) end
logger.init
# notify that job has been received
- for job in new_job_list
- logger.info( "Added new job \"#{job.id}\" for #{job.os}!", Log::LV_USER)
- if not @parent_server.job_log_url.empty? then
- logger.info( " * Log URL : #{@parent_server.job_log_url}/#{job.id}/log", Log::LV_USER)
- end
+ logger.info( "Added new job \"#{new_job.id}\" for #{new_job.os}!", Log::LV_USER)
+ if not @parent_server.job_log_url.empty? then
+ logger.info( " * Log URL : #{@parent_server.job_log_url}/#{new_job.id}/log", Log::LV_USER)
end
 # if asynchronous, quit the connection
- if os == "all" or async then
+ if async then
logger.info( "Above job(s) will be processed asynchronously!", Log::LV_USER)
logger.close
end
- # add
- for new_job in new_job_list
- @parent_server.jobmgr.add_job( new_job )
- end
+ # add to job queue
+ @parent_server.jobmgr.add_job( new_job )
else
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
# "RESOLVE"
def handle_cmd_resolve( line ,req)
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 3 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
case tok[1]
- # RESOLVE,GIT,repos,commit,os,url
+ # RESOLVE|GIT|repos|commit|os|url
when "GIT"
# parse
project_name=tok[2]
raise "Project's password is not matched!!"
end
+ # check os
+ os_list = check_supported_os( os , req )
+ if os_list.nil? or os_list.empty? then
+ raise "Unsupported OS name is used!"
+ end
+ os = os_list[0]
+
# create new job
- @log.info "Received a request for resolving this project : #{tok[2]}"
- new_job_list = create_new_job( project_name, os, req )
- if new_job_list.empty? then
+ new_job = create_new_job( project_name, os, req )
+ if new_job.nil? then
raise "Creating build job failed : #{project_name}, #{os}"
end
+ @log.info "Received a request for resolving this project : #{project_name}, #{os}"
# resolve
- for new_job in new_job_list
- new_job.set_resolve_flag()
- end
+ new_job.set_resolve_flag()
# create logger and set
- if new_job_list.count == 1 then
- logger = JobLog.new( new_job_list[0], req )
- if not async then new_job.set_logger(logger) end
- else
- logger = JobLog.new( nil, req )
- end
+ logger = JobLog.new( new_job, req )
+ if not async then new_job.set_logger(logger) end
logger.init
# notify that job has been received
- for job in new_job_list
- logger.info( "Added new job \"#{job.id}\" for #{job.os}!", Log::LV_USER)
- if not @parent_server.job_log_url.empty? then
- logger.info( " * Log URL : #{@parent_server.job_log_url}/#{job.id}/log", Log::LV_USER)
- end
+ logger.info( "Added new job \"#{new_job.id}\" for #{new_job.os}!", Log::LV_USER)
+ if not @parent_server.job_log_url.empty? then
+ logger.info( " * Log URL : #{@parent_server.job_log_url}/#{new_job.id}/log", Log::LV_USER)
end
 # if asynchronous, quit the connection
- if os == "all" or async then
+ if async then
logger.info( "Above job(s) will be processed asynchronously!", Log::LV_USER)
logger.close
end
- for new_job in new_job_list
- @parent_server.jobmgr.add_job( new_job )
- end
+ @parent_server.jobmgr.add_job( new_job )
else
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
# "QUERY"
def handle_cmd_query( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 2 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
BuildCommServer.send(req,"#{@parent_server.ftp_addr},#{@parent_server.ftp_username},#{@parent_server.ftp_passwd}")
BuildCommServer.send_end(req)
BuildCommServer.disconnect(req)
-
+
# QUERY,JOB
when "JOB"
#puts "Received QUERY JOB"
+ # gather all jobs to show
+ job_list = @parent_server.jobmgr.jobs + @parent_server.jobmgr.internal_jobs
+
+ # send the status
BuildCommServer.send_begin(req)
- for job in @parent_server.jobmgr.get_working_jobs
- BuildCommServer.send(req,"WORKING,#{job.id},#{job.pkginfo.packages[0].source}")
- end
- for job in @parent_server.jobmgr.get_waiting_jobs
- BuildCommServer.send(req,"WAITING,#{job.id},#{job.pkginfo.packages[0].source}")
- end
- for job in @parent_server.jobmgr.get_remote_jobs
- BuildCommServer.send(req,"REMOTE ,#{job.id},#{job.pkginfo.packages[0].source}")
+ for job in job_list
+ status = job.status
+ if status == "REMOTE_WORKING" then status = "REMOTE" end
+ if job.cancel_state != "NONE" then status = "CANCEL" end
+
+ case job.type
+ when "BUILD"
+ if status == "PENDING" then
+ if job.pending_ancestor.nil? then
+ ids = "/"
+ else
+ ids = job.pending_ancestor.id
+ end
+ BuildCommServer.send(req,"#{status}:#{ids},#{job.id},#{job.get_project().name},#{job.os}")
+ else
+ BuildCommServer.send(req,"#{status},#{job.id},#{job.get_project().name},#{job.os}")
+ end
+ when "REGISTER"
+ if job.pkg_type == "BINARY" and not job.get_project().nil? then
+ BuildCommServer.send(req,"#{status},#{job.id},#{job.get_project().name},#{job.os}")
+ else
+ BuildCommServer.send(req,"#{status},#{job.id},#{job.pkg_name}")
+ end
+ when "MULTIBUILD"
+ BuildCommServer.send(req,"#{status},#{job.id},MULTI-BUILD : #{job.sub_jobs.map{|x| x.id}.join(" ")}")
+ end
end
+
BuildCommServer.send_end(req)
BuildCommServer.disconnect(req)
BuildCommServer.send_end(req)
BuildCommServer.disconnect(req)
+ when "OS"
+ BuildCommServer.send_begin(req)
+ # print supported OS list
+ for os_name in @parent_server.supported_os_list
+ BuildCommServer.send(req,"#{os_name}")
+ end
+ BuildCommServer.send_end(req)
+ BuildCommServer.disconnect(req)
+
else
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
# "CANCEL"
def handle_cmd_cancel( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 2 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
BuildCommServer.send(req, "There is no job \"#{tok[1]}\"")
raise "There is no job \"#{tok[1]}\""
else
+ if cancel_job.cancel_state == "NONE" then
# check passwd
- if not check_project_password( cancel_job.get_project, tok[2], req) then
- raise "Project's password is not matched!!"
- end
+ if cancel_job.type == "MULTIBUILD" then
+ cancel_job.sub_jobs.select{|x| x.cancel_state == "NONE" }.each do |sub|
+ if not check_project_password( sub.get_project, tok[2], req) then
+ BuildCommServer.send(req, "Project's password is not matched!!")
+ raise "Project's password is not matched!!"
+ end
+ end
- BuildCommServer.send(req, "\"#{cancel_job.id} #{cancel_job.pkginfo.packages[0].source} #{cancel_job.status}\"")
- cancel_job.cancel
+ BuildCommServer.send(req, "\"#{cancel_job.id}, #{cancel_job.sub_jobs.map{|x| x.id}.join(", ")}\" will be canceled")
+ cancel_job.cancel_state = "INIT"
+ else
+ if not check_project_password( cancel_job.get_project, tok[2], req) then
+ BuildCommServer.send(req, "Project's password is not matched!!")
+ raise "Project's password is not matched!!"
+ else
+ BuildCommServer.send(req, "\"#{cancel_job.id}\" will be canceled")
+ cancel_job.cancel_state = "INIT"
+ end
+ end
+ else
+ BuildCommServer.send(req, "\"#{cancel_job.id}\" is already canceled")
+ end
end
BuildCommServer.send_end(req)
BuildCommServer.disconnect(req)
# "STOP"
def handle_cmd_stop( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 2 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
# "FULLBUILD"
def handle_cmd_fullbuild( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 2 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
+
+ server_passwd = tok[1]
- # create full build job
- new_job = @parent_server.prjmgr.create_new_fullbuild_job()
- logger = JobLog.new( new_job, req )
- new_job.set_logger(logger)
- logger.init
+ # check server password
+ if server_passwd != @parent_server.password then
+ BuildCommServer.send_begin(req)
+ BuildCommServer.send(req,"Password mismatched!")
+ BuildCommServer.send_end(req)
+ BuildCommServer.disconnect(req)
+ else
+ # create full build job
+ new_job = @parent_server.prjmgr.create_new_full_build_job()
- # add to job
- @parent_server.jobmgr.add_job( new_job )
+ # set logger
+ logger = JobLog.new( new_job, req )
+ new_job.set_logger(logger)
+ logger.init
+
+ # add to job
+ @parent_server.jobmgr.add_job( new_job )
+ end
end
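# --- Illustrative sketch (not part of the patch) ---
# The handlers above switch the request field separator from "," to "|".
# A minimal sketch of how a FULLBUILD request line is framed and parsed;
# the password value is a placeholder, not a real server setting.
request = ["FULLBUILD", "server-password"].join("|")
tok = request.split("|").map { |x| x.strip }
puts tok.inspect   # => ["FULLBUILD", "server-password"]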
# "REGISTER"
def handle_cmd_register( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 4 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
type = tok[1]
case type
- # REGISTER,BINARY-LOCAL,local_path
- # REGISTER,SOURCE-LOCAL,local_path
+ # REGISTER|BINARY-LOCAL|local_path
+ # REGISTER|SOURCE-LOCAL|local_path
when "BINARY-LOCAL", "SOURCE-LOCAL"
file_path = tok[2]
new_job = @parent_server.jobmgr.create_new_register_job( file_path )
# add
@parent_server.jobmgr.add_job( new_job )
- # REGISTER,BINARY,filename,passwd
+ # REGISTER|BINARY|filename|passwd
when "BINARY"
# parse
filename = tok[2]
# "UPLOAD"
def handle_cmd_upload( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 1 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
- file_name = tok[1]
- @log.info "Received a request for upload file : #{file_name}"
BuildCommServer.send_begin(req)
begin
@comm_server.receive_file(req, @parent_server.incoming_path)
# "DOWNLOAD"
def handle_cmd_download( line, req )
- tok = line.split(",").map { |x| x.strip }
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 1 then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
private
- def create_new_job( project_name, os, req )
- result = []
+ def check_supported_os(os_list, req)
- if os == "all" then
- jobs = @parent_server.prjmgr.create_new_jobs_for_all_os(project_name)
- result += jobs
- else
- new_job = @parent_server.prjmgr.create_new_job(project_name, os)
- if not new_job.nil? then
- result.push new_job
+ # check if supported os list contain at least one OS
+ if @parent_server.supported_os_list.empty? then
+ BuildCommServer.send_begin(req)
+ req.puts "Error: There is no OS supported by the build server."
+ BuildCommServer.send_end(req)
+ return nil
+ end
+
+ result = []
+ for os in os_list
+ if os == "all" or os == "*" then
+ result = result + @parent_server.supported_os_list
+
+ elsif os == "default" then
+ os = @parent_server.supported_os_list[0]
+ result.push os
+ @log.info "The default OS \"#{os}\" is used as target OS"
+
+ elsif os.include? "*" then
+ reg_os = os.gsub("*","[a-zA-Z0-9.]*")
+ for svr_os in @parent_server.supported_os_list
+ matches = svr_os.match("#{reg_os}")
+ if not matches.nil? and matches.size == 1 and
+ matches[0] == svr_os then
+ result.push svr_os
+ end
+ end
+ else
+ if not @parent_server.supported_os_list.include?(os) then
+ BuildCommServer.send_begin(req)
+ req.puts "Error: Unsupported OS name \"#{os}\" is used!"
+ req.puts "Error: Check the following supported OS list. "
+ for os_name in @parent_server.supported_os_list
+ req.puts " * #{os_name}"
+ end
+ BuildCommServer.send_end(req)
+ return nil
+ else
+ result.push os
+ end
end
end
- if result.empty? then
+ if result.empty? then
BuildCommServer.send_begin(req)
- req.puts "Error: Creating job failed: #{project_name}, #{os}"
+ req.puts "Error: There is no OS supported by the build server."
BuildCommServer.send_end(req)
+ return nil
end
+ result.uniq!
+
return result
end
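# --- Illustrative sketch (not part of the patch) ---
# Standalone re-implementation of the wildcard expansion used by
# check_supported_os above, so the matching rule can be tried in isolation.
# The supported OS list below is a made-up example, not a server value.
supported = ["ubuntu-32", "ubuntu-64", "windows-32", "windows-64", "macos-64"]

def expand_os_pattern(pattern, supported)
  return supported.dup if pattern == "all" or pattern == "*"
  if pattern.include? "*" then
    reg = pattern.gsub("*", "[a-zA-Z0-9.]*")
    return supported.select { |os| (m = os.match(reg)) and m[0] == os }
  end
  supported.include?(pattern) ? [pattern] : []
end

puts expand_os_pattern("ubuntu-*", supported).inspect   # => ["ubuntu-32", "ubuntu-64"]
puts expand_os_pattern("windows-64", supported).inspect # => ["windows-64"]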
+ private
+ def create_new_job( project_name, os, req )
+
+ return @parent_server.prjmgr.create_new_job(project_name, os)
+ end
+
+
def create_new_upload_job( project_name, filename, req)
new_job = @parent_server.prjmgr.get_project(project_name).create_new_job(filename)
# clean
def clean( src_path )
- build_root_dir = @buildroot_dir
-
- # create pkginfo
- pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest")
-
- # make clean
- for pkg in pkginfo.packages
- for os1 in ["linux","windows","darwin"]
- if File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{os1}" then
- FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{os1}"
- end
- end
- end
-
- # execute
- return execute_build_command("clean", src_path, build_root_dir, @host_os )
+ return clean_project_directory( src_path, nil )
end
# build
- def build( src_path, os, clean, local_pkgs, use_cache )
+ def build( src_path, os, clean, local_pkgs, is_local_build )
# create pkginfo
- pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest")
-
- # check there are packages which can be built
- if not pkginfo.package_exist?(os, Utils::HOST_OS ) then
- @log.error( "There are no packages which can be built on this host OS: #{Utils::HOST_OS}")
- @log.error( " * Check \"Build-host-os\" in pkginfo.manifest" )
+ if not File.exist? "#{src_path}/package/pkginfo.manifest" then
+ @log.error( "The \"package/pkginfo.manifest\" file does not exist!", Log::LV_USER)
+ return false
+ end
+
+ # read pkginfo
+ begin
+ pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest")
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
return false
end
+ # set default build os
+ build_host_os = @host_os
+
+ # check there are packages which can be built
+ if not pkginfo.package_exist?(os, build_host_os ) then
+ if is_local_build and File.exist? "#{src_path}/package/pkginfo.manifest.local" then
+ begin
+ pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest.local")
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ return false
+ end
+ if not pkginfo.package_exist?(os, build_host_os ) then
+
+ @log.error( "This project does not support a build on this host OS: #{build_host_os}")
+ @log.error( " * Check \"Build-host-os\" in pkginfo.manifest or pkginfo.manifest.local" )
+
+ return false
+ end
+ else
+ @log.error( "This project does not support a build on this host OS: #{build_host_os}")
+ @log.error( " * Check \"Build-host-os\" in pkginfo.manifest" )
+
+ return false
+ end
+ end
+
# set build root
build_root_dir = @buildroot_dir
if not File.exist? build_root_dir then
cl = Client.new(@pkgserver_url, build_root_dir, @log)
if clean then
cl.clean(true)
- cl.update
end
# install build dependencies
- package_overwrite_list = []
@log.info( "Installing dependent packages...", Log::LV_USER)
pkginfo.get_build_dependencies( os ).each do |dep|
if dep.target_os_list.count != 0 then
# get local dependent package
pkgexp = Regexp.new("\/#{dep.package_name}_.*_#{dep_target_os}\.zip$")
- package_overwrite_list += local_pkgs.select{|l| l =~ pkgexp}
+ local_dep_pkgs = local_pkgs.select{|l| l =~ pkgexp}
- # install pkgs
- if not cl.install(dep.package_name, dep_target_os, true, false) then
- @log.error( "Installing \"#{dep.package_name}\" failed!", Log::LV_USER)
- return false
+ # install package from remote package server
+ if local_dep_pkgs.empty? then
+ if not cl.install(dep.package_name, dep_target_os, true, false) then
+ @log.error( "Installing \"#{dep.package_name}\" failed!", Log::LV_USER)
+ return false
+ end
+ else
+ for l in local_dep_pkgs
+ @log.info( "Installing local pacakge...#{l}", Log::LV_USER)
+ cl.install_local_pkg(l,false)
+ end
end
end
- # overwrite local dependent packages
- package_overwrite_list.each do |l|
- @log.info( "Overwriting...#{l}", Log::LV_USER)
- cl.install_local_pkg(l,false)
- end
-
@log.info( "Downloading dependent source packages...", Log::LV_USER)
- pkginfo.get_source_dependencies(os,@host_os).each do |dep|
- @log.info( " * #{dep.package_name}", Log::LV_USER)
-
- if cl.download_dep_source(dep.package_name).nil? then
- @log.error( "Downloading \"#{dep.package_name}\" failed!", Log::LV_USER)
+ src_archive_list = []
+ pkginfo.get_source_dependencies(os,build_host_os).each do |dep|
+ src_archive_list.push dep.package_name
+ end
+ src_archive_list.uniq!
+ for archive_name in src_archive_list
+ @log.info( " * #{archive_name}", Log::LV_USER)
+ if cl.download_dep_source(archive_name).nil? then
+ @log.error( "Downloading \"#{archive_name}\" failed!", Log::LV_USER)
return false
end
- end
+ end
# make clean
@log.info( "Make clean...", Log::LV_USER)
- for pkg in pkginfo.packages
- for os1 in ["linux","windows","darwin"]
- if File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{os1}" then
- FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{os1}"
- end
- end
- end
-
- # convert path if windows
- if Utils::HOST_OS == "windows" then
- build_root_dir = Utils.get_unix_path( build_root_dir )
- end
-
- if not execute_build_command("clean", src_path, build_root_dir, os) then
+ if not clean_project_directory( src_path, os ) then
return false
end
- # execute build script
- #if use_cache then
- # @log.info( "Make build from cache...", Log::LV_USER)
- # if not execute_build_command("build_cache", src_path, build_root_dir, os) then
- # @log.warn( "Build from cache failed, \"build\" target will used instead...", Log::LV_USER)
- # if not execute_build_command("build", src_path, build_root_dir, os) then
- # return false
- # end
- # end
- #else
- # @log.info( "Make build...", Log::LV_USER)
- # if not execute_build_command("build", src_path, build_root_dir, os) then
- # return false
- # end
- #
- # # saving build cache
- # if not execute_build_command("save_cache", src_path, build_root_dir, os) then
- # @log.warn( "Saving cache failed...", Log::LV_USER)
- # else
- # @log.info( "Saved build information to cache...", Log::LV_USER)
- # end
- #end
+ @log.info( "Make build...", Log::LV_USER)
if not execute_build_command("build", src_path, build_root_dir, os) then
return false
end
-
# execute install script
@log.info( "Make install...", Log::LV_USER)
if not execute_build_command("install", src_path, build_root_dir, os) then
@log.error( "Creating packages failed!", Log::LV_USER)
return false
end
-
+
return true
end
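# --- Illustrative sketch (not part of the patch) ---
# Simplified view of the manifest fallback in build() above: the normal
# manifest is read first and, for local builds only, pkginfo.manifest.local is
# tried when the normal one cannot be built on this host. Paths are
# placeholders, and buildable? stands in for PackageManifest#package_exist?.
def select_manifest(src_path, is_local_build)
  manifest = "#{src_path}/package/pkginfo.manifest"
  local    = "#{manifest}.local"
  return manifest if buildable?(manifest)
  return local if is_local_build and File.exist?(local) and buildable?(local)
  nil
end

def buildable?(manifest_path)
  # placeholder for PackageManifest.new(manifest_path).package_exist?(os, host_os)
  File.exist?(manifest_path)
end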
# execute build command
def execute_build_command( target, src_path, build_root_dir, os )
-
- pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest")
- env_def =
+ # get category
+ os_category = Utils.get_os_category( os )
+
+ env_def =
"BUILD_TARGET_OS=#{os} \
+ TARGET_OS=#{os} \
+ TARGET_OS_CATEGORY=#{os_category} \
SRCDIR=\"#{src_path}\" \
- PKG_CACHE_DIR=\"#{@cache_dir}/#{pkginfo.packages[0].source}/#{os}\" \
ROOTDIR=\"#{build_root_dir}\" "
+ # check script file
+ script_file = "#{src_path}/package/build.#{@host_os}"
+ if not File.exist? script_file then
+ if Utils.is_linux_like_os( @host_os ) then
+ script_file = "#{src_path}/package/build.linux"
+ elsif Utils.is_windows_like_os( @host_os ) then
+ script_file = "#{src_path}/package/build.windows"
+ elsif Utils.is_macos_like_os( @host_os ) then
+ script_file = "#{src_path}/package/build.macos"
+ end
+ # check old script file
+ if not File.exist? script_file then
+ @log.error( "The script file not found!: \"package/build.#{@host_os}\"", Log::LV_USER)
+ return false
+ end
+ end
+
# read build script
# this will ignore last lines without block
contents = []
- File.open( "#{src_path}/package/build.#{@host_os}", "r" ) do |f|
+ File.open( script_file, "r" ) do |f|
lines = []
f.each_line do |l|
lines.push l
when "install"
f.puts " "
else
- @log.warn( "Wron build-target is used: \"#{target}\"", Log::LV_USER)
+ @log.warn( "Wrong build-target is used: \"#{target}\"", Log::LV_USER)
return false
end
f.puts "#{target}"
end
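# --- Illustrative sketch (not part of the patch) ---
# The environment string handed to the package build script by
# execute_build_command, shown with made-up values. TARGET_OS_CATEGORY is
# what Utils.get_os_category would return for the target OS.
os             = "ubuntu-32"
os_category    = "linux"
src_path       = "/path/to/project"     # placeholder
build_root_dir = "/path/to/buildroot"   # placeholder

env_def = "BUILD_TARGET_OS=#{os} " \
          "TARGET_OS=#{os} " \
          "TARGET_OS_CATEGORY=#{os_category} " \
          "SRCDIR=\"#{src_path}\" " \
          "ROOTDIR=\"#{build_root_dir}\" "
puts env_def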
- # write pkginfo.manifest and install/remove script
+ # write pkginfo.manifest
def write_pkginfo_files(pkginfo,os,src_path)
+ # get category
+ os_category = Utils.get_os_category( os )
+
for pkg in pkginfo.packages
# skip if not support the target os
- if not pkg.os.include? os
+ if not pkg.os_list.include? os
next
end
# write manifest file
install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}"
-
+
# if there is no install directory, error
if not File.exist? install_dir then
- @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER)
- @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER)
- return false
+ install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}"
+
+ if not File.exist? install_dir then
+ @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER)
+ @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER)
+ return false
+ end
end
-
- # write pkginfo.manifest
+
+ # write pkginfo.manifest
File.open("#{install_dir}/pkginfo.manifest", "w") do |f|
- pkg.print_to_file_with_os( f, os )
+ pkg.print_to_file( f )
end
end
tar = nil
+ # get category
+ os_category = Utils.get_os_category( os )
+
if File.exist? "#{src_path}/package/#{pkg.package_name}.install.#{os}"
src = "#{src_path}/package/#{pkg.package_name}.install.#{os}"
else
- src = nil
+ if File.exist? "#{src_path}/package/#{pkg.package_name}.install.#{os_category}"
+ src = "#{src_path}/package/#{pkg.package_name}.install.#{os_category}"
+ else
+ src = nil
+ end
+ end
+
+ install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}"
+
+ # if there is no install directory, error
+ if not File.exist? install_dir then
+ install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}"
+
+ if not File.exist? install_dir then
+ @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER)
+ @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER)
+ return false
+ end
end
if not src.nil? then
- if os == "linux" or os == "darwin" then
- tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/install.sh"
- elsif os == "windows" then
- tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/install.BAT"
+ if Utils.is_unix_like_os( os ) then
+ tar = "#{install_dir}/install.sh"
+ elsif Utils.is_windows_like_os( os) then
+ tar = "#{install_dir}/install.BAT"
else
puts "Unknown OS: #{os} "
return
tar = nil
+ # get category
+ os_category = Utils.get_os_category( os )
+
if File.exist? "#{src_path}/package/#{pkg.package_name}.remove.#{os}"
src = "#{src_path}/package/#{pkg.package_name}.remove.#{os}"
else
- src = nil
+ if File.exist? "#{src_path}/package/#{pkg.package_name}.remove.#{os_category}"
+ src = "#{src_path}/package/#{pkg.package_name}.remove.#{os_category}"
+ else
+ src = nil
+ end
+ end
+
+ install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}"
+
+ # if there is no intall directory, error
+ if not File.exist? install_dir then
+ install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}"
+
+ if not File.exist? install_dir then
+ @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER)
+ @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER)
+ return false
+ end
end
if not src.nil?
- if os == "linux" or os == "darwin" then
- tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/remove.sh"
- elsif os == "windows" then
- tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/remove.BAT"
+ if Utils.is_unix_like_os( os ) then
+ tar = "#{install_dir}/remove.sh"
+ elsif Utils.is_windows_like_os( os) then
+ tar = "#{install_dir}/remove.BAT"
else
puts "Unknown OS: #{os} "
return
# create package file
def make_zip(pkginfo,os,src_path)
+ # get category
+ os_category = Utils.get_os_category( os )
+
for pkg in pkginfo.packages
# skip if not support the target os
- if not pkg.os.include? os
+ if not pkg.os_list.include? os
next
end
# cd install dir
install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}"
+ if not File.exist? install_dir then
+ install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}"
+
+ if not File.exist? install_dir then
+ @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER)
+ @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER)
+ return false
+ end
+ end
# zip
@log.info( "Creating package file ... #{pkg.package_name}_#{pkg.version}_#{os}.zip", Log::LV_USER)
end
return true
end
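# --- Illustrative sketch (not part of the patch) ---
# The package file name convention produced by make_zip and parsed back later
# by Utils.get_package_name/version/os_from_package_file. Values are made up.
package_name = "example-pkg"
version      = "1.0.0"
os           = "ubuntu-32"
puts "#{package_name}_#{version}_#{os}.zip"  # => example-pkg_1.0.0_ubuntu-32.zip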
+
+
+ # clean the temporary directories created for packaging
+ def clean_project_directory(src_path, target_os = nil)
+
+ # if os is not set, use host os instead
+ if target_os.nil? then target_os = @host_os end
+
+ # convert path if windows
+ if Utils.is_windows_like_os(@host_os) then
+ build_root_dir = Utils.get_unix_path( @buildroot_dir )
+ else
+ build_root_dir = @buildroot_dir
+ end
+
+ # create pkginfo
+ begin
+ pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest")
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ return false
+ end
+
+ # get category
+ # make clean
+ for pkg in pkginfo.packages
+ os = pkg.os
+ os_category = Utils.get_os_category( os )
+
+ if File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}" then
+ FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}"
+ elsif File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" then
+ FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{os_category}"
+ end
+ end
+
+ # clean local-only package's directory
+ if File.exist? "#{src_path}/package/pkginfo.manifest.local" then
+ begin
+ pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest.local")
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ return false
+ end
+ for pkg in pkginfo.packages
+ os = pkg.os
+ os_category = Utils.get_os_category( os )
+
+ if File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}" then
+ FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}"
+ elsif File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" then
+ FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{os_category}"
+ end
+ end
+ end
+
+ # execute
+ return execute_build_command("clean", src_path, build_root_dir, target_os )
+ end
end
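# --- Illustrative sketch (not part of the patch) ---
# Directory lookup order used when packaging and cleaning: the OS-specific
# directory is preferred, the OS-category directory is the fallback.
# Names and paths are placeholders.
src_path     = "/path/to/project"
package_name = "example-pkg"
os           = "ubuntu-32"
os_category  = "linux"   # what Utils.get_os_category(os) would return

candidates = [
  "#{src_path}/package/#{package_name}.package.#{os}",
  "#{src_path}/package/#{package_name}.package.#{os_category}"
]
install_dir = candidates.find { |d| File.exist? d }
puts(install_dir.nil? ? "no package directory created yet" : install_dir)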
attr_accessor :packages
def initialize( file_path )
- @pkg_map = Parser.read_multy_pkginfo_from file_path
- @packages = @pkg_map.values
+ @packages = Parser.read_multy_pkginfo_from file_path
end
list = []
for pkg in @packages
# package that has the target os
- if not pkg.os.include?(target_os)
+ if not pkg.os_list.include?(target_os)
next
end
# package that has the target os
for dep in pkg.build_dep_list
- list.push dep
+ list.push dep
end
end
list.uniq!
list = []
for pkg in @packages
# only package that used in target os
- if not pkg.os.include?(target_os)
+ if not pkg.os_list.include?(target_os)
next
end
list = []
for pkg in @packages
# only package that used in target os
- if not pkg.os.include?(target_os)
+ if not pkg.os_list.include?(target_os)
next
end
def package_exist?(target_os, host_os)
for pkg in @packages
# only package that used in target os
- if pkg.os.include?(target_os) and
+ if pkg.os_list.include?(target_os) and
pkg.build_host_os.include?(host_os)
return true
end
def get_target_packages(target_os)
pkgs = []
for pkg in @packages
- if pkg.os.include?(target_os) then
+ if pkg.os_list.include?(target_os) then
pkgs.push pkg
end
end
else
ftp.connect(ip, port)
end
+ @@log.info "[FTP log] Connected FTP server (#{ip}:#{port})"
ftp.login(username, passwd)
ftp.binary = true
ftp.mkdir(uniqdir)
ftp.chdir(uniqdir)
ftp.put(bpath)
+ @@log.info "[FTP log] Put a file"
+ @@log.info "[FTP log] from \"#{bpath}\" to \"#{ftp_filepath}\""
+ files = ftp.list(filename)
+ if files.empty? then
+ @@log.error "[FTP log] Failed to upload file (#{filename} does not exist)"
+ return nil
+ end
ftp.quit
+ @@log.info "[FTP log] Disconnected FTP server"
rescue => e
- if not @@log.nil? then
- @@log.error "FileTransfer::putfile"
- end
- raise e
+ @@log.error "[FTP log] Exception"
+ @@log.error e.message
+ @@log.error e.backtrace.inspect
+ return nil
end
- return ftp_filepath
+ return ftp_filepath
end
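# --- Illustrative sketch (not part of the patch) ---
# Possible call pattern after this change: putfile/getfile now log through
# @@log and return nil on failure instead of raising. The host, port,
# credentials and paths are placeholders, and the putfile argument order is
# assumed to mirror getfile's (ip, port, username, passwd, local_path).
ip, port, user, passwd = "127.0.0.1", 21, "ftpuser", "ftppasswd"

remote_path = FileTransfer.putfile(ip, port, user, passwd, "/tmp/example.zip")
if remote_path.nil? then
  puts "upload failed"
else
  FileTransfer.getfile(ip, port, user, passwd, remote_path, "/tmp/downloads")
end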
def FileTransfer.getfile(ip, port, username, passwd, bpath, target)
else
ftp.connect(ip, port)
end
+ @@log.info "[FTP log] Connected FTP server (#{ip}:#{port})"
ftp.login(username, passwd)
ftp.binary = true
ftp.chdir(dirname)
ftp.get(filename, dst_file)
+ @@log.info "[FTP log] Get a file"
+ @@log.info "[FTP log] from \"#{bpath}\" to \"#{dst_file}\""
ftp.quit
+ @@log.info "[FTP log] Disconnected FTP server"
rescue => e
- @@log.error "FileTransfer::getfile"
- raise e
- end
+ @@log.error "[FTP log] Exception"
+ @@log.error e.message
+ @@log.error e.backtrace.inspect
+ return nil
+ end
+ if not File.exist? dst_file then
+ @@log.error "[FTP log] Failed to download file (#{dst_file} does not exist)"
+ return nil
+ end
return bpath
end
else
ftp.connect(ip, port)
end
+ @@log.info "[FTP log] Connected FTP server (#{ip}:#{port})"
ftp.login(username, passwd)
old_dir = ftp.pwd
ftp.chdir(dirname)
end
ftp.chdir(old_dir)
ftp.rmdir(dirname)
+ @@log.info "[FTP log] Clean dir (#{dirname})"
ftp.quit
+ @@log.info "[FTP log] Disconnected FTP server"
rescue => e
- @@log.error "FileTransfer::cleandir"
- raise e
+ @@log.error "[FTP log] Exception"
+ @@log.error e.message
+ @@log.error e.backtrace.inspect
+ return nil
end
return true
=begin
-
- package.rb
+
+ package.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
=end
class Package
- attr_accessor :package_name, :label, :version, :os, :build_host_os, :maintainer, :attribute, :install_dep_list, :build_dep_list, :source_dep_list, :conflicts, :source, :src_path, :path, :origin, :checksum, :size, :description
+ attr_accessor :package_name, :label, :version, :os, :build_host_os, :maintainer, :attribute, :install_dep_list, :build_dep_list, :source_dep_list, :conflicts, :source, :src_path, :path, :origin, :checksum, :size, :description, :os_list, :custom
def initialize (package_name)
@package_name = package_name
@label = ""
@version = ""
@os = ""
+ @os_list = []
@build_host_os = []
@maintainer = ""
@attribute = []
@checksum = ""
@size = ""
@description = ""
- end
+ @custom = ""
+ end
+
def print
- puts "Package : " + @package_name
- if not @label.empty? then puts "Label : " + @label end
- if not @version.empty? then puts "Version : " + @version end
- if not @os.empty? then puts "OS : " + @os end
- if not @build_host_os.empty? then puts "Build-host-os : " + @build_host_os.join("|") end
- if not @maintainer.empty? then puts "Maintainer : " + @maintainer end
- if not @attribute.empty? then puts "Attribute : " + @attribute.join("|") end
- if not @install_dep_list.empty? then
- puts "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @build_dep_list.empty? then
- puts "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @source_dep_list.empty? then
- puts "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @conflicts.empty? then
- puts "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ")
- end
- if not @source.empty? then puts "Source : " + @source end
- if not @src_path.empty? then puts "Src-path : " + @src_path end
- if not @path.empty? then puts "Path : " + @path end
- if not @origin.empty? then puts "Origin : " + @origin end
- if not @checksum.empty? then puts "SHA256 : " + @checksum end
- if not @size.empty? then puts "Size : " + @size end
- if not @description.empty? then puts "Description : " + @description end
- end
+ puts self.to_s
+ end
def to_s
string = "Package : " + @package_name
- if not @label.empty? then string = string + "\n" + "Label : " + @label end
- if not @version.empty? then string = string + "\n" + "Version : " + @version end
- if not @os.empty? then string = string + "\n" + "OS : " + @os end
- if not @build_host_os.empty? then string = string + "\n" + "Build-host-os : " + @build_host_os.join("|") end
- if not @maintainer.empty? then string = string + "\n" + "Maintainer : " + @maintainer end
- if not @attribute.empty? then string = string + "\n" + "Attribute : " + @attribute.join("|") end
- if not @install_dep_list.empty? then
+ if not @label.empty? then string = string + "\n" + "Label : " + @label end
+ if not @version.empty? then string = string + "\n" + "Version : " + @version end
+ if not @os_list.empty? then string = string + "\n" + "OS : " + @os_list.join(", ") end
+ if not @build_host_os.empty? then string = string + "\n" + "Build-host-os : " + @build_host_os.join(", ") end
+ if not @maintainer.empty? then string = string + "\n" + "Maintainer : " + @maintainer end
+ if not @attribute.empty? then string = string + "\n" + "Attribute : " + @attribute.join("|") end
+ if not @install_dep_list.empty? then
string = string + "\n" + "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @build_dep_list.empty? then
+ end
+ if not @build_dep_list.empty? then
string = string + "\n" + "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @source_dep_list.empty? then
+ end
+ if not @source_dep_list.empty? then
string = string + "\n" + "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @conflicts.empty? then
+ end
+ if not @conflicts.empty? then
string = string + "\n" + "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ")
- end
- if not @source.empty? then string = string + "\n" + "Source : " + @source end
- if not @src_path.empty? then string = string + "\n" + "Src-path : " + @src_path end
- if not @path.empty? then string = string + "\n" + "Path : " + @path end
- if not @origin.empty? then string = string + "\n" + "Origin : " + @origin end
- if not @checksum.empty? then string = string + "\n" + "SHA256 : " + @checksum end
- if not @size.empty? then string = string + "\n" + "Size : " + @size end
- if not @description.empty? then string = string + "\n" + "Description : " + @description end
- return string
- end
- def print_to_file(file)
- file.puts "Package : " + @package_name
- if not @label.empty? then file.puts "Label : " + @label end
- if not @version.empty? then file.puts "Version : " + @version end
- if not @os.empty? then file.puts "OS : " + @os end
- if not @build_host_os.empty? then file.puts "Build-host-os : " + @build_host_os.join("|") end
- if not @maintainer.empty? then file.puts "Maintainer : " + @maintainer end
- if not @attribute.empty? then file.puts "Attribute : " + @attribute.join("|") end
- if not @install_dep_list.empty? then
- file.puts "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @build_dep_list.empty? then
- file.puts "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @source_dep_list.empty? then
- file.puts "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @conflicts.empty? then
- file.puts "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ")
- end
- if not @source.empty? then file.puts "Source : " + @source end
- if not @src_path.empty? then file.puts "Src-path : " + @src_path end
- if not @path.empty? then file.puts "Path : " + @path end
- if not @origin.empty? then file.puts "Origin : " + @origin end
- if not @checksum.empty? then file.puts "SHA256 : " + @checksum end
- if not @size.empty? then file.puts "Size : " + @size end
- if not @description.empty? then file.puts "Description : " + @description end
+ end
+ if not @source.empty? then string = string + "\n" + "Source : " + @source end
+ if not @src_path.empty? then string = string + "\n" + "Src-path : " + @src_path end
+ if not @path.empty? then string = string + "\n" + "Path : " + @path end
+ if not @origin.empty? then string = string + "\n" + "Origin : " + @origin end
+ if not @checksum.empty? then string = string + "\n" + "SHA256 : " + @checksum end
+ if not @size.empty? then string = string + "\n" + "Size : " + @size end
+ if not @custom.empty? then string = string + "\n" + @custom end
+ if not @description.empty? then string = string + "\n" + "Description : " + @description end
+ return string
end
- def print_to_file_with_os(file,target_os)
- file.puts "Package : " + @package_name
- if not @version.empty? then file.puts "Version : " + @version end
- if not @label.empty? then file.puts "Label : " + @label end
- file.puts "OS : " + target_os
- if not @build_host_os.empty? then file.puts "Build-host-os : " + @build_host_os.join("|") end
- if not @maintainer.empty? then file.puts "Maintainer : " + @maintainer end
- if not @attribute.empty? then file.puts "Attribute : " + @attribute.join("|") end
- if not @install_dep_list.empty? then
- file.puts "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @build_dep_list.empty? then
- file.puts "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @source_dep_list.empty? then
- file.puts "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ")
- end
- if not @conflicts.empty? then
- file.puts "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ")
- end
- if not @source.empty? then file.puts "Source : " + @source end
- if not @src_path.empty? then file.puts "Src-path : " + @src_path end
- if not @path.empty? then file.puts "Path : " + @path end
- if not @origin.empty? then file.puts "Origin : " + @origin end
- if not @checksum.empty? then file.puts "SHA256 : " + @checksum end
- if not @size.empty? then file.puts "Size : " + @size end
- if not @description.empty? then file.puts "Description : " + @description end
+ def print_to_file(file)
+ file.puts self.to_s
end
-end
+end
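# --- Illustrative sketch (not part of the patch) ---
# Building a Package by hand and printing it through the consolidated to_s.
# Field values are placeholders; assumes package.rb above has been loaded.
pkg = Package.new("example-pkg")
pkg.version       = "1.0.0"
pkg.os_list       = ["ubuntu-32", "windows-32"]
pkg.os            = pkg.os_list[0]
pkg.build_host_os = ["ubuntu-32"]
pkg.maintainer    = "Example Maintainer <maintainer@example.org>"
puts pkg.to_s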
=begin
-
- parser.rb
+
+ parser.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
require "dependency"
class Parser
- def Parser.read_multy_pkginfo_from (file)
- pkglist = {}
+ def Parser.read_multy_pkginfo_from (file, only_common = false)
+ pkglist = []
+ package = nil
+
+ #file check
+
File.open file,"r" do |f|
#variable initialize
- package_name = ""
- label = ""
- version = ""
- os = ""
- build_host_os = []
- maintainer = ""
- attribute = []
- install_dep_list = []
- build_dep_list = []
- source_dep_list = []
- conflicts = []
- source = ""
- src_path = ""
- path = ""
- origin = ""
- checksum = ""
- size = ""
- description = ""
-
+ state = "INIT"
+ common_source = ""
+ common_version = ""
+ common_maintainer = ""
f.each_line do |l|
# separator
- if l.strip.empty? then
- #make package and initialize
- if not package_name.empty? and not os.empty? then
- package = Package.new(package_name)
- if not label.empty? then package.label = label end
- if not version.empty? then package.version = version end
- if not os.empty? then package.os = os end
- if not build_host_os.empty? then package.build_host_os = build_host_os end
- if not maintainer.empty? then package.maintainer = maintainer end
- if not attribute.empty? then package.attribute = attribute end
- if not install_dep_list.empty? then package.install_dep_list = install_dep_list end
- if not build_dep_list.empty? then package.build_dep_list = build_dep_list end
- if not source_dep_list.empty? then package.source_dep_list = source_dep_list end
- if not conflicts.empty? then package.conflicts = conflicts end
- if not source.empty? then package.source = source end
- if not src_path.empty? then package.src_path = src_path end
- if not path.empty? then package.path = path end
- if not origin.empty? then package.origin = origin end
- if not checksum.empty? then package.checksum = checksum end
- if not size.empty? then package.size = size end
- if not description.empty? then package.description = description end
- pkglist[[package_name,os]] = package
- package_name = ""
- label = ""
- version = ""
- os = ""
- bulid_host_os = []
- maintainer = ""
- attribute = []
- install_dep_list = []
- build_dep_list = []
- source_dep_list = []
- conflicts = []
- source = ""
- src_path = ""
- path = ""
- origin = ""
- checksum = ""
- size = ""
- description = ""
- end
- next
- end
- # commant
- if l.strip.start_with? "#" then next end
- #contents
- dsic_on = false
- case l.strip.split(':')[0].strip
- when /^Package/i then
- package_name = l.sub(/^[ \t]*Package[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Label/i then
- label = l.sub(/^[ \t]*Label[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Version/i then
- version = l.sub(/^[ \t]*Version[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^OS/i then
- os = l.sub(/^[ \t]*OS[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Build-host-os/i then
- build_host_os = l.sub(/^[ \t]*Build-host-os[ \t]*:[ \t]*/i,"").tr(" \t\n\r", "").split("|")
- disc_on=false
- when /^Maintainer/i then
- maintainer = l.sub(/^[ \t]*Maintainer[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Attribute/i then
- attribute = l.sub(/^[ \t]*Attribute[ \t]*:[ \t]*/i,"").tr(" \t\n\r","").split("|")
- disc_on=false
- when /^Install-dependency/i then
- install_dep_list = dep_parser l.sub(/^[ \t]*Install-dependency[ \t]*:[ \t]*/i,"").split(',')
- disc_on=false
- when /^Build-dependency/i then
- build_dep_list = dep_parser l.sub(/^[ \t]*Build-dependency[ \t]*:[ \t]*/i,"").split(',')
- disc_on=false
- when /^Source-dependency/i then
- source_dep_list = dep_parser l.sub(/^[ \t]*Source-dependency[ \t]*:[ \t]*/i,"").split(',')
- disc_on=false
- when /^Conflicts/i then
- conflicts = dep_parser l.sub(/^[ \t]*Conflicts[ \t]*:[ \t]*/i,"").split(',')
- disc_on=false
- when /^Source/i then
- source = l.sub(/^[ \t]*Source[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Src-path/i then
- src_path = l.sub(/^[ \t]*Src-path[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Path/i then
- path = l.sub(/^[ \t]*Path[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Origin/i then
- origin = l.sub(/^[ \t]*Origin[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^SHA256/i then
- checksum = l.sub(/^[ \t]*SHA256[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Size/i then
- size = l.sub(/^[ \t]*Size[ \t]*:[ \t]*/i,"").strip
- disc_on=false
- when /^Description/i then
- description = l.sub(/^[ \t]*Description[ \t]*:[ \t]*/i,"")
- disc_on=true
- else
- if disc_on then
- description = description + l
- else
- puts "unknown section : #{l}"
- end
- end
+ if l.strip.empty? then
+ #make package and initialize
+ if state == "PACKAGE" then
+ if not package.package_name.empty? then
+ pkglist.push package
+ else
+ raise RuntimeError, "#{file} format is not valid"
+ end
+ end
+ state = "INIT"
+ package = nil
+ next
+ end
+ # comment
+ if l.strip.start_with? "#" then next end
+ # contents
+ case l.strip.split(':')[0].strip
+ when /^Package/i then
+ if only_common then return [common_source, common_version, common_maintainer] end
+ # state control
+ case state
+ when "INIT" then state = "PACKAGE"
+ when "COMMON" then state = "PACKAGE"
+ when "PACKAGE" then
+ if not package.package_name.empty? then
+ pkglist.push package
+ else
+ raise RuntimeError, "Package name is not set in \"#{file}\" file"
+ end
+ end
+ package_name = l.sub(/^[ \t]*Package[ \t]*:[ \t]*/i,"").strip
+ if not package_name.empty? then
+ package = Package.new(package_name)
+ package.source = common_source
+ package.version = common_version
+ package.maintainer = common_maintainer
+ else
+ raise RuntimeError, "Package name is not set in \"#{file}\" file"
+ end
+ disc_on=false
+ when /^Label/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Label field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.label = l.sub(/^[ \t]*Label[ \t]*:[ \t]*/i,"").strip
+ end
+ disc_on=false
+ when /^Version/i then
+ case state
+ when "INIT" , "COMMON" then
+ if common_version.empty? then
+ common_version = l.sub(/^[ \t]*Version[ \t]*:[ \t]*/i,"").strip
+ else
+ raise RuntimeError, "Version information is conflict in \"#{file}\" file\nIf use Version field in Common section then Package section can't contain Version field"
+ end
+ when "PACKAGE" then
+ if common_version.empty? then
+ package.version = l.sub(/^[ \t]*Version[ \t]*:[ \t]*/i,"").strip
+ else
+ raise RuntimeError, "Version information is conflict in \"#{file}\" file\nIf use Version field in Common section then Package section can't contain Version field"
+ end
+ end
+ disc_on=false
+ when /^OS/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support OS field in Common section in \"#{file}\" file"
+ when "PACKAGE" then
+ package.os_list = l.sub(/^[ \t]*OS[ \t]*:[ \t]*/i,"").tr(" \t\n\r", "").split(",")
+ package.os = package.os_list[0]
+ end
+ disc_on=false
+ when /^Build-host-os/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Build-host-os field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.build_host_os = l.sub(/^[ \t]*Build-host-os[ \t]*:[ \t]*/i,"").tr(" \t\n\r", "").split(",")
+ end
+ disc_on=false
+ when /^Maintainer/i then
+ case state
+ when "INIT" , "COMMON" then
+ if common_maintainer.empty? then
+ common_maintainer = l.sub(/^[ \t]*Maintainer[ \t]*:[ \t]*/i,"").strip
+ else
+ raise RuntimeError, "Maintainer information is conflict in \"#{file}\" file\nIf use Maintainer field in Common section then Package section can't contain Maintainer field"
+ end
+ when "PACKAGE" then
+ if common_maintainer.empty? then
+ package.maintainer = l.sub(/^[ \t]*Maintainer[ \t]*:[ \t]*/i,"").strip
+ else
+ raise RuntimeError, "Maintainer information is conflict in \"#{file}\" file\nIf use Maintainer field in Common section then Package section can't contain Maintainer field"
+ end
+ end
+ disc_on=false
+ when /^Attribute/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Attribute field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.attribute = l.sub(/^[ \t]*Attribute[ \t]*:[ \t]*/i,"").tr(" \t\n\r","").split("|")
+ end
+ disc_on=false
+ when /^Install-dependency/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Install-dependency field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.install_dep_list = dep_parser l.sub(/^[ \t]*Install-dependency[ \t]*:[ \t]*/i,"").split(',')
+ end
+ disc_on=false
+ when /^Build-dependency/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Build-dependency field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.build_dep_list = dep_parser l.sub(/^[ \t]*Build-dependency[ \t]*:[ \t]*/i,"").split(',')
+ end
+ disc_on=false
+ when /^Source-dependency/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Source-dependency field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.source_dep_list = dep_parser l.sub(/^[ \t]*Source-dependency[ \t]*:[ \t]*/i,"").split(',')
+ end
+ disc_on=false
+ when /^Conflicts/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Conflicts field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.conflicts = dep_parser l.sub(/^[ \t]*Conflicts[ \t]*:[ \t]*/i,"").split(',')
+ end
+ disc_on=false
+ when /^Source/i then
+ case state
+ when "INIT" , "COMMON" then
+ state = "COMMON"
+ if common_source.empty? then
+ common_source = l.sub(/^[ \t]*Source[ \t]*:[ \t]*/i,"").strip
+ else
+ raise RuntimeError, "Source information is conflict in \"#{file}\" file\nIf use Source field in Common section then Package section can't contain Source field"
+ end
+ when "PACKAGE" then
+ if common_source.empty? then
+ package.source = l.sub(/^[ \t]*Source[ \t]*:[ \t]*/i,"").strip
+ else
+ raise RuntimeError, "Source information is conflict in \"#{file}\" file\nIf use Source field in Common section then Package section can't contain Source field"
+ end
+ end
+ disc_on=false
+ when /^Src-path/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Src-path field in Common section in \"#{file}\" file"
+ when "PACKAGE" then
+ package.src_path = l.sub(/^[ \t]*Src-path[ \t]*:[ \t]*/i,"").strip
+ end
+ disc_on=false
+ when /^ORIGIN/ then
+ # for compatibility
+ next
+ when /^Include/i then
+ case state
+ when "INIT", "COMMON" then
+ pfile = File.dirname(file) + "/" + l.sub(/^[ \t]*Include[ \t]*:[ \t]*/i,"").strip
+ if File.exist? pfile then
+ pkglist = Parser.read_multy_pkginfo_from pfile
+ list = Parser.read_multy_pkginfo_from(pfile, true)
+ common_source = list[0]
+ common_version = list[1]
+ common_maintainer = list[2]
+ else
+ raise RuntimeError, "Not exist \"#{pfile}\""
+ end
+ when "PACKAGE" then raise RuntimeError, "Not support Include field in Common section in \"#{file}\" file"
+ end
+ disc_on=false
+ when /^Path/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Path field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.path = l.sub(/^[ \t]*Path[ \t]*:[ \t]*/i,"").strip
+ end
+ disc_on=false
+ when /^Origin/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Origin field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.origin = l.sub(/^[ \t]*Origin[ \t]*:[ \t]*/i,"").strip
+ end
+ disc_on=false
+ when /^SHA256/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support SHA256 field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.checksum = l.sub(/^[ \t]*SHA256[ \t]*:[ \t]*/i,"").strip
+ end
+ disc_on=false
+ when /^Size/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Size field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.size = l.sub(/^[ \t]*Size[ \t]*:[ \t]*/i,"").strip
+ end
+ disc_on=false
+ when /^Description/i then
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Description field in Common section in \"#{file}\" file"
+ when "PACKAGE" then package.description = l.sub(/^[ \t]*Description[ \t]*:[ \t]*/i,"")
+ end
+ disc_on=true
+ when /^C-/ then
+ #custom field
+ case state
+ when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file"
+ when "COMMON" then raise RuntimeError, "Not support Description field in Common section in \"#{file}\" file"
+ when "PACKAGE" then
+ if package.custom.empty? then
+ package.custom = l.strip
+ else
+ package.custom = package.custom + "\n" + l.strip
+ end
+ end
+ disc_on=false
+ else
+ if disc_on and state == "PACKAGE" then
+ package.description = package.description + l
+ else
+ raise RuntimeError, "Can't parse below line in \"#{file}\" file \n\t#{l}"
+ end
+ end
+ end
- end
- #i essent
-
- # check last package
- if not package_name.empty? and not os.empty? then
- package = Package.new(package_name)
- if not label.empty? then package.label = label end
- if not version.empty? then package.version = version end
- if not os.empty? then package.os = os end
- if not build_host_os.empty? then package.build_host_os = build_host_os end
- if not maintainer.empty? then package.maintainer = maintainer end
- if not attribute.empty? then package.attribute = attribute end
- if not install_dep_list.empty? then package.install_dep_list = install_dep_list end
- if not build_dep_list.empty? then package.build_dep_list = build_dep_list end
- if not source_dep_list.empty? then package.source_dep_list = source_dep_list end
- if not conflicts.empty? then package.conflicts = conflicts end
- if not source.empty? then package.source = source end
- if not src_path.empty? then package.src_path = src_path end
- if not path.empty? then package.path = path end
- if not origin.empty? then package.origin = origin end
- if not checksum.empty? then package.checksum = checksum end
- if not size.empty? then package.size = size end
- if not description.empty? then package.description = description end
- pkglist[[package_name,os]] = package
- end
- end
- return pkglist
- end
+ if only_common then return [common_source, common_version, common_maintainer] end
+
+ # check last package
+ if state == "PACKAGE" then
+ if not package.package_name.empty? then
+ pkglist.push package
+ else
+ raise RuntimeError, "Package name is not set in \"#{file}\" file"
+ end
+ end
+ end
+ return pkglist
+ end
def Parser.read_single_pkginfo_from (file)
- return read_repo_pkg_list_from(file).values[0]
- end
+ return read_multy_pkginfo_from(file)[0]
+ end
def Parser.read_repo_pkg_list_from (file)
result = {}
- read_multy_pkginfo_from(file).values.each { |x| result[x.package_name]=x }
+ read_multy_pkginfo_from(file).each { |x| result[x.package_name]=x }
return result
- end
+ end
+
+ #for test
+ def Parser.print (array)
+ array.each do |package|
+ puts package.to_s
+ puts ""
+ end
+ end
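# --- Illustrative sketch (not part of the patch) ---
# Exercising the section-based parser with a minimal manifest that has a
# Common section followed by one Package section. The field values are made
# up; assumes parser.rb and package.rb above have been loaded.
require "tempfile"

manifest_text = <<-MANIFEST
Source : example-src
Version : 1.0.0
Maintainer : Example Maintainer <maintainer@example.org>

Package : example-pkg
OS : ubuntu-32, windows-32
Build-host-os : ubuntu-32
MANIFEST

Tempfile.open("pkginfo.manifest") do |f|
  f.write manifest_text
  f.flush
  Parser.print( Parser.read_multy_pkginfo_from(f.path) )
end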
private
def Parser.dep_parser (string_list)
class Utils
- if defined?(HOST_OS).nil? then
+ def Utils.identify_current_OS()
+ os = "UnsupportedOS"
+
case `uname -s`.strip
when "Linux"
- HOST_OS = "linux"
- when /MINGW32.*/
- HOST_OS = "windows"
+ if File.exist? "/etc/debian_version" then
+ arch = (`uname -i`.strip == "x86_64") ? "64" : "32"
+ os = "ubuntu-#{arch}"
+ elsif File.exist? "/etc/redhat-release" then
+ os = "redhat-unknown"
+ elsif File.exist? "/etc/SuSE-release" then
+ os = "opensuse-unknown"
+ elsif File.exist? "/etc/mandrake-release" then
+ os = "mandrake-unknown"
+ end
+ when "MINGW32_NT-5.1"
+ progfile_path = Utils.execute_shell_return("echo $PROGRAMFILES","windows")[0].strip
+ if progfile_path.include?("(x86)") then arch = "64" else arch = "32" end
+ os = "windows-#{arch}"
+ when "MINGW32_NT-6.1"
+ progfile_path = Utils.execute_shell_return("echo $PROGRAMFILES","windows")[0].strip
+ if progfile_path.include?("(x86)") then arch = "64" else arch = "32" end
+ os = "windows-#{arch}"
when "Darwin"
- HOST_OS = "darwin"
- else
- end
+ os = "macos-64"
+ end
+
+ return os
end
- # set static variable in WORKING_DIR, HOME
- if defined?(WORKING_DIR).nil? then WORKING_DIR = Dir.pwd end
- if defined?(HOME).nil? then
- # get home directory, using Dir.chdir
- Dir.chdir
- HOME = Dir.pwd
- Dir.chdir WORKING_DIR
+
+ def Utils.check_host_OS()
+ if HOST_OS == "ubuntu-32" or
+ HOST_OS == "ubuntu-64" or
+ HOST_OS == "windows-32" or
+ HOST_OS == "windows-64" or
+ HOST_OS == "macos-64" then
+
+ return true
+ else
+ return false
+ end
end
+
def Utils.create_uniq_name
time = Time.new
# uniq snapshot_name name is year_month_day_hour_min_sec_microsec
end
- def Utils.execute_shell(cmd)
+ def Utils.execute_shell(cmd, os_category = nil)
ret = false
- if HOST_OS.eql? "windows" then
+ if os_category.nil? then os_category = get_os_category( HOST_OS ) end
+
+ if os_category == "windows" then
mingw_path = "sh.exe -c "
cmd = cmd.gsub("\"", "\\\"")
cmd = mingw_path + "\"#{cmd}\""
end
- def Utils.execute_shell_return(cmd)
+ def Utils.execute_shell_return(cmd, os_category = nil)
result_lines = []
- if HOST_OS.eql? "windows" then
+ if os_category.nil? then os_category = get_os_category( HOST_OS ) end
+
+ if os_category == "windows" then
mingw_path = "sh.exe -c "
cmd = cmd.gsub("\"", "\\\"")
cmd = mingw_path + "\"#{cmd}\""
end
end
- def Utils.execute_shell_return_ret(cmd)
- if HOST_OS.eql? "windows" then
+ def Utils.execute_shell_return_ret(cmd, os_category = nil)
+ if os_category.nil? then os_category = get_os_category( HOST_OS ) end
+
+ if os_category == "windows" then
mingw_path = "sh.exe -c "
cmd = cmd.gsub("\"", "\\\"")
cmd = mingw_path + "\"#{cmd}\""
return `#{cmd}`
end
- def Utils.execute_shell_with_log(cmd, log)
+ def Utils.execute_shell_with_log(cmd, log, os_category = nil)
+
+ if os_category.nil? then os_category = get_os_category( HOST_OS ) end
- if HOST_OS.eql? "windows" then
+ if os_category == "windows" then
mingw_path = "sh.exe -c "
cmd = cmd.gsub("\"", "\\\"")
cmd = mingw_path + "\"#{cmd}\""
def Utils.is_absolute_path(path)
- if HOST_OS.eql? "linux" or HOST_OS.eql? "darwin" then
+ if is_unix_like_os( HOST_OS ) then
# if the path starts with "/" then it is an absolute path
if path.start_with?("/") then
return true
else
return false
end
- elsif HOST_OS.eql? "windows" then
+ elsif is_windows_like_os( HOST_OS ) then
# if the path starts with "c:/" or "D:/" or ... then it is an absolute path
if path =~ /^[a-zA-Z]:[\/]/ then
return true
# this will be used on MinGW/MSYS
def Utils.get_unix_path(path)
- if HOST_OS.eql? "linux" or HOST_OS.eql? "darwin" then
+ if is_unix_like_os( HOST_OS ) then
return path
- elsif HOST_OS.eql? "windows" then
+ elsif is_windows_like_os( HOST_OS ) then
new_path = path
if is_absolute_path( new_path ) then
new_path = "/" + new_path[0,1] + new_path[2..-1]
return true
end
end
+
+
+ # check if the os is windows-like
+ def Utils.is_windows_like_os(os_name)
+ if os_name.start_with? "windows-" then
+ return true
+ else
+ return false
+ end
+ end
+
+
+ # check if the os is unix-like
+ def Utils.is_unix_like_os(os_name)
+ if os_name.start_with? "ubuntu-" or
+ os_name.start_with?"macos-" then
+ return true
+ else
+ return false
+ end
+ end
+
+
+ # check if the os is linux-like
+ def Utils.is_linux_like_os(os_name)
+ if os_name.start_with? "ubuntu-" then
+ return true
+ else
+ return false
+ end
+ end
+
+
+ # check if the os is macos-like
+ def Utils.is_macos_like_os(os_name)
+ if os_name.start_with?"macos-" then
+ return true
+ else
+ return false
+ end
+ end
+
+
+ def Utils.get_os_category(os_name)
+ if os_name.start_with? "ubuntu-" then
+ return "linux"
+ elsif os_name.start_with?"macos-" then
+ return "macos"
+ elsif os_name.start_with? "windows-" then
+ return "windows"
+ else
+ return os_name
+ end
+ end
+
+
+ def Utils.get_package_name_from_package_file( local_path )
+ filename = File.basename(local_path)
+ if filename =~ /.*_.*_.*\.zip/ then
+ new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3')
+ return new_name.split(",")[0]
+ end
+ return nil
+ end
+
+
+ def Utils.get_version_from_package_file( local_path )
+ filename = File.basename(local_path)
+ if filename =~ /.*_.*_.*\.zip/ then
+ new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3')
+ return new_name.split(",")[1]
+ end
+ return nil
+ end
+
+
+ def Utils.get_os_from_package_file( local_path )
+ filename = File.basename(local_path)
+ if filename =~ /.*_.*_.*\.zip/ then
+ new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3')
+ return new_name.split(",")[2]
+ end
+ return nil
+ end
+
+
+ if defined?(HOST_OS).nil? then
+ HOST_OS = Utils.identify_current_OS()
+ end
+
+ # set static variable in WORKING_DIR, HOME
+ if defined?(WORKING_DIR).nil? then WORKING_DIR = Dir.pwd end
+ if defined?(HOME).nil? then
+ # get home directory, using Dir.chdir
+ Dir.chdir
+ HOME = Dir.pwd
+ Dir.chdir WORKING_DIR
+ end
+
+
end
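# --- Illustrative sketch (not part of the patch) ---
# Quick tour of the new Utils helpers: OS-category mapping and the
# name_version_os.zip file-name parsing. The file name below is made up;
# assumes the Utils class above has been loaded.
["ubuntu-32", "windows-64", "macos-64"].each do |os|
  puts "#{os} -> #{Utils.get_os_category(os)}"   # linux / windows / macos
end

pkg_file = "example-pkg_1.0.0_ubuntu-32.zip"
puts Utils.get_package_name_from_package_file(pkg_file)  # => example-pkg
puts Utils.get_version_from_package_file(pkg_file)       # => 1.0.0
puts Utils.get_os_from_package_file(pkg_file)            # => ubuntu-32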
@log.info "SocketRegisterListener entering main loop"
# server open
begin
- @comm_server = BuildCommServer.new(@parent_server.port, @log)
+ @comm_server = BuildCommServer.create(@parent_server.port, @log)
rescue => e
@log.info "Server creation failed"
@log.error e.message
# parse request
cmd = ""
- if req_line.split(",").count > 0 then
- cmd = req_line.split(",")[0].strip
+ if req_line.split("|").count > 0 then
+ cmd = req_line.split("|")[0].strip
end
case cmd
# "Register"
def handle_cmd_register( line, req )
@log.info "Received register REQ : #{line}"
-
- @parent_server.reload_dist_package()
-
- tok = line.split(",").map { |x| x.strip }
BuildCommServer.send_begin(req)
+
+ tok = line.split("|").map { |x| x.strip }
if tok.count < 3 then
@log.error "Received Wrong REQ : #{line}"
BuildCommServer.send(req, "ERROR,Invalid REQ format")
end
begin
- snapshot_name = @parent_server.register( file_path_list, dist_name, true, false)
+ @parent_server.reload_dist_package()
+ snapshot_name = @parent_server.register( file_path_list, dist_name, true, false, true)
rescue => e
@log.error "register failed"
@log.error e.message
@log.error e.backtrace.inspect
- BuildCommServer.send(req, "ERROR,#{e.message}")
+ BuildCommServer.send(req, "ERROR|#{e.message}")
@parent_server.release_lock_file
return
end
- BuildCommServer.send(req,"SUCC,#{snapshot_name}")
+ BuildCommServer.send(req,"SUCC|#{snapshot_name}")
BuildCommServer.send_end(req)
end
-
=begin
-
+
client.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
class Client
# constant
- SUPPORTED_OS = ["linux", "windows", "darwin"]
PKG_LIST_FILE_PREFIX = "pkg_list_"
INSTALLED_PKG_LIST_FILE = "installedpackage.list"
CONFIG_PATH = "#{PackageServerConfig::CONFIG_ROOT}/client"
PACKAGE_INFO_DIR = ".info"
DEFAULT_INSTALL_DIR = "#{Utils::HOME}/build_root"
DEFAULT_SERVER_ADDR = "http://172.21.17.55/dibs/unstable"
+ OS_INFO_FILE = "os_info"
- attr_accessor :server_addr, :location, :pkg_hash_os, :is_server_remote, :installed_pkg_hash_loc, :archive_pkg_list, :all_dep_list, :log
+ attr_accessor :server_addr, :location, :pkg_hash_os, :is_server_remote, :installed_pkg_hash_loc, :archive_pkg_list, :all_dep_list, :log, :support_os_list, :config_dist_path
public
# initialize
if server_addr.nil? then server_addr = get_default_server_addr() end
if location.nil? then location = get_default_inst_dir() end
- # chop server address, if end with "/"
- if server_addr.strip.end_with? "/" then server_addr = server_addr.chop end
-
@server_addr = server_addr
@location = location
@pkg_hash_os = {}
@archive_pkg_list = []
@all_dep_list = []
@is_server_remote = Utils.is_url_remote(server_addr)
+ @support_os_list = []
+ @config_dist_path = CONFIG_PATH + "/" + get_distribution
+
+ # create directory
+ if not File.exist? @config_dist_path then FileUtils.mkdir_p "#{@config_dist_path}" end
+
+ # chop server address, if end with "/"
+ if server_addr.strip.end_with? "/" then server_addr = server_addr.chop end
# set log
if logger.nil? or logger.class.to_s.eql? "String" then
# read installed pkg list, and create hash
if not File.exist? @location then FileUtils.mkdir_p "#{@location}" end
- create_installed_pkg_hash()
-
- # readk remote pkg list, and hash list
- create_remote_pkg_hash(false)
- @log.info "Initialize - #{server_addr}, #{location}"
- end
+ create_installed_pkg_hash()
+
+ # read remote os list
+ get_remote_pkg_os_list()
+
+ # read remote pkg list, and hash list
+ update()
+ @log.info "Initialize - #{server_addr}, #{location}"
+ end
public
# update package list from server
def update()
# update operation must be processed one by one
$update_mutex.synchronize {
- if not create_remote_pkg_hash(true) then
+ if not create_remote_pkg_hash(@is_server_remote) then
@log.error "\"#{@server_addr}\" does not have package list file properly."
return false
end
result = false
filename = "archive_pkg_list"
- local_file_path = File.join(CONFIG_PATH, filename)
+ local_file_path = File.join(@config_dist_path, filename)
if File.exist? local_file_path then
File.open(local_file_path, "r") do |f|
f.each_line do |l|
binary_list = []
binary_path_list.each do |bpath|
filename = File.basename(bpath)
- client = BuildCommClient.create(ip, port)
+ client = BuildCommClient.create(ip, port, @log)
if client.nil? then
@log.error "Failed to create BuildCommClient instance.."
end
# register file
- client = BuildCommClient.create(ip, port)
- dist = get_distribution
- if dist.empty? then
- @log.error "Distribution is empty.."
- return nil
- end
-
- @log.info "Send register message.. [REGISTER,#{dist},#{binary_list.join(",")}]"
- snapshot = nil
- if client.send "REGISTER,#{dist},#{binary_list.join(",")}" then
- output = client.read_lines do |l|
- line = l.split(",")
- if line[0].strip == "ERROR" then
- @log.error l.strip
- return nil
- elsif line[0].strip == "SUCC" then
- snapshot = line[1].strip
- end
- end
- if not output then
- @log.error "Failed to register"
+ if not binary_list.empty? then
+ client = BuildCommClient.create(ip, port, @log)
+ dist = get_distribution
+ if dist.empty? then
+ @log.error "Distribution is empty.."
return nil
end
- end
+
+ @log.info "Send register message.. [REGISTER|#{dist}|#{binary_list.join("|")}]"
+ snapshot = nil
+ if client.send "REGISTER|#{dist}|#{binary_list.join("|")}" then
+ output = client.read_lines do |l|
+ line = l.split("|")
+ if line[0].strip == "ERROR" then
+ @log.error l.strip
+ return nil
+ elsif line[0].strip == "SUCC" then
+ snapshot = line[1].strip
+ end
+ end
+ if not output then
+ @log.error "Failed to register"
+ return nil
+ end
+ end
- client.terminate
- snapshot = @server_addr + "/snapshots/" + snapshot
- @log.info "Registered successfully! [#{binary_path_list.join(",")}]"
- if snapshot.empty? then
- @log.error "Failed to generate snapshot"
- end
+ client.terminate
+ snapshot = @server_addr + "/snapshots/" + snapshot
+ @log.info "Registered successfully! [#{binary_path_list.join("|")}]"
+ if snapshot.empty? then
+ @log.error "Failed to generate snapshot"
+ end
+ end
return snapshot
end
@log.info "Removed #{manifest_path}"
@log.info "Removed #{path}"
raise Interrupt
+ rescue RuntimeError => e
+ @log.error( e.message, Log::LV_USER)
+ FileUtils.rm_f(manifest_path)
+ FileUtils.remove_dir(path, true)
+ @log.info "Removed #{mainfest_path}"
+ @log.info "Removed #{path}"
+ return false
end
new_pkg_ver = pkg.version
new_pkg_install_dep_list = pkg.install_dep_list
rescue Interrupt
@log.error "Client: Interrupted.."
FileUtils.rm_f(manifest_path)
- FileUtils.remove_dir(path, true)
+ FileUtils.remove_dir(path, true)
@log.info "Removed #{mainfest_path}"
@log.info "Removed #{path}"
raise Interrupt
+ rescue RuntimeError => e
+ @log.error( e.message, Log::LV_USER)
+ FileUtils.rm_f(manifest_path)
+ FileUtils.remove_dir(path, true)
+ @log.info "Removed #{mainfest_path}"
+ @log.info "Removed #{path}"
+ return false
end
compare_result = compare_version_with_installed_pkg(pkg_name, new_pkg_ver)
end
if File.exist? @location then FileUtils.rm_rf(@location) end
FileUtils.mkdir_p(@location)
- @pkg_hash_os.clear
- @installed_pkg_hash_loc.clear
- @archive_pkg_list.clear
+ #@pkg_hash_os.clear
+ #@installed_pkg_hash_loc.clear
+ #@archive_pkg_list.clear
@log.info "Cleaned \"#{@location}\" path.. OK"
end
result = []
pkg_hash = @pkg_hash_os[os]
+ if pkg_hash.nil? then return [] end
pkg_list = pkg_hash.values
pkg_list.each do |pkg|
pkg.build_dep_list.each do |dep|
def get_reverse_source_dependent_packages(pkg_name)
result = []
- for os in SUPPORTED_OS
+ for os in @support_os_list
pkg_hash = @pkg_hash_os[os]
pkg_list = pkg_hash.values
pkg_list.each do |pkg|
def read_pkginfo_file(pkg_name, path)
file_path = File.join(path, "pkginfo.manifest")
- pkg = Parser.read_single_pkginfo_from file_path
+ begin
+ pkg = Parser.read_single_pkginfo_from file_path
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ return nil
+ end
if pkg.nil? then
@log.error "Failed to read manifest file : #{file_path}"
end
private
+ def get_remote_pkg_os_list()
+ file_url = @server_addr + "/" + OS_INFO_FILE
+
+ if(is_server_remote) then
+ FileDownLoader.download(file_url, @config_dist_path)
+ File.open( "#{@config_dist_path}/#{OS_INFO_FILE}", "r" ) do |f|
+ f.each_line do |l|
+ @support_os_list.push l.strip
+ end
+ end
+ else
+ if File.exist?(file_url) then
+ File.open(file_url) do |f|
+ f.each_line do |l|
+ @support_os_list.push l.strip
+ end
+ end
+ end
+ end
+ end
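get_remote_pkg_os_list above treats os_info as a plain text file with one supported OS name per line, fetched from the package server next to the pkg_list_* files. A minimal equivalent read, with illustrative contents shown in the comment (the OS names are examples taken from the test cases):

    # os_info is expected to contain one OS per line, e.g.
    #   ubuntu-32
    #   windows-32
    support_os_list = File.readlines("os_info").map { |l| l.strip }.reject { |l| l.empty? }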
+
# from_server : if true, update from server
def create_remote_pkg_hash(from_server)
-
- for os in SUPPORTED_OS
+ for os in @support_os_list
filename = PKG_LIST_FILE_PREFIX + os
file_url = @server_addr + "/" + filename
- local_file_path = File.join(CONFIG_PATH, filename)
- if from_server then
- if not FileDownLoader.download(file_url, CONFIG_PATH) then
+ local_file_path = File.join(@config_dist_path, filename)
+ if from_server then
+ if not FileDownLoader.download(file_url, @config_dist_path) then
return false
end
+ else
+ FileUtils.cp(file_url, @config_dist_path)
end
- local_file_path = File.join(CONFIG_PATH, filename)
+ local_file_path = File.join(@config_dist_path, filename)
if File.exist? local_file_path then
- pkg_hash = Parser.read_repo_pkg_list_from local_file_path
- @pkg_hash_os[os] = pkg_hash
- else
- @pkg_hash_os[os] = {}
- end
- end
+ begin
+ pkg_hash = Parser.read_repo_pkg_list_from local_file_path
+ @pkg_hash_os[os] = pkg_hash
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ @pkg_hash_os[os] = {}
+ end
+ else
+ @pkg_hash_os[os] = {}
+ end
+ end
filename = "archive_pkg_list"
file_url = @server_addr + "/" + filename
- if from_server then
- if not FileDownLoader.download(file_url, CONFIG_PATH) then
+ if from_server then
+ if not FileDownLoader.download(file_url, @config_dist_path) then
@log.warn "Server does not have \"#{filename}\" file. This error can be ignored."
end
+ else
+ FileUtils.cp(file_url, @config_dist_path)
end
- local_file_path = File.join(CONFIG_PATH, filename)
+ local_file_path = File.join(@config_dist_path, filename)
if File.exist? local_file_path then
File.open(local_file_path, "r") do |f|
f.each_line do |l|
end
end
- return true
- end
-
- private
- # create installed package hash
- def create_installed_pkg_hash()
+ return true
+ end
+
+ private
+ # create installed package hash
+ def create_installed_pkg_hash()
config_path = File.join(@location, PACKAGE_INFO_DIR)
if not File.directory? config_path then return end
installed_pkg_hash_key = get_installed_pkg_list_file_path()
if @installed_pkg_hash_loc.has_key? installed_pkg_hash_key then return
- else
+ else
file_path = installed_pkg_hash_key
if not File.exist? file_path then
#raise RuntimeError, "#{file_path} file does not exist"
return
end
- pkg_hash = Parser.read_repo_pkg_list_from file_path
- @installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash
- end
- end
+ begin
+ pkg_hash = Parser.read_repo_pkg_list_from file_path
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ return
+ end
+ @installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash
+ end
+ end
private
# check to exist installed package list file
if not File.exist? config_path then FileUtils.mkdir_p "#{config_path}" end
if File.exist? file_path then File.delete(file_path) end
File.open(file_path, "a+") do |file|
- file.puts "ORIGIN : #{@server_addr}"
- file.puts "\n"
pkg_list = pkg_hash.values
pkg_list.each do |pkg|
pkg.print_to_file(file)
+ "\t" + "list-rpkg Show the all packages in the package-server." + "\n" \
+ "\t" + "show-lpkg Show the package in your SDK environment." + "\n" \
+ "\t" + "list-lpkg Show the all packages in your SDK environment." + "\n" \
+ + "\t" + "build-dep Show build-dependency packages" + "\n" \
+ + "\t" + "install-dep Show install-dependency packages" + "\n" \
+ "\n" + "Subcommand usage:" + "\n" \
+ "\t" + "pkg-cli update [-u <remote server url>]" + "\n" \
+ "\t" + "pkg-cli clean [-l <location>] [--force]" + "\n" \
+ "\t" + "pkg-cli list-rpkg [-o <os>] [-u <package server url>]" + "\n" \
+ "\t" + "pkg-cli show-lpkg -P <package name> [-l <location>]" + "\n" \
+ "\t" + "pkg-cli list-lpkg [-l <location>]" + "\n" \
+ + "\t" + "pkg-cli build-dep -P <package name> [-o <os>]" + "\n" \
+ + "\t" + "pkg-cli install-dep -P <package name> [-o <os>]" + "\n" \
+ "\n" + "Options:" + "\n"
optparse = OptionParser.new(nil, 32, ' '*8) do|opts|
options[:pkg] = name
end
- opts.on( '-o', '--os <operating system>', 'target operating system: linux/windows/darwin' ) do |os|
+ opts.on( '-o', '--os <operating system>', 'target operating system: ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64' ) do |os|
options[:os] = os
end
attr_accessor :name, :location, :server_url, :lock_file_name
# constant
- SUPPORTED_OS = ["linux", "windows", "darwin"]
PKG_LIST_FILE_PREFIX = "pkg_list_"
ARCHIVE_PKG_FILE = "archive_pkg_list"
@log = pkg_server.log
@integrity = pkg_server.integrity
@lock_file_name = "#{location}/.lock_file"
- @snapshot_info_file = "#{location}/snapshot.info"
+ @os_info_file_path = "#{location}/os_info"
+ @snapshot_info_file_path = "#{location}/snapshot.info"
@pkg_hash_os = {}
@archive_pkg_list = []
@snapshot_hash = []
+ @support_os_list = []
@log.info "Distribution class[#{name}] initialize "
initialize_pkg_list()
end
- def register (file_path, pkg)
+ def register (file_path, pkg, internal_flag)
if pkg.nil? then
raise RuntimeError, "package file does not contain pkginfo.manifest: [#{file_path}]"
end
+ if not @pkg_hash_os.has_key?(pkg.os) then
+ raise RuntimeError, "package server does not support package's os : [#{pkg.os}]"
+ end
+
exist_pkg = @pkg_hash_os[pkg.os][pkg.package_name]
# version check: is the existing version higher than the uploaded version?
- if not exist_pkg.nil?
+ if (not exist_pkg.nil?) and (not internal_flag) then
if not ( Utils.compare_version( exist_pkg.version, pkg.version ).eql? 1 ) then
- raise RuntimeError, "existing package's version is higher than register package : [#{pkg.package_name}] in [#{pkg.os}]"
+ raise RuntimeError, "existing package's version is higher then register package : [#{pkg.package_name}] in [#{pkg.os}]"
end
end
pkg.origin = "local"
pkg.source = ""
pkg.path = "/binary/" + File.basename( file_path )
- # TODO: windows and mac : sha256sum
- if Utils::HOST_OS.eql? "linux" then
- pkg.checksum = `sha256sum #{file_path}`.split(" ")[0]
- end
- pkg.size = `du -b #{file_path}`.split[0].strip
+ if pkg.checksum.empty? then
+ # TODO: windows and mac : sha256sum
+ if Utils.is_unix_like_os( Utils::HOST_OS ) then
+ pkg.checksum = `sha256sum #{file_path}`.split(" ")[0]
+ end
+ end
+
+ if pkg.size.empty? then
+ pkg.size = `du -b #{file_path}`.split[0].strip
+ end
@pkg_hash_os[pkg.os][pkg.package_name] = pkg
pkg.source = ""
pkg.path = "/temp/" + File.basename( file_path )
# TODO: windows and mac : sha256sum
- if Utils::HOST_OS.eql? "linux" then
+ if Utils.is_unix_like_os( Utils::HOST_OS ) then
pkg.checksum = `sha256sum #{file_path}`.split(" ")[0]
end
pkg.size = `du -b #{file_path}`.split[0].strip
end
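Both register paths above still shell out to sha256sum and du, which the TODO notes is only valid on unix-like hosts. A hedged alternative sketch, not part of this patch, that stays within the Ruby standard library and would also work on Windows and macOS:

    require "digest"

    # portable checksum and byte size, assuming file_path points at the uploaded package file
    pkg.checksum = Digest::SHA256.file(file_path).hexdigest
    pkg.size     = File.size(file_path).to_s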
# copy package list
- for os in SUPPORTED_OS
+ for os in @support_os_list
FileUtils.copy( "#{snapshot_path}/#{PKG_LIST_FILE_PREFIX}#{os}", \
"#{@location}/snapshots/#{name}/#{PKG_LIST_FILE_PREFIX}#{os}" )
end
FileUtils.copy( "#{snapshot_path}/#{ARCHIVE_PKG_FILE}", \
"#{@location}/snapshots/#{name}/#{ARCHIVE_PKG_FILE}" )
- File.open(@snapshot_info_file, "a") do |f|
+ File.open(@snapshot_info_file_path, "a") do |f|
f.puts "name : #{name}"
f.puts "time : #{Time.now.strftime("%Y%m%d%H%M%S")}"
if from_cmd then
return name
end
- def sync( force, os )
+ def sync(force)
# check distribution's server_url
if @server_url.empty? then
@log.error("This distribution has not remote server", Log::LV_USER)
# generate client class
client = Client.new( @server_url, "#{@location}/binary", @log )
- client.update
- # error check
- if client.pkg_hash_os[os].nil? then
- raise "Package list can't generated. url is #{@server_url}. os is #{os}"
- end
+ # update os list
+ client.support_os_list.each do |os|
+ if not @support_os_list.include? os then
+ add_os(os)
+ end
+ end
- server_pkg_name_list = client.pkg_hash_os[os].keys
- local_pkg_name_list = @pkg_hash_os[os].keys
- full_pkg_name_list = server_pkg_name_list + local_pkg_name_list
+ for os in @support_os_list
+ # error check
+ if client.pkg_hash_os[os].nil? then
+ raise "Package list can't generated. url is #{@server_url}. os is #{os}"
+ end
- full_pkg_name_list.each do |pkg_name|
- sync_package( pkg_name, client, os, force )
+ server_pkg_name_list = client.pkg_hash_os[os].keys
+ local_pkg_name_list = @pkg_hash_os[os].keys
+ full_pkg_name_list = server_pkg_name_list + local_pkg_name_list
+
+ full_pkg_name_list.each do |pkg_name|
+ sync_package( pkg_name, client, os, force )
+ end
end
-
- write_pkg_list(os)
+
+ write_all_pkg_list()
end
def sync_archive_pkg
client = Client.new( @server_url, "#{@location}/source", @log )
- client.update
download_list = client.archive_pkg_list - @archive_pkg_list
download_list.each do |pkg|
write_archive_pkg_list()
end
+ def add_os(os)
+ if @support_os_list.include? os then
+ @log.error("[#{os} is already exist ", Log::LV_USER)
+ end
+
+ # update os information
+ @support_os_list.push os
+ @pkg_hash_os[os] = {}
+ File.open(@os_info_file_path, "a") do |f|
+ f.puts os
+ end
+
+ # create pkg_list_#{os} file
+ File.open( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", "w" ) do |f| end
+ end
+
def clean( snapshot_list )
file_list = []
used_archive_list = []
# collect remaining file names from the current package server version
- for os in SUPPORTED_OS
+ for os in @support_os_list
@pkg_hash_os[os].each_value{ |pkg|
file_list.push(pkg.path.sub("/binary/",""))
# collect remaining file names from the snapshot list
for snapshot in snapshot_list
- for os in SUPPORTED_OS
- pkg_list = Parser.read_repo_pkg_list_from "#{@location}/snapshots/#{snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}"
-
- pkg_list.each_value{ |pkg|
- file_list.push(pkg.path.sub("/binary/",""))
- }
+ for os in @support_os_list
+ begin
+ pkg_list = Parser.read_repo_pkg_list_from "#{@location}/snapshots/#{snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}"
+
+ pkg_list.each_value{ |pkg|
+ file_list.push(pkg.path.sub("/binary/",""))
+ }
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ end
end
used_archive_list = used_archive_list + read_archive_pkg_list( snapshot )
clean_snapshot_info_file(snapshot_list)
end
+ def write_all_pkg_list
+ for os in @support_os_list
+ write_pkg_list(os)
+ end
+ end
+
def write_pkg_list( os )
+ # if input os is empty then return
+ if os.nil? or os.empty? then return end
File.open( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", "w" ) do |f|
@pkg_hash_os[os].each_value do |pkg|
# if pkginfo.manifest file exist
if not ret.nil? then
- pkg = Parser.read_single_pkginfo_from "#{tmp_dir}/pkginfo.manifest"
-
+ begin
+ pkg = Parser.read_single_pkginfo_from "#{tmp_dir}/pkginfo.manifest"
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ return nil
+ end
+
FileUtils.rm_rf tmp_dir
return pkg
- # if pkginfo.manifest file does not exist
+ # if pkginfo.manifest file does not exist
else
FileUtils.rm_rf tmp_dir
return nil
end
def remove_pkg( pkg_name_list, os, recursive )
- if os.eql? "all" then os_list = SUPPORTED_OS
+ if os.eql? "all" then os_list = @support_os_list
else os_list = [ os ]
end
removed_flag = false
for os in os_list
+ if not @support_os_list.include? os then
+ @log.error( "package server does not support input os : #{os}")
+ next
+ end
+
if @pkg_hash_os[os].key?(package_name) then
@log.info( "remove package [#{package_name}] in #{os}", Log::LV_USER)
@pkg_hash_os[os].delete(package_name)
# update pkg_list file
- for os in SUPPORTED_OS
+ for os in os_list
write_pkg_list(os)
end
write_archive_pkg_list
end
end
+ # modify info file
+ info_file = File.readlines(@snapshot_info_file_path)
+ File.open(@snapshot_info_file_path, 'w') do |f|
+ remove_flag = false
+ info_file.each { |line|
+ if line =~ /name :/ then
+ if snapshot_list.include? line.split(':')[1].strip then
+ remove_flag = true
+ else
+ remove_flag = false
+ end
+
+ end
+
+ if not remove_flag then
+ f.puts line
+ end
+ }
+ end
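The rewrite above drops removed snapshots from snapshot.info by switching a remove flag on each "name :" line. Given how snapshots are appended earlier (a "name :" line followed by a "time :" line per snapshot), the file being filtered looks roughly like the illustrative block below; every name/time pair whose name appears in snapshot_list is skipped:

    name : 20120801093000
    time : 20120801093000
    name : release-1
    time : 20120802110000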
+
if not snapshot_list.empty? then
@log.output( "snapshot not exist : #{snapshot_list.join(",")}", Log::LV_USER )
end
def check_integrity
@log.info "check server pkg's install dependency integrity"
- for os in SUPPORTED_OS
+ for os in @support_os_list
for pkg in @pkg_hash_os[os].each_value
check_package_integrity(pkg)
end
end
def initialize_pkg_list
+ if not File.exist? @os_info_file_path then
+ return
+ end
+
+ # get support_os_list
+ File.open( @os_info_file_path, "r" ) do |f|
+ f.each_line do |l|
+ @support_os_list.push l.strip
+ end
+ end
+
# read package_list file
- for os in SUPPORTED_OS
+ for os in @support_os_list
@pkg_hash_os[os] = {}
pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}"
-
+
if File.exist? pkg_list_file then
- @pkg_hash_os[os] = Parser.read_repo_pkg_list_from( pkg_list_file )
+ begin
+ @pkg_hash_os[os] = Parser.read_repo_pkg_list_from( pkg_list_file )
+ rescue => e
+ @log.error( e.message, Log::LV_USER)
+ @pkg_hash_os[os] = nil
+ end
end
end
@archive_pkg_list = read_archive_pkg_list("")
end
+ def get_link_package(pkg, pkg_os)
+ pkg.os_list.each do |os|
+ # skip the same os as the origin package
+ if pkg_os.eql? os then next end
+ # skip in unsupported os
+ if not @support_os_list.include? os then next end
+
+ exist_pkg = @pkg_hash_os[os][pkg.package_name]
+ if exist_pkg.nil? then next end
+
+ compare_version = Utils.compare_version(pkg.version, exist_pkg.version)
+ # if the versions are equal, the packages are compatible
+ if compare_version.eql? 0 then
+ return exist_pkg
+ end
+ end
+
+ return nil
+ end
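get_link_package is what lets register deduplicate identical binaries across OSes: if a package with the same name and version already exists under another OS in the package's os_list, the caller hard-links the stored file instead of keeping a second copy and inherits its checksum and size (see the link_pkg_file_path_list handling in the package server's register flow below). A hedged usage sketch with a made-up OS name:

    # sketch: an ubuntu-32 upload of a multi-OS package reuses an existing compatible binary
    link_pkg = distribution.get_link_package(pkg, "ubuntu-32")
    if link_pkg.nil? then
      binary_pkg_file_path_list.push file_path   # no compatible copy: store the file itself
    else
      pkg.checksum = link_pkg.checksum           # reuse the already-stored file's metadata
      pkg.size = link_pkg.size
    end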
+
# PRIVATE METHODS/VARIABLES
private
end
def clean_snapshot_info_file(snapshot_list)
- if not File.exist?(@snapshot_info_file)
+ if not File.exist? @snapshot_info_file_path
@log.error "Can not find snapshot info file"
return
end
# modify snapshot info File
- f = File.open(@snapshot_info_file, "r")
+ f = File.open(@snapshot_info_file_path, "r")
info_lines = []
save_flag = false
f.each_line do |l|
end
f.close
- f = File.open(@snapshot_info_file, "w")
+ f = File.open(@snapshot_info_file_path, "w")
info_lines.each do |i|
f.puts i
end
def get_all_reverse_depends_pkgs(pkg, checked_list)
depends_list = []
- for os in SUPPORTED_OS
+ for os in @support_os_list
@pkg_hash_os[os].each_value{ |dpkg|
if dpkg.install_dep_list.include? pkg or \
dpkg.build_dep_list.include? pkg then
require "packageServerConfig"
require "log"
require "utils"
-if Utils::HOST_OS.eql? "windows" then
+if Utils.is_windows_like_os( Utils::HOST_OS ) then
require "rubygems"
require "zip/zip"
end
uniq_name = Utils.create_uniq_name
path = Utils::HOME + "/tmp/#{uniq_name}"
# windows has limitation for file path length
- if Utils::HOST_OS.eql? "windows" then
+ if Utils.is_windows_like_os( Utils::HOST_OS ) then
drive = Utils::HOME.split("/")[0]
path = "#{drive}/#{uniq_name}"
end
if not script_file.nil? then
@@log.info "Execute \"#{script_file}\" file"
- if Utils::HOST_OS.eql? "windows" then
+ if Utils.is_windows_like_os( Utils::HOST_OS ) then
cmd = "set INSTALLED_PATH=\"#{target_path}\"& #{script_file}"
else
cmd = "INSTALLED_PATH=\"#{target_path}\" #{script_file}"
if not script_file.nil? then
@@log.info "Execute \"#{script_file}\" file"
- if Utils::HOST_OS.eql? "windows" then
+ if Utils.is_windows_like_os( Utils::HOST_OS ) then
cmd = "set INSTALLED_PATH=\"#{target_path}\"& #{script_file}"
else
cmd = "INSTALLED_PATH=\"#{target_path}\" #{script_file}"
case ext
when ".zip" then
- if Utils::HOST_OS.eql? "windows" then
+ if Utils.is_windows_like_os( Utils::HOST_OS ) then
log = unzip_file(package_file_path, path)
else
log = `#{extract_file_list_command}`
attr_accessor :finish, :port
attr_accessor :incoming_path
- # constant
- SUPPORTED_OS = ["linux", "windows", "darwin"]
-
# initialize
def initialize (id)
@location = ""
@log.output( "package server [#{@id}] created successfully", Log::LV_USER )
end
- def register( file_path_list, dist_name, snapshot, test_flag )
+ def register( file_path_list, dist_name, snapshot, test_flag, internal_flag = false )
@log.info "package register in server"
distribution = get_distribution( dist_name )
updated_os_list = []
registed_package_list = []
binary_pkg_file_path_list = []
+ link_pkg_file_path_list = []
archive_pkg_file_path_list = []
snapshot_name = ""
# binary package
if not pkg.nil? then
- updated_pkg = register_package(distribution, pkg, f, test_flag)
+
+ # find link package
+ pkg_os = Utils.get_os_from_package_file(f)
+ link_pkg = distribution.get_link_package(pkg, pkg_os)
+ if link_pkg.nil? then
+ binary_pkg_file_path_list.push f
+ else
+ link_pkg_file_path_list.push [link_pkg.path, File.basename(f)]
+ pkg.checksum = link_pkg.checksum
+ pkg.size = link_pkg.size
+ end
+
+ # update os information
+ if pkg.os_list.include? pkg_os then
+ pkg.os = pkg_os
+ pkg.os_list = [pkg_os]
+ else
+ raise RuntimeError, "package file name is incorrect [#{f}]"
+ end
+
+ updated_pkg = register_package(distribution, pkg, f, test_flag, internal_flag)
updated_os_list.push updated_pkg.os
registed_package_list.push updated_pkg
- binary_pkg_file_path_list.push f
# archive package
else
if test_flag then
end
end
+ # link to package server
+ link_pkg_file_path_list.each do |l|
+ if test_flag then
+ src_file = File.join(distribution.location, l[0])
+ dest_file = File.join(distribution.location, "temp", l[1])
+ FileUtils.ln( src_file, dest_file, :force => true )
+ else
+ src_file = File.join(distribution.location, l[0])
+ dest_file = File.join(distribution.location, "binary", l[1])
+ FileUtils.ln( src_file, dest_file, :force => true )
+ end
+ end
+
archive_pkg_file_path_list.each do |l|
FileUtils.mv( l, "#{distribution.location}/source/" )
end
# write package list for updated os
updated_os_list.uniq!
updated_os_list.each do |os|
- distribution.write_pkg_list( os )
+ distribution.write_pkg_list(os)
end
# register archive package list.
end
@lock_file = Utils.file_lock(distribution.lock_file_name)
- distribution.sync( mode, "linux" )
- distribution.sync( mode, "windows" )
- distribution.sync( mode, "darwin" )
+ distribution.sync(mode)
distribution.sync_archive_pkg
Utils.file_unlock(@lock_file)
@log.output( "distribution [#{dist_name}] added successfully", Log::LV_USER )
end
+ def add_os(dist_name, os)
+ dist = get_distribution(dist_name)
+
+ # distribution lock
+ @lock_file = Utils.file_lock(dist.lock_file_name)
+
+ dist.add_os(os)
+
+ Utils.file_unlock(@lock_file)
+ @log.output( "package server add os [#{os}] successfully", Log::LV_USER )
+ end
+
def remove_server()
@log.info( "Package server [#{@id}] will be removed and all server information delete", Log::LV_USER)
@port = port
@finish = false
- client = BuildCommClient.create("127.0.0.1", @port)
+ client = BuildCommClient.create("127.0.0.1", @port, @log)
if client.nil? then
raise RuntimeError, "Server does not listen in #{@port} port"
end
@@log.output( "=== server ID list ===", Log::LV_USER)
s.each do |id|
+ if File.extname(id).eql?(".log") then next end
+
@@log.output( id, Log::LV_USER)
end
end
end
end
end
- end
- @dist_to_server_url.each do |dist_name, server_url|
- @distribution_list.push Distribution.new( dist_name, "#{@location}/#{dist_name}", server_url, self )
+ @dist_to_server_url.each do |dist_name, server_url|
+ @distribution_list.push Distribution.new( dist_name, "#{@location}/#{dist_name}", server_url, self )
+ end
end
end
@log.info "[#{dist_name}] distribution creation. using local server [#{server_url}]"
end
- distribution.sync( false, "linux" )
- distribution.sync( false, "windows" )
- distribution.sync( false, "darwin" )
+ distribution.sync(false)
distribution.sync_archive_pkg
else
@log.info "generate package server do not using remote package server"
# write_pkg_list for empty file
- distribution.write_pkg_list( "linux" )
- distribution.write_pkg_list( "windows" )
- distribution.write_pkg_list( "darwin" )
+ distribution.write_pkg_list(nil)
distribution.write_archive_pkg_list()
end
- # add dist information to .info file
+ # add dist information to distribution.info file
File.open("#{@location}/distribution.info", "a") do |f|
f.puts "name : #{dist_name}"
f.puts "time : #{Time.now.strftime("%Y%m%d%H%M%S")}"
end
- def register_package(distribution, pkg, file_path, test_flag)
+ def register_package(distribution, pkg, file_path, test_flag, internal_flag)
# get package class using binary file
if pkg.nil? or pkg.package_name.empty? then
raise "[#{file_path}]'s pkginfo.manifest file is incomplete."
raise "[#{file_path}]'s pkginfo.manifest file is incomplete."
end
- updated_pkg = distribution.register(file_path, pkg )
+ updated_pkg = distribution.register(file_path, pkg, internal_flag )
else
updated_pkg = distribution.register_for_test(file_path, pkg )
end
def update_config_information(id)
@id = id
@config_dir = "#{PackageServerConfig::SERVER_ROOT}/#{@id}"
+ @log_file_path = "#{PackageServerConfig::SERVER_ROOT}/#{@id}.log"
@config_file_path = "#{@config_dir}/config"
@incoming_path = "#{@config_dir}/incoming"
- @log_file_path = "#{PackageServerConfig::SERVER_ROOT}/.#{@id}.log"
end
end
options[:test] = false
options[:clone] = false
options[:recursive] = false
+ options[:origin_pkg_name] = ""
+ options[:origin_pkg_os] = ""
end
def option_error_check( options )
if options[:id].empty? or options[:dist].empty? then
raise ArgumentError, "Usage: pkg-svr add-dist -n <server name> -d <distribution> [-u <remote_server_url>] [--clone] "
end
+ when "add-os"
+ if options[:os].empty? then
+ raise ArgumentError, "Usage: pkg-svr add-os -n <server name> -d <distribution> -o <os>] "
+ end
when "remove-dist"
if options[:id].empty? or options[:dist].empty? then
raise ArgumentError, "Usage: pkg-svr remove-dist -n <server name> -d <distribution> "
+ "\n" + "Usage: pkg-svr <SUBCOMMAND> [OPTS] or pkg-svr -h" + "\n" \
+ "\n" + "Subcommands:" + "\n" \
+ "\t" + "create Create a package-server." + "\n" \
- + "\t" + "add-dist Add a distribution to package-server." + "\n" \
- + "\t" + "register Register a package in package-server." + "\n" \
- + "\t" + "remove Remove a package-server." + "\n" \
- + "\t" + "remove-dist Remove a distribution to package-server." + "\n" \
+ + "\t" + "add-dist Add a distribution to package-server." + "\n" \
+ + "\t" + "add-os Add supported os." + "\n" \
+ + "\t" + "register Register a package in package-server." + "\n" \
+ + "\t" + "remove Remove a package-server." + "\n" \
+ + "\t" + "remove-dist Remove a distribution to package-server." + "\n" \
+ + "\t" + "remove-pkg Remove a package in package-server." + "\n" \
+ "\t" + "remove-snapshot Remove a snapshot in package-server." + "\n" \
+ "\t" + "gen-snapshot Generate a snapshot in package-server." + "\n" \
- + "\t" + "sync Synchronize the package-server from parent package server." + "\n" \
- + "\t" + "start Start the package-server." + "\n" \
- + "\t" + "stop Stop the package-server." + "\n" \
- + "\t" + "clean Delete unneeded package files in package-server." + "\n" \
- + "\t" + "list Show all pack" + "\n" \
- + "\n" + "Subcommand usage:" + "\n" \
+ + "\t" + "sync Synchronize the package-server from parent package server." + "\n" \
+ + "\t" + "start Start the package-server." + "\n" \
+ + "\t" + "stop Stop the package-server." + "\n" \
+ + "\t" + "clean Delete unneeded package files in package-server." + "\n" \
+ + "\t" + "list Show all pack" + "\n" \
+ + "\n" + "Subcommand usage:" + "\n" \
+ "\t" + "pkg-svr create -n <server name> -d <distribution> [-u <remote server url>] [-l <location>] " + "\n" \
+ "\t" + "pkg-svr add-dist -n <server name> -d <distribution> [-u <remote_server_url>] [--clone] " + "\n" \
+ + "\t" + "pkg-svr add-os -n <server name> -d <distribution> -o <os> " + "\n" \
+ "\t" + "pkg-svr register -n <server name> -d <distribution> -P <package file list> [--gen] [--test] " + "\n" \
+ "\t" + "pkg-svr remove -n <server name> " + "\n" \
+ "\t" + "pkg-svr remove-dist -n <server name> -d <distribution>" + "\n" \
options[:url] = url
end
- opts.on( '-o', '--os <operating system>', 'target operating system: linux/windows/darwin' ) do|os|
+ opts.on( '-o', '--os <operating system>', 'target operating system' ) do|os|
options[:os] = os
end
cmd = ARGV[0]
- if cmd.eql? "create" or cmd.eql? "register" or cmd.eql? "sync" \
- or cmd.eql? "gen-snapshot" or cmd.eql? "add-dist" \
+ if cmd.eql? "create" or cmd.eql? "sync" \
+ or cmd.eql? "register" \
+ or cmd.eql? "gen-snapshot" \
+ or cmd.eql? "add-dist" or cmd.eql? "add-os" \
or cmd.eql? "remove" or cmd.eql? "remove-dist" \
or cmd.eql? "remove-pkg" or cmd.eql? "remove-snapshot" \
or cmd.eql? "start" or cmd.eql? "stop" or cmd.eql? "clean" \
../build-cli -h
#POST-EXEC
#EXPECT
-Usage: build-cli {build|resolve|query|cancel} ...
- build-cli build -N <project name> -d <server address> [-o <os>] [-w <password>] [--async]
- build-cli resolve -N <project name> -d <server address> [-o <os>] [-w <password>] [--async]
- build-cli query -d <server address>
- build-cli cancel -j <job number> -d <server address> [-w <password>]
- build-cli register -P <file name> -d <server address> -t <ftp server url> [-w <password>]
- -N, --project <project name> project name
- -d, --address <server address> build server address: 127.0.0.1:2224
- -o, --os <operating system> target operating system: linux/windows/darwin
- --async asynchronous job
- -j, --job <job number> job number
- -w, --passwd <password> password for managing project
- -P, --pkg <package file> package file path
- -t, --ftp <ftp server url> ftp server url: ftp://dibsftp:dibsftp@127.0.0.1
- -h, --help display manual
- -v, --version display version
+Requiest service to build-server command-line tool.
+
+Usage: build-cli <SUBCOMMAND> [OPTS] or build-cli -h
+
+Subcommands:
+build Build and create package.
+resolve Request change to resolve-status for build-conflict.
+query Query information about build-server.
+query-system Query system information about build-server.
+query-project Query project information about build-server.
+query-job Query job information about build-server.
+cancel Cancel a building project.
+register Register the package to the build-server.
+
+Subcommand usage:
+build-cli build -N <project name> -d <server address> [-o <os>] [-w <password>] [--async]
+build-cli resolve -N <project name> -d <server address> [-o <os>] [-w <password>] [--async]
+build-cli query -d <server address>
+build-cli query-system -d <server address>
+build-cli query-project -d <server address>
+build-cli query-job -d <server address>
+build-cli cancel -j <job number> -d <server address> [-w <password>]
+build-cli register -P <file name> -d <server address> -t <ftp server url> [-w <password>]
+
+Options:
+-N, --project <project name> project name
+-d, --address <server address> build server address: 127.0.0.1:2224
+-o, --os <operating system> target operating system: ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64
+--async asynchronous job
+-j, --job <job number> job number
+-w, --passwd <password> password for managing project
+-P, --pkg <package file> package file path
+-t, --ftp <ftp server url> ftp server url: ftp://dibsftp:dibsftp@127.0.0.1
+-h, --help display manual
+-v, --version display version
../build-cli query -d 127.0.0.1:2223
#POST-EXEC
#EXPECT
-HOST-OS:
-MAX_WORKING_JOBS:
+* SYSTEM INFO *
+HOST-OS: ubuntu-32
+MAX_WORKING_JOBS: 2
* FTP *
FTP_ADDR:
FTP_USERNAME:
-* PROJECT(S) *
+* SUPPORTED OS LIST *
+ubuntu-32
+windows-32
-* JOB(S) *
+* PROJECT(S) *
+testd NORMAL
+testb NORMAL
+testa NORMAL
+testa1 NORMAL
+testc NORMAL
+teste REMOTE
#PRE-EXEC
#EXEC
-../build-cli build -N testa -d 127.0.0.1:2223
+../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
Info: Initializing job...
Info: Checking package version ...
-Info: Checking build dependency ...
Info: Invoking a thread for building Job
Info: New Job
+Info: Checking build dependency ...
Info: Started to build this job...
Info: JobBuilder
Info: Downloding client is initializing...
Info: Downloading dependent source packages...
Info: Make clean...
Info: Make build...
-Warn: Failed on build script: "save_cache"
-Warn: Saving cache failed...
Info: Make install...
Info: Generatiing pkginfo.manifest...
Info: Zipping...
-Info: Creating package file ... a_0.0.3_linux.zip
+Info: Creating package file ... a_0.0.1_ubuntu-32.zip
Info: Checking reverse build dependency ...
Info: Uploading ...
Info: Upload succeeded. Sync local pkg-server again...
--- /dev/null
+#PRE-EXEC
+echo "This is the test case for omitting os"
+../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a
+#EXEC
+../build-cli build -N testa -d 127.0.0.1:2223
+#POST-EXEC
+#EXPECT
+Info: Added new job
+Info: Initializing job...
+Info: Checking package version ...
+Info: Invoking a thread for building Job
+Info: New Job
+Info: Checking build dependency ...
+Info: Started to build this job...
+Info: JobBuilder
+Info: Downloding client is initializing...
+Info: Installing dependent packages...
+Info: Downloading dependent source packages...
+Info: Make clean...
+Info: Make install...
+Info: Generatiing pkginfo.manifest...
+Info: Zipping...
+Info: Creating package file ... a_0.0.1_ubuntu-32.zip
+Info: Checking reverse build dependency ...
+Info: Uploading ...
+Info: Upload succeeded. Sync local pkg-server again...
+Info: Snapshot:
+Info: Job is completed!
#PRE-EXEC
#EXEC
-../build-cli build -N non_exist_project -d 127.0.0.1:2223
+../build-cli build -N non_exist_project -d 127.0.0.1:2223 -o ubuntu-32
#POST-EXEC
#EXPECT
Error: Requested project does not exist!
#PRE-EXEC
#EXEC
-../build-cli build -N testa -d 172.21.111.217:1111
+../build-cli build -N testa -d 127.0.0.1:11113 -o ubuntu-32
#POST-EXEC
#EXPECT
Connection to server failed!
#PRE-EXEC
#EXEC
-../build-cli build -N testa -d 111.11q.111.111:1111
+../build-cli build -N testa -d 111.11q.111.111:1111 -o ubuntu-32
#POST-EXEC
#EXPECT
Connection to server failed!
#PRE-EXEC
echo "testa project is already built and uploaded in previeous testcase"
#EXEC
-../build-cli build -N testa -d 127.0.0.1:2223
+../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
#PRE-EXEC
echo "Assume testa project is already built and uploaded in previeous testcase"
#EXEC
-../build-cli build -N testb -d 127.0.0.1:2223
+../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
Info: Initializing job...
Info: Checking package version ...
-Info: Checking build dependency ...
Info: Invoking a thread for building Job
Info: New Job
+Info: Checking build dependency ...
Info: Started to build this job...
Info: JobBuilder
Info: Downloding client is initializing...
Info: Downloading dependent source packages...
Info: Make clean...
Info: Make build...
-Warn: Failed on build script: "save_cache"
-Warn: Saving cache failed...
Info: Make install...
Info: Generatiing pkginfo.manifest...
Info: Zipping...
-Info: Creating package file
+Info: Creating package file ... b_0.0.1_ubuntu-32.zip
Info: Checking reverse build dependency ...
Info: Uploading ...
Info: Upload succeeded. Sync local pkg-server again...
#PRE-EXEC
+echo "if build-dep package does not exist in server, will show the error"
echo "Assume testa/testb project is already built and uploaded in previeous testcase"
../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P b
../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a
#EXEC
-../build-cli build -N testb -d 127.0.0.1:2223
+../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
Info: Initializing job...
Info: Checking package version ...
+Info: Invoking a thread for building Job
+Info: New Job
Info: Checking build dependency ...
Error: The package "a" for build-dependency is not found
Error: Job is stopped by ERROR
#PRE-EXEC
+echo "This is the test case for omitting os"
+../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a
#EXEC
-../build-cli build -N testa -d 127.0.0.1:2223 -o linux
+../build-cli build -N testa -d 127.0.0.1:2223
#POST-EXEC
#EXPECT
Info: Added new job
Info: Initializing job...
Info: Checking package version ...
-Info: Checking build dependency ...
Info: Invoking a thread for building Job
Info: New Job
+Info: Checking build dependency ...
Info: Started to build this job...
Info: JobBuilder
Info: Downloding client is initializing...
Info: Downloading dependent source packages...
Info: Make clean...
Info: Make build...
-Warn: Failed on build script: "save_cache"
-Warn: Saving cache failed...
Info: Make install...
Info: Generatiing pkginfo.manifest...
Info: Zipping...
-Info: Creating package file ... a_0.0.3_linux.zip
+Info: Creating package file ... a_0.0.1_ubuntu-32.zip
Info: Checking reverse build dependency ...
Info: Uploading ...
Info: Upload succeeded. Sync local pkg-server again...
#PRE-EXEC
+echo "if there doe not exist server to build, error"
#EXEC
-../build-cli build -N testa -d 127.0.0.1:2223 -o windows
+../build-cli build -N testa -d 127.0.0.1:2223 -o windows-32
#POST-EXEC
#EXPECT
Info: Added new job
#PRE-EXEC
+echo "wrong os name in build command"
+../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a
#EXEC
-../build-cli build -N testa -d 127.0.0.1:2223 -o unknown_os
+../build-cli build -N testa -d 127.0.0.1:2223 -o wrong_os_name
#POST-EXEC
#EXPECT
-We have no plan to Buld OS "unknown_os"
- please check your option OS
+Error: Unsupported OS name "wrong_os_name" is used!
+Error: Check the following supported OS list.
+* ubuntu-32
+* windows-32
--- /dev/null
+#PRE-EXEC
+echo "wrong os name in resolve command"
+#EXEC
+../build-cli resolve -N testa -d 127.0.0.1:2223 -o wrong_os_name
+#POST-EXEC
+#EXPECT
+Error: Unsupported OS name "wrong_os_name" is used!
+Error: Check the following supported OS list.
+* ubuntu-32
+* windows-32
#PRE-EXEC
echo "Assume that testc project has the password (1111)"
echo "Assume that testa,testb which are depended by testc are built and uploaded"
-../build-cli build -N testa -d 127.0.0.1:2223
-../build-cli build -N testb -d 127.0.0.1:2223
+echo "For, work around solution, removed cache"
+rm -rf buildsvr01/projects/testa/cache
+../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32
+../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32
#EXEC
-../build-cli build -N testc -d 127.0.0.1:2223 -w 1111
+../build-cli build -N testc -d 127.0.0.1:2223 -w 1111 -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
Info: Initializing job...
Info: Checking package version ...
-Info: Checking build dependency ...
Info: Invoking a thread for building Job
Info: New Job
+Info: Checking build dependency ...
Info: Started to build this job...
Info: JobBuilder
Info: Downloding client is initializing...
Info: Installing dependent packages...
Info: * a
+Info: * b
Info: Downloading dependent source packages...
Info: Make clean...
Info: Make build...
-Warn: Failed on build script: "save_cache"
-Warn: Saving cache failed...
Info: Make install...
Info: Generatiing pkginfo.manifest...
Info: Zipping...
-Info: Creating package file ... c_0.0.3_linux.zip
+Info: Creating package file ... c_0.0.1_ubuntu-32.zip
Info: Checking reverse build dependency ...
Info: Uploading ...
Info: Upload succeeded. Sync local pkg-server again...
echo "Assume that testc project has the password (1111)"
echo "Assume that testa,testb which are depended by testc are built and uploaded"
#EXEC
-../build-cli build -N testc -d 127.0.0.1:2223
+../build-cli build -N testc -d 127.0.0.1:2223 -o linux
#POST-EXEC
#EXPECT
Error: Project's password is not matched!
echo "Assume that testc project has the password (1111)"
echo "Assume that testa,testb which are depended by testc are built and uploaded"
#EXEC
-../build-cli build -N testc -d 127.0.0.1:2223 -w 2222
+../build-cli build -N testc -d 127.0.0.1:2223 -w 2222 -o linux
#POST-EXEC
#EXPECT
Error: Project's password is not matched!
#PRE-EXEC
../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P c
#EXEC
-../build-cli build -N testc -d 127.0.0.1:2223 -w 1111 --async
+../build-cli build -N testc -d 127.0.0.1:2223 -w 1111 --async -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P b
../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a
#EXEC
-../build-cli build -N testa -d 127.0.0.1:2223 --async
+../build-cli build -N testa -d 127.0.0.1:2223 --async -o ubuntu-32
sleep 1
-../build-cli build -N testb -d 127.0.0.1:2223
+../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
Info: Initializing job...
Info: Checking package version ...
Info: Waiting for finishing following jobs:
-Info: *
+Info: *
Info: Invoking a thread for building Job
Info: New Job
Info: Checking build dependency ...
Info: Downloading dependent source packages...
Info: Make clean...
Info: Make build...
-Warn: Failed on build script: "save_cache"
-Warn: Saving cache failed...
Info: Make install...
Info: Generatiing pkginfo.manifest...
Info: Zipping...
-Info: Creating package file ...
+Info: Creating package file ... b_0.0.1_ubuntu-32.zip
Info: Checking reverse build dependency ...
Info: Uploading ...
Info: Upload succeeded. Sync local pkg-server again...
Info: Snapshot:
Info: Job is completed!
-
#PRE-EXEC
-../build-cli build -N testa -d 127.0.0.1:2223
-../build-cli build -N testb -d 127.0.0.1:2223
-../build-cli build -N testc -d 127.0.0.1:2223 -w 1111
+echo "reverse fail"
#EXEC
rm -rf git01/a
-cd git01;tar xvf a_new.tar.gz
-../build-cli build -N testa -d 127.0.0.1:2223
+cd git01;tar xf a_v2.tar.gz
+../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32
#POST-EXEC
#EXPECT
Info: Added new job
Info: Downloading dependent source packages...
Info: Make clean...
Info: Make build...
-Warn: Failed on build script: "save_cache"
-Warn: Saving cache failed...
Info: Make install...
Info: Generatiing pkginfo.manifest...
Info: Zipping...
-Info: Creating package file ... a_0.0.4_linux.zip
+Info: Creating package file ... a_0.0.2_ubuntu-32.zip
Info: Checking reverse build dependency ...
-Info: * Checking reverse-build ... testc
+Info: * Will check reverse-build for projects: testb(ubuntu-32)
+Info: * Added new job for reverse-build ... testb(ubuntu-32)
+Info: * Reverse-build FAIL ... testb(ubuntu-32)
Error: Job is stopped by ERROR
--- /dev/null
+#PRE-EXEC
+#EXEC
+../build-cli query-system -d 127.0.0.1:2223
+#POST-EXEC
+#EXPECT
+* SYSTEM INFO *
+HOST-OS:
+MAX_WORKING_JOBS:
+
+* FTP *
+FTP_ADDR:
+FTP_USERNAME:
+
+* SUPPORTED OS LIST *
+ubuntu-32
+windows-32
--- /dev/null
+#PRE-EXEC
+#EXEC
+../build-cli query-project -d 127.0.0.1:2223
+#POST-EXEC
+#EXPECT
+* PROJECT(S) *
--- /dev/null
+#PRE-EXEC
+#EXEC
+../build-cli query-job -d 127.0.0.1:2223
+#POST-EXEC
+#EXPECT
+* JOB(S) *
--- /dev/null
+#PRE-EXEC
+echo "Trying to upload a_0.0.1 with different commit-id is already uploaded"
+rm -rf git01/c
+cd git01;tar xf c_v1_1.tar.gz
+#EXEC
+../build-cli build -N testc -d 127.0.0.1:2223 -o ubuntu-32 -w 1111
+#POST-EXEC
+#EXPECT
+Info: Added new job
+Info: Initializing job...
+Info: Checking package version ...
+Error: Source code has been changed without increasing version!
+Error: * Version :
+Error: * Before :
+Error: * Current :
+Error: Job is stopped by ERROR
--- /dev/null
+#PRE-EXEC
+cd git01;tar xf a_v2.tar.gz
+cd git01;tar xf b_v2.tar.gz
+cd git01;tar xf c_v2.tar.gz
+#EXEC
+../build-cli build -N testa,testb,testc -d 127.0.0.1:2223 -o ubuntu-32 -w 1111
+#POST-EXEC
+#EXPECT
+Info: Added new job
+Info: Initializing job...
+Info: Invoking a thread for MULTI-BUILD Job
+Info: New Job
+Info: Added new job "testa" for ubuntu-32!
+Info: Added new job "testb" for ubuntu-32!
+Info: Added new job "testc" for ubuntu-32!
+Info: Sub-Job "testa" for ubuntu-32 has entered "WORKING" state.
+Info: Sub-Job "testa" for ubuntu-32 has entered "FINISHED" state.
+Info: Sub-Job "testb" for ubuntu-32 has entered "WORKING" state.
+Info: Sub-Job "testb" for ubuntu-32 has entered "FINISHED" state.
+Info: Sub-Job "testc" for ubuntu-32 has entered "WORKING" state.
+Info: Sub-Job "testc" for ubuntu-32 has entered "FINISHED" state.
+Info: Uploading ...
+Info: Upload succeeded. Sync local pkg-server again...
+Info: Snapshot:
+Info: Job is completed!
--- /dev/null
+#PRE-EXEC
+echo "This test case must be execute right after testcase 22"
+#EXEC
+../build-cli build -N testa,testb,testc -d 127.0.0.1:2223 -o ubuntu-32 -w 1111
+#POST-EXEC
+#EXPECT
+Info: Added new job
+Info: Initializing job...
+Info: Failed to initialize sub-job
+Error: Job is stopped by ERROR
--- /dev/null
+#PRE-EXEC
+cd git01;tar xf a_v3.tar.gz
+#EXEC
+../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32,windows-32
+#POST-EXEC
+#EXPECT
+Info: Added new job
+Info: Initializing job...
+Info: Invoking a thread for MULTI-BUILD Job
+Info: New Job
+Info: Added new job
+Info: Added new job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Uploading ...
+Info: Upload succeeded. Sync local pkg-server again...
+Info: Snapshot:
+Info: Job is completed!
--- /dev/null
+#PRE-EXEC
+echo "testa, testb: build because of version change"
+echo "testc, testa1, testd: rebuild with same version"
+
+cd git01;tar xf a_v4.tar.gz
+cd git01;tar xf b_v4.tar.gz
+cd git01;tar xf c_v4.tar.gz
+#EXEC
+../build-svr fullbuild -n testserver3
+#POST-EXEC
+#EXPECT
+Info: Initializing job...
+Info: Invoking a thread for MULTI-BUILD Job
+Info: New Job
+Info: Added new job "testd" for ubuntu-32!
+Info: Added new job "testb" for ubuntu-32!
+Info: Added new job "testb" for windows-32!
+Info: Added new job "testa" for ubuntu-32!
+Info: Added new job "testa" for windows-32!
+Info: Added new job "testa1" for ubuntu-32!
+Info: Added new job "testa1" for windows-32!
+Info: Added new job "testc" for ubuntu-32!
+Info: Added new job "testc" for windows-32!
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Sub-Job
+Info: Uploading ...
+Info: Upload succeeded. Sync local pkg-server again...
+Info: Snapshot:
+Info: Job is completed!
--- /dev/null
+#PRE-EXEC
+cd git01;tar xf c_v5.tar.gz
+#EXEC
+../build-cli build -N testc -d 127.0.0.1:2223 -o li_* -w 1111
+#POST-EXEC
+#EXPECT
+Error: There is no OS supported by the build server.
--- /dev/null
+#PRE-EXEC
+echo "wild card"
+#EXEC
+../build-cli build -N testc -d 127.0.0.1:2223 -o ubuntu-* -w 1111
+#POST-EXEC
+#EXPECT
+Info: Added new job
+Info: Initializing job...
+Info: Checking package version ...
+Info: Invoking a thread for building Job
+Info: New Job
+Info: Checking build dependency ...
+Info: Started to build this job...
+Info: JobBuilder
+Info: Downloding client is initializing...
+Info: Installing dependent packages...
+Info: * a
+Info: * b
+Info: Downloading dependent source packages...
+Info: Make clean...
+Info: Make build...
+Info: Make install...
+Info: Generatiing pkginfo.manifest...
+Info: Zipping...
+Info: Creating package file ... c_0.0.5_ubuntu-32.zip
+Info: Checking reverse build dependency ...
+Info: Uploading ...
+Info: Upload succeeded. Sync local pkg-server again...
+Info: Snapshot:
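The two wildcard test cases above depend on the -o pattern being expanded against the build server's supported OS list: li_* matches nothing and fails, while ubuntu-* resolves to ubuntu-32. One plausible way to do that expansion, an assumption about the implementation shown only to explain the expected output:

    # sketch: expand an -o wildcard against the supported OS list (names from the test setup)
    support_os_list = ["ubuntu-32", "windows-32"]
    target_os_list  = support_os_list.select { |os| File.fnmatch("ubuntu-*", os) }
    # => ["ubuntu-32"]; an empty result corresponds to
    #    "Error: There is no OS supported by the build server."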
--- /dev/null
+#PRE-EXEC
+echo "reverse success"
+#EXEC
+rm -rf git01/a
+cd git01;tar xf a_v5.tar.gz
+../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32
+#POST-EXEC
+#EXPECT
+Info: Added new job
+Info: Initializing job...
+Info: Checking package version ...
+Info: Invoking a thread for building Job
+Info: New Job
+Info: Checking build dependency ...
+Info: Started to build this job...
+Info: JobBuilder
+Info: Downloding client is initializing...
+Info: Installing dependent packages...
+Info: Downloading dependent source packages...
+Info: Make clean...
+Info: Make build...
+Info: Make install...
+Info: Generatiing pkginfo.manifest...
+Info: Zipping...
+Info: Creating package file ... a_0.0.5_ubuntu-32.zip
+Info: Checking reverse build dependency ...
+Info: * Will check reverse-build for projects: testb(ubuntu-32), testc(ubuntu-32)
+Info: * Added new job for reverse-build ... testc(ubuntu-32)
+Info: * Added new job for reverse-build ... testc(ubuntu-32)
+Info: * Reverse-build OK ...
+Info: * Reverse-build OK ...
+Info: Uploading ...
+Info: Upload succeeded. Sync local pkg-server again...
+Info: Snapshot:
+Info: Job is completed!
../build-svr -h
#POST-EXEC
#EXPECT
-Usage: build-svr {create|remove|start|stop|add-svr|add-prj|fullbuild|help} ...
+Build-server administer service command-line tool.
+
+Usage: build-svr <SUBCOMMAND> [OPTS] or build-svr -h
+
+Subcommands:
+ create Create the build-server.
+ remove Remove the build-server.
+ start Start the build-server.
+ stop Stop the build-server.
+ add-svr Add build-server for support multi-OS or distribute build job.
+ add-prj Register information for project what you want build berfore building a project.
+ register Register the package to the build-server.
+ fullbuild Build all your projects and upload them to package server.
+
+Subcommand usage:
build-svr create -n <server name> -u <package server url> -d <package server address> -t <ftp server url>
build-svr remove -n <server name>
build-svr start -n <server name> -p <port>
build-svr stop -n <server name>
build-svr add-svr -n <server name> -d <friend server address>
build-svr add-prj -n <server name> -N <project name> [-g <git repository>] [-b <git branch>] [-P <package name>] [-w <password>] [-o <os list>]
+ build-svr add-os -n <server name> -o <os>
build-svr register -n <server name> -P <package file>
build-svr fullbuild -n <server name>
- -n, --name <server name> build server name
- -u, --url <package server url> package server url: http://127.0.0.1/dibs/unstable
- -d, --address <server address> server address: 127.0.0.1:2224
- -p, --port <port> server port number: 2224
- -P, --pkg <package name/file> package file path or name
- -o, --os <target os list> ex) linux,windows
- -N, --pname <project name> project name
- -g, --git <git repository> git repository
- -b, --branch <git branch> git branch
- -w, --passwd <password> password for managing project
- -t, --ftp <ftp server url> ftp server url: ftp://dibsftp:dibsftp@127.0.0.1
- -h, --help display this information
- -v, --version display version
+
+Options:
+ -n, --name <server name> build server name
+ -u, --url <package server url> package server url: http://127.0.0.1/dibs/unstable
+ -d, --address <server address> server address: 127.0.0.1:2224
+ -p, --port <port> server port number: 2224
+ -P, --pkg <package name/file> package file path or name
+ -o, --os <target os list> ex) linux,windows
+ -N, --pname <project name> project name
+ -g, --git <git repository> git repository
+ -b, --branch <git branch> git branch
+ -w, --passwd <password> password for managing project
+ -t, --ftp <ftp server url> ftp server url: ftp://dibsftp:dibsftp@127.0.0.1:1024
+ -h, --help display this information
+ -v, --version display version
+
rm -rf buildsvr01
mkdir buildsvr01
cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132
+../build-svr add-os -n testserver3 -o linux
#EXEC
../build-svr add-prj -n testserver3 -N testa -g test_git -b test_branch
#POST-EXEC
rm -rf buildsvr01
mkdir buildsvr01
cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132
+../build-svr add-os -n testserver3 -o linux
#EXEC
../build-svr add-prj -n testserver3 -N testx -g test_git -b test_branch -o linux
#POST-EXEC
--- /dev/null
+#PRE-EXEC
+rm -rf buildsvr01
+mkdir buildsvr01
+cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132
+#EXEC
+../build-svr add-os -n testserver3 -o linux
+cat ~/.build_tools/build_server/testserver3/supported_os_list
+#POST-EXEC
+../build-svr remove -n testserver3
+rm -rf buildsvr01
+#EXPECT
+Target OS is added successfully!
+linux
--- /dev/null
+#PRE-EXEC
+rm -rf buildsvr01
+mkdir buildsvr01
+cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132
+#EXEC
+../build-svr add-os -n testserver3 -o linux
+../build-svr add-os -n testserver3 -o linux
+#POST-EXEC
+../build-svr remove -n testserver3
+rm -rf buildsvr01
+#EXPECT
+Target OS is added successfully!
+Target OS already exists in list!
--- /dev/null
+#PRE-EXEC
+rm -rf buildsvr01
+mkdir buildsvr01
+cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132
+../build-svr add-os -n testserver3 -o linux
+../build-svr add-os -n testserver3 -o windows
+#EXEC
+../build-svr add-prj -n testserver3 -N new_project -g new_git -b new_branch -o wrong_os_name
+#POST-EXEC
+../build-svr remove -n testserver3
+rm -rf buildsvr01
+#EXPECT
+Unsupported OS name "wrong_os_name" is used!
+Check the following supported OS list:
+ * linux
+ * windows
--- /dev/null
+#PRE-EXEC
+rm -rf buildsvr01
+mkdir buildsvr01
+cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132
+../build-svr add-os -n testserver3 -o linux
+cp bin/bin_0.0.0_linux.zip bin/bin_0.0.0_wrongosname.zip
+../build-svr start -n testserver3 -p 2223 &
+#EXEC
+sleep 1
+../build-svr register -n testserver3 -P bin/bin_0.0.0_wrongosname.zip
+#POST-EXEC
+../build-svr stop -n testserver3
+sleep 1
+../build-svr remove -n testserver3
+rm -rf buildsvr01
+rm -rf bin/bin_0.0.0_wrongosname.zip
+#EXPECT
+Info: Initializing job...
+Error: Unsupported OS "wrongosname" is used!
+Error: Job is stopped by ERROR
build-cli-10.testcase
build-cli-11.testcase
build-cli-12.testcase
+build-cli-12_1.testcase
build-cli-13.testcase
build-cli-14.testcase
build-cli-15.testcase
build-cli-16.testcase
build-cli-17.testcase
build-cli-18.testcase
+build-cli-19.testcase
+build-cli-20.testcase
+build-cli-21.testcase
+build-cli-22.testcase
+build-cli-23.testcase
+build-cli-24.testcase
+build-cli-25.testcase
+build-cli-26.testcase
+build-cli-27.testcase
+build-cli-28.testcase
+build-cli-29.testcase
build-svr-12.testcase
build-svr-13.testcase
build-svr-14.testcase
+build-svr-17.testcase
+build-svr-18.testcase
+build-svr-19.testcase
+build-svr-20.testcase
../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://tmax:tmax@172.21.111.217
cd ..
cd git01
-tar xvf a.tar.gz
-tar xvf b.tar.gz
-tar xvf c.tar.gz
-tar xvf d.tar.gz
+rm -rf a
+rm -rf a1
+rm -rf b
+rm -rf c
+rm -rf d
+tar xvf a_v1.tar.gz
+tar xvf b_v1.tar.gz
+tar xvf c_v1.tar.gz
+tar xvf d_v0.tar.gz
+tar xvf a1_v1.tar.gz
cd ..
+../build-svr add-os -n testserver3 -o ubuntu-32
+../build-svr add-os -n testserver3 -o windows-32
../build-svr add-prj -n testserver3 -N testa -g `pwd`/git01/a -b master
../build-svr add-prj -n testserver3 -N testb -g `pwd`/git01/b -b master
../build-svr add-prj -n testserver3 -N testc -g `pwd`/git01/c -b master -w 1111
-../build-svr add-prj -n testserver3 -N testd -g `pwd`/git01/d -b master
+../build-svr add-prj -n testserver3 -N testd -g `pwd`/git01/d -b master -o ubuntu-32
../build-svr add-prj -n testserver3 -N teste -P bin
+../build-svr add-prj -n testserver3 -N testa1 -g `pwd`/git01/a1 -b master
+../pkg-svr register -n pkgsvr01 -d unstable -P bin/bin_0.0.0_ubuntu-32.zip
../build-svr start -n testserver3 -p 2223
packageserver01.testcase
packageserver02.testcase
+packageserver24.testcase
packageserver03.testcase
packageserver04.testcase
packageserver05.testcase
packageserver07.testcase
packageserver08.testcase
packageserver09.testcase
-packageserver10.testcase
packageserver11.testcase
-packageserver12.testcase
packageserver13.testcase
packageserver14.testcase
packageserver15.testcase
../pkg-svr -h
#POST-EXEC
#EXPECT
-Usage: pkg-svr {create|register|gen-snapshot|sync|add-dist|spkg-path|remove|remove-pkg|list|help} ...
- pkg-svr create -i <id> -d <distribution> [-u <remote_server_url>] [-l <location>]
- pkg-svr add-dist -i<id> -d <distribution> [-u <remote_server_url>] [-c]
- pkg-svr remove -i <id>
- pkg-svr register -i <id> -d <distribution> -p <binary_package_file_path_list> -s <source_package_file_path_list> [-g] [-t]
- pkg-svr remove-pkg -i <id> -d <distribution> -p <binary_package_name_list>
- pkg-svr gen-snapshot -i<id> -d <distribution> -n <snapshot name> [-b <base_snapshot_name>]
- pkg-svr sync -i <id> -d <distribution> [-f]
- pkg-svr spkg-path -i <id> -d <distribution> -s <source_package_name>
- pkg-svr list [-i <id>]
- -i, --id <id> package server id
- -d, --dist <distribution> package server distribution
- -u, --url <server_address> remote server address
- -o, --os <operating system> target operating system
- -p <binary_pakcage_file_path_list>
- --bpackage binary package file path list
- -s <source_pakcage_file_path_list>
- --spackage source package file path
- -g, --generate snapshot is generate
- -n, --sname <snapshot> snapshot name
- -b <base_snapshot_name> base snapshot name
- --bsnapshot
- -l, --location <location> server location
- -f, --force force update pkg file
- -t, --test upload for test
- -c, --clone clone mode
- -h, --help display this information
+Package-server administer service command-line tool.
+
+Usage: pkg-svr <SUBCOMMAND> [OPTS] or pkg-svr -h
+
+Subcommands:
+ create Create a package-server.
+ add-dist Add a distribution to package-server.
+ register Register a package in package-server.
+ remove Remove a package-server.
+ remove-dist Remove a distribution to package-server.
+ remove-snapshot Remove a snapshot in package-server.
+ gen-snapshot Generate a snapshot in package-server.
+ sync Synchronize the package-server from parent package server.
+ start Start the package-server.
+ stop Stop the package-server.
+ clean Delete unneeded package files in package-server.
+ list Show all pack
+
+Subcommand usage:
+ pkg-svr create -n <server name> -d <distribution> [-u <remote server url>] [-l <location>]
+ pkg-svr add-dist -n <server name> -d <distribution> [-u <remote_server_url>] [--clone]
+ pkg-svr add-os -n <server name> -d <distribution> -o <os>
+ pkg-svr register -n <server name> -d <distribution> -P <package file list> [--gen] [--test]
+ pkg-svr link -n <server name> -d <distribution> --origin-pkg-name <origin pkg name> --origin-pkg-os <origin pkg os> --link-os-list <link os list>
+ pkg-svr remove -n <server name>
+ pkg-svr remove-dist -n <server name> -d <distribution>
+ pkg-svr remove-pkg -n <server name> -d <distribution> -P <package name list> [-o <os>]
+ pkg-svr remove-snapshot -n <server name> -d <distribution> -s <snapshot list>
+ pkg-svr gen-snapshot -n <server name> -d <distribution> -s <snapshot name> [-b <base snapshot name>]
+ pkg-svr sync -n <server name> -d <distribution> [--force]
+ pkg-svr clean -n <server name> -d <distribution> [-s <snapshot list>]
+ pkg-svr start -n <server name> -p <port>
+ pkg-svr stop -n <server name> -p <port>
+ pkg-svr list [-n <server name>]
+
+Options:
+ -n, --name <server name> package server name
+ -d, --dist <distribution> package server distribution
+ -u, --url <server url> remote server url: http://127.0.0.1/dibs/unstable
+ -o, --os <operating system> target operating system
+ -P, --pkgs <package file list> package file path list
+ -s, --snapshot <snapshot> a snapshot name or snapshot list
+ -b, --base <base snapshot> base snapshot name
+ -l, --loc <location> server location
+ -p, --port <port> port number
+ --recursive remove all depends packages
+ --clone clone mode
+ --force force update pkg file
+ --test upload for test
+ --gen generate snapshot
+ --origin-pkg-name <origin_pkg_name>
+ origin package name
+ --origin-pkg-os <origin_pkg_os>
+ origin package os
+ --link-os-list <link_os_list>
+ target os list to link origin file
+ -h, --help display manual
+ -v, --version display version
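
The expected help text above reflects the switch from "-i <id>" to "-n <server name>" and the merge of "-p"/"-s" into a single "-P <package file list>". For orientation, the Ruby sketch below walks the basic create / add-os / register / gen-snapshot / remove cycle using only subcommands documented above; the server name, OS, snapshot name, and package path are placeholders, not values any test case depends on.

#!/usr/bin/ruby
# Sketch only: exercises the new-style pkg-svr options end to end.
# SVR, DIST and PKG are placeholders; use any package zip built for the added OS.
SVR  = "sketch_local"
DIST = "unstable"
PKG  = "./your_package_0.0.1_ubuntu-10.04-32.zip"   # placeholder file name

[ "../pkg-svr create -n #{SVR} -d #{DIST}",
  "../pkg-svr add-os -n #{SVR} -d #{DIST} -o ubuntu-10.04-32",
  "../pkg-svr register -n #{SVR} -d #{DIST} -P #{PKG} --gen",
  "../pkg-svr gen-snapshot -n #{SVR} -d #{DIST} -s snap_sketch",
  "../pkg-svr remove -n #{SVR} --force"
].each do |cmd|
  puts "== #{cmd}"
  system(cmd) or abort "command failed: #{cmd}"
end
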
#PRE-EXEC
+../pkg-svr remove -n temp_local --force
#EXEC
-../pkg-svr create -i temp_local -d unstable
+../pkg-svr create -n temp_local -d unstable
#POST-EXEC
#EXPECT
package server [temp_local] created successfully
#PRE-EXEC
+../pkg-svr remove -n temp_remote --force
#EXEC
-../pkg-svr create -i temp_remote -d unstable -u http://172.21.111.177/tmppkgsvr/tmp
+../pkg-svr create -n temp_remote -d unstable -u http://172.21.111.177/tmppkgsvr/tmp
#POST-EXEC
#EXPECT
package server [temp_remote] created successfully
#PRE-EXEC
+../pkg-svr remove -n temp_remote_dup --force
#EXEC
-../pkg-svr create -i temp_remote_dup -d unstable -u temp_remote/unstable
+../pkg-svr create -n temp_remote_dup -d unstable -u temp_remote/unstable
#POST-EXEC
#EXPECT
package server [temp_remote_dup] created successfully
#PRE-EXEC
#EXEC
-../pkg-svr add-dist -i temp_local -d stable
+../pkg-svr add-dist -n temp_local -d stable
#POST-EXEC
#EXPECT
distribution [stable] added successfully
#PRE-EXEC
#EXEC
-../pkg-svr sync -i temp_remote -d unstable
+../pkg-svr sync -n temp_remote -d unstable
#POST-EXEC
#EXPECT
-package server [temp_remote]'s distribution [unstable] has the synchronization.
+package server [temp_remote]'s distribution [unstable] has been synchronized.
#PRE-EXEC
#EXEC
-../pkg-svr sync -i temp_remote_dup -d unstable -f
+../pkg-svr sync -n temp_remote_dup -d unstable --force
#POST-EXEC
#EXPECT
-package server [temp_remote_dup]'s distribution [unstable] has the synchronization.
+package server [temp_remote_dup]'s distribution [unstable] has been synchronized.
#PRE-EXEC
#EXEC
-../pkg-svr gen-snapshot -i temp_remote
+../pkg-svr gen-snapshot -n temp_remote -s snap01
#POST-EXEC
#EXPECT
snapshot is generated :
#PRE-EXEC
#EXEC
-../pkg-svr gen-snapshot -i temp_remote -d unstable
+../pkg-svr gen-snapshot -n temp_remote -d unstable -s snap02
#POST-EXEC
#EXPECT
snapshot is generated :
#PRE-EXEC
#EXEC
-../pkg-svr gen-snapshot -i temp_remote -d unstable -n test
+../pkg-svr gen-snapshot -n temp_remote -d unstable -s test
#POST-EXEC
#EXPECT
snapshot is generated :
#PRE-EXEC
#EXEC
-../pkg-svr gen-snapshot -i temp_remote -d unstable -n test2 -b test
+../pkg-svr gen-snapshot -n temp_remote -d unstable -s snap03 -b snap01
#POST-EXEC
#EXPECT
snapshot is generated :
#PRE-EXEC
#EXEC
-../pkg-svr gen-snapshot -i temp_remote -d unstable -o all -n test3
+../pkg-svr gen-snapshot -n temp_remote -d unstable -s test3
#POST-EXEC
#EXPECT
snapshot is generated :
#PRE-EXEC
#EXEC
-../pkg-svr create -i temp_remote_snap -d unstable -u temp_remote/unstable/snapshots/test
+../pkg-svr create -n temp_remote_snap -d unstable -u temp_remote/unstable/snapshots/snap01
#POST-EXEC
#EXPECT
package server [temp_remote_snap] created successfully
#PRE-EXEC
cp test_server_pkg_file/smart-build-interface* ./
#EXEC
-../pkg-svr register -i temp_remote -d unstable -p smart-build-interface_1.20.1_linux.zip -s smart-build-interface_1.20.1.tar.gz -g
+../pkg-svr register -n temp_remote -d unstable -P smart-build-interface_1.20.1_linux.zip --gen
#POST-EXEC
#EXPECT
package registed successfully
#PRE-EXEC
#EXEC
-../pkg-svr register -i temp_remote -d unstable -p smart-build-interface_1.20.1_linux.zip -s smart-build-interface_1.20.1.tar.gz -g
+../pkg-svr register -n temp_remote -d unstable -P smart-build-interface_1.20.1_linux.zip --gen
#POST-EXEC
#EXPECT
existing package's version is higher than register package
#PRE-EXEC
cp test_server_pkg_file/smart-build-interface* ./
#EXEC
-../pkg-svr register -i temp_remote_dup -d unstable -p ./temp_remote/unstable/binary/smart-build-interface_1.20.1_linux.zip -s ./temp_remote/unstable/source/smart-build-interface_1.20.1.tar.gz -g -t
+../pkg-svr register -n temp_remote_dup -d unstable -P ./temp_remote/unstable/binary/smart-build-interface_1.20.1_linux.zip --gen --test
#POST-EXEC
#EXPECT
package registed successfully
#PRE-EXEC
cp test_server_pkg_file/smart-build-interface* ./
#EXEC
-../pkg-svr remove-pkg -i temp_local -d unstable -p smart-build-interface
+../pkg-svr remove-pkg -n temp_local -d unstable -P smart-build-interface
#POST-EXEC
#EXPECT
package removed successfully
#PRE-EXEC
#EXEC
-../pkg-svr list -i temp_local
+../pkg-svr list -n temp_local
#POST-EXEC
rm smart-build-interface_1.20.1*
#EXPECT
#PRE-EXEC
#EXEC
-../pkg-svr remove -i temp_local -f
+../pkg-svr remove -n temp_local --force
#POST-EXEC
YES
#EXPECT
#PRE-EXEC
#EXEC
-../pkg-svr remove -i temp_remote -f
+../pkg-svr remove -n temp_remote --force
#POST-EXEC
YES
#EXPECT
#PRE-EXEC
#EXEC
-../pkg-svr remove -i temp_remote_dup -f
+../pkg-svr remove -n temp_remote_dup --force
#POST-EXEC
YES
#EXPECT
#PRE-EXEC
#EXEC
-../pkg-svr remove -i temp_remote_snap -f
+../pkg-svr remove -n temp_remote_snap --force
#POST-EXEC
YES
#EXPECT
--- /dev/null
+#PRE-EXEC
+#EXEC
+../pkg-svr add-os -n temp_local -d unstable -o ubuntu-10.04-32
+#POST-EXEC
+../pkg-svr add-os -n temp_local -d unstable -o windows-7-32
+#EXPECT
+package server add os [ubuntu-10.04-32] successfully
--- /dev/null
+#PRE-EXEC
+#EXEC
+../pkg-svr link -n temp_local -d unstable --origin-pkg-name smart-build-interface --origin-pkg-os ubuntu-10.04-32 --link-os-list windows-7-32
+#POST-EXEC
+#EXPECT
+package linked successfully
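
The two new test cases above introduce add-os and link: once a binary is registered for one OS, link exposes the same origin file under additional OS names (per the --link-os-list description, "target os list to link origin file") instead of uploading a separate copy. A hedged sketch chaining the two steps, reusing the names from the tests purely for illustration:

# Sketch: expose an ubuntu-10.04-32 binary under windows-7-32 as well.
# Assumes temp_local/unstable exists and smart-build-interface is already
# registered, as in the surrounding test cases.
steps = [
  "../pkg-svr add-os -n temp_local -d unstable -o ubuntu-10.04-32",
  "../pkg-svr add-os -n temp_local -d unstable -o windows-7-32",
  "../pkg-svr link -n temp_local -d unstable --origin-pkg-name smart-build-interface --origin-pkg-os ubuntu-10.04-32 --link-os-list windows-7-32"
]
steps.each { |c| system(c) or abort "failed: #{c}" }
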
#POST-EXEC
rm -rf pkgcli01
#EXPECT
-base-ide-product_0.20.8_linux.zip
+base-ide-product_1.0.2_linux.zip
../pkg-cli list-rpkg -u http://172.21.111.132/testserver3/unstable
#POST-EXEC
#EXPECT
-base-ide-product (0.20.8)
+base-ide-product (1.0.2)
#POST-EXEC
#EXPECT
Package : base-ide-product
-Version : 0.20.8
+Version : 1.0.2
OS : linux
-Package : A
+Source : Origin
Version : 0.1.0
+Maintainer : taejun.ha <tajun.ha@samsung.com>
+
+Package : A
OS : linux
+C-test : test
Build-host-os :linux | windows | darwin
-Maintainer : taejun.ha <tajun.ha@samsung.com>
Path : binary/A_0.1.0_linux.zip
+C-commic : ask
Origin : remote
-SHA256 : 52b400554f2a29dec46144af649181cf287c000b4feb65de72055ed9f11924a9
+C-origin : kkk
Package: B
-Version : 0.2.0
OS : linux
Build-host-os :linux | windows | darwin
-Maintainer : taejun.ha <tajun.ha@samsung.com>
Install-dependency : C, D, E
Build-dependency : F (>= 1.0.0.20101221), E (>= 1.0.0.20101221)
Source-dependency : D, scratchbox-aquila-simulator-rootstrap [ linux |windows ](>= 1.0.0.20101221), scratchbox-core [windows|darwin](>= 1.0.17)
Path :
-Source : Origin
-From-server? : true
SHA256 : your_checksum
Description : this is my first
-project
-descriotion
+C-kim : oks
--- /dev/null
+Include : pkg-list
+
+Package : A
+OS : windows
+Build-host-os :linux | windows | darwin
+Path : binary/A_0.1.0_linux.zip
+Origin : remote
+
+Package: B
+OS : windows
+Build-host-os :linux | windows | darwin
+Install-dependency : C, D, E
+Build-dependency : F (>= 1.0.0.20101221), E (>= 1.0.0.20101221)
+Source-dependency : D, scratchbox-aquila-simulator-rootstrap [ linux |windows ](>= 1.0.0.20101221), scratchbox-core [windows|darwin](>= 1.0.17)
+Path :
+SHA256 : your_checksum
+Description : this is my first
#!/bin/sh
rm -rf ~/.build_tools/pkg_server/pkgsvr01
rm -rf `pwd`/pkgsvr01
-../pkg-svr create -n pkgsvr01 -d unstable
-../pkg-svr start -n pkgsvr01
+ruby -d ../pkg-svr create -n pkgsvr01 -d unstable
+ruby -d ../pkg-svr add-os -n pkgsvr01 -d unstable -o ubuntu-32
+ruby -d ../pkg-svr add-os -n pkgsvr01 -d unstable -o windows-32
+ruby -d ../pkg-svr start -n pkgsvr01
end
def is_succeeded?(results)
+ i = 0
@expected_results.each do |e|
- found = false
- results.each do |r|
- if r.include? e then
- found = true
- break
- end
+ # each expected line must be contained in the output line at the same index;
+ # also guard against output that is shorter than the expected list
+ if results[i].nil? or not results[i].include? e then
+ return false
 end
- if not found then return false end
+ i += 1
end
return true
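
With the change above, result matching becomes positional: the Nth expected line must be contained in the Nth output line, rather than anywhere in the output. A small self-contained Ruby illustration of the new rule, using made-up expected/output arrays:

# Positional matching as implemented above: order and line index now matter.
expected = ["package server [temp_local] created",
            "distribution [stable] added"]

in_order     = ["package server [temp_local] created successfully",
                "distribution [stable] added successfully"]
out_of_order = in_order.reverse

def ordered_match?(expected, output)
  expected.each_with_index do |e, i|
    return false if output[i].nil?
    return false unless output[i].include? e
  end
  true
end

puts ordered_match?(expected, in_order)      # => true
puts ordered_match?(expected, out_of_order)  # => false: right lines, wrong order
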
require '../src/common/parser'
require '../src/common/package'
-alist = Parser.read_pkg_list "pkg-list"
-a_list = alist.values
-a_list.each do |l|
+alist = Parser.read_multy_pkginfo_from "pkg-list-local"
+alist.each do |l|
l.print
-end
+end
#!/bin/sh
echo "============ remove 1 =============="
-../pkg-svr remove -n temp_local
+../pkg-svr remove -n temp_local --force
echo "============ remove 2 =============="
-../pkg-svr remove -n temp_remote
+../pkg-svr remove -n temp_remote --force
echo "============ remove 3 =============="
-../pkg-svr remove -n temp_remote_dup
+../pkg-svr remove -n temp_remote_dup --force
echo "============ remove 4 =============="
-../pkg-svr remove -n temp_remote_snap
+../pkg-svr remove -n temp_remote_snap --force
echo "============ create 1 =============="
../pkg-svr create -n temp_local -d unstable
echo "============ create 2 =============="
-../pkg-svr create -n temp_remote -d unstable
+../pkg-svr create -n temp_remote -d unstable -u http://172.21.17.55/private/develop
echo "============ create 3 =============="
-../pkg-svr create -n temp_remote_dup -d unstable -u temp_remote/unstable
+../pkg-svr create -n temp_remote_dup -d unstable -u temp_local/unstable
echo "============ add dist 1 =============="
../pkg-svr add-dist -n temp_local -d stable
echo "============ sync 1 =============="