From: donghee Date: Tue, 18 Sep 2012 08:05:19 +0000 (+0900) Subject: Update Tizen 2.0 SDK source code X-Git-Tag: 2.0_alpha^0 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3463d6740a5ad48e7b9ce3f89886639136c2e840;p=sdk%2Ftools%2Fsdk-build.git Update Tizen 2.0 SDK source code Change-Id: I8625ecad315e7cb9a07e52cfff0bc594eda4f4f0 --- diff --git a/README b/README index e54ec3f..f373da2 100644 --- a/README +++ b/README @@ -142,7 +142,7 @@ Building a SDK package is very simple. Here is the command for buiding package. ## pkg-build [-u ] [-o ] [-c ] [-r ] ## -u : Package server URL which contains binary and development packages. ## If ommited, it will use previous server URL. - ## -o : Target OS(linux or windows) + ## -o : Target OS(ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64) ## -c : Clean build"" ## If set, start build after downloading all dependent packages ## If not set, it will not download dependent packages if already downloaded @@ -195,7 +195,7 @@ There are more useful commands provided You can list up available packages of server. ## pkg-cli list-rpkg [-o ] [-u ] - ## -o : Target OS(linux or windows) + ## -o : Target OS(ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64) ## -u : Package server URL which contains binary and development packages. ## If ommited, it will use previous server URL. diff --git a/build-cli b/build-cli index 4bec172..8207598 100755 --- a/build-cli +++ b/build-cli @@ -1,4 +1,4 @@ -#!/usr/bin/ruby -d +#!/usr/bin/ruby =begin @@ -36,75 +36,370 @@ require "utils" require "BuildClientOptionParser" require "BuildComm" + + #option parsing -option = option_parse +begin + option = option_parse +rescue => e + puts e.message + exit 0 +end + + +# check HOST OS +if not Utils.check_host_OS() then + puts "Error: Your host OS is not supported!" + exit 1 +end + +def query( ip, port, sym ) + client = BuildCommClient.create( ip, port, nil, 0 ) + if client.nil? then + puts "Connection to server failed!" + return nil + end + client.send "QUERY|#{sym.strip}" + result = client.receive_data() + client.terminate + return result +end + +def query_system_info(ip, port) + # HOST SYSTEM INFO + puts "* SYSTEM INFO *" + data = query( ip, port, "SYSTEM") + if data.nil? then exit 1 end + + result = data[0].split(",").map { |x| x.strip } + puts "HOST-OS: #{result[0]}" + puts "MAX_WORKING_JOBS: #{result[1]}" + + # FTP INFO + puts "\n* FTP *" + data = query(ip, port, "FTP") + if data.nil? then exit 1 end + + result = data[0].split(",").map { |x| x.strip } + puts "FTP_ADDR: #{result[0]}" + puts "FTP_USERNAME: #{result[1]}" + + # SUPPORTED OS INFO + puts "\n* SUPPORTED OS LIST *" + data = query(ip, port, "OS") + if data.nil? then exit 1 end + + data.each do |item| + puts "#{item.strip}" + end + + # Friend lists + puts "\n* FRIEND SERVER LIST (WAIT|WORK/MAX) jobs [transfer count] *" + data = query(ip, port, "FRIEND") + if data.nil? then exit 1 end + i = 0 + data.each do |item| + i = i + 1 + info = item.split(",").map { |x| x.strip } + if info[0] == "DISCONNECTED" then + puts "#{i}. #{info[0]}" + else + puts "#{i}. #{info[0]} #{info[1]} server (#{info[2]}|#{info[3]}/#{info[4]}) [#{info[5]}]" + end + end +end + + +def query_project_list(ip, port) + puts "* PROJECT(S) *" + data = query( ip, port, "PROJECT") + data.each do |item| + tok = item.split(",").map { |x| x.strip } + type = (tok[0]=="G" ? 
"NORMAL":"REMOTE") + printf("%-25s %s\n",tok[1],type) + end +end -# if "--os" is not specified, use host os type + +def query_job_list(ip, port) + puts "* JOB(S) *" + data = query(ip, port, "JOB") + data.each do |item| + tok = item.split(",").map { |x| x.strip } + if tok[3].nil? then + puts "#{tok[1]} #{tok[0]} #{tok[2]}" + else + puts "#{tok[1]} #{tok[0]} #{tok[2]} (#{tok[3]})" + end + end +end + + +# if "--os" is not specified, use pe if option[:os].nil? then - option[:os] = Utils::HOST_OS -else - if not option[:os] =~ /^(linux|windows|darwin)$/ then - puts "We have no plan to Buld OS \"#{option[:os]}\" \n please check your option OS " - exit 1 - end + option[:os] = "default" end if option[:domain].nil? then + puts "Warn: Build server IP address is not specified. 127.0.0.1 will be used" option[:domain] = "127.0.0.1" end -if option[:port].nil? then - option[:port] = 2222 -end - begin case option[:cmd] when "build" - client = BuildCommClient.create( option[:domain], option[:port]) + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + client = BuildCommClient.create( result[0], result[1], nil, 0 ) if not client.nil? then - client.send "BUILD,GIT,#{option[:git]},#{option[:commit]},#{option[:os]},,#{option[:async]}" + client.send "BUILD|GIT|#{option[:project]}|#{option[:passwd]}|#{option[:os]}|#{option[:async]}|#{option[:noreverse]}" client.print_stream client.terminate + else + puts "Connection to server failed!" + exit 1 end when "resolve" - client = BuildCommClient.create( option[:domain], option[:port]) + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + client = BuildCommClient.create( result[0], result[1], nil, 0 ) if not client.nil? then - client.send "RESOLVE,GIT,#{option[:git]},#{option[:commit]},#{option[:os]},,#{option[:async]}" + client.send "RESOLVE|GIT|#{option[:project]}|#{option[:passwd]}|#{option[:os]}|#{option[:async]}" client.print_stream client.terminate end when "query" - # SYSTEM INFO - client = BuildCommClient.create( option[:domain], option[:port]) - if not client.nil? then - client.send "QUERY,SYSTEM" - result0 = client.receive_data() - if result0.nil? then - client.terminate - exit(-1) + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + + query_system_info( result[0], result[1] ) + puts "" + query_project_list( result[0], result[1]) + puts "" + query_job_list( result[0], result[1]) + + when "query-system" + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + + query_system_info( result[0], result[1] ) + + when "query-project" + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + + query_project_list( result[0], result[1]) + + when "query-job" + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." 
+ puts " :" + exit 1 + end + + query_job_list( result[0], result[1] ) + + when "cancel" + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + if not option[:job].nil? then + client = BuildCommClient.create( result[0], result[1], nil, 0 ) + if not client.nil? then + client.send "CANCEL|#{option[:job]}|#{option[:passwd]}" + result1 = client.receive_data() + if result1.nil? then + client.terminate + exit(-1) + end + puts result1 + else + puts "Connection to server failed!" + exit 1 end - result0 = result0[0].split(",").map { |x| x.strip } - puts "HOST-OS: #{result0[0]}" - puts "MAX_WORKING_JOBS: #{result0[1]}" + else + puts "you must input \"cancel job number\"!!" + exit 1 + end + + when "register" + # check file exist + if not File.exist? option[:package] then + puts "The file does not exist!.. #{option[:package]}" + exit(-1) + end + + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + bs_ip = result[0] + bs_port = result[1] + + ftp_result = Utils.parse_ftpserver_url(option[:fdomain]) + if ftp_result.nil? or ftp_result.length != 4 then + puts "FTP server url is incorrect. (#{option[:fdomain]})" + puts "Tune as following format." + puts " ftp://:@
" + exit 1 + end + ip = ftp_result[0] + port = ftp_result[1] + username = ftp_result[2] + passwd = ftp_result[3] + + # upload + client = BuildCommClient.create( bs_ip, bs_port, nil, 0 ) + if client.nil? then + puts "Can't access server #{bs_ip}:#{bs_port}" + exit(-1) + end + dock = Utils.create_uniq_name() + msg = "UPLOAD|#{dock}" + client.send( msg ) + result = client.send_file(ip, port, username, passwd, option[:package]) + client.terminate + if not result then + puts "Uploading file failed!.. #{option[:package]}" + exit(-1) + end + + # register + client = BuildCommClient.create( bs_ip, bs_port, nil, 0 ) + if client.nil? then + puts "Can't access server #{bs_ip}:#{bs_port}" + exit(-1) + end + client.send("REGISTER|BINARY|#{File.basename(option[:package])}|#{option[:passwd]}|#{dock}") + client.print_stream + client.terminate + + # for test + when "upload" + # check file exist + if not File.exist? option[:file] then + puts "The file does not exist!.. #{option[:file]}" + exit(-1) + end + + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + + # FTP INFO + client = BuildCommClient.create( result[0], result[1], nil, 0 ) + if client.nil? then + puts "Can't access server #{result[0]}:#{result[1]}" + exit(-1) + end + client.send "QUERY|FTP" + result0 = client.receive_data() + if result0.nil? then client.terminate + exit(-1) end + result0 = result0[0].split(",").map { |x| x.strip } + ip = result0[0] + username = result0[1] + passwd = result0[2] + client.terminate - # JOB INFO - client = BuildCommClient.create( option[:domain], option[:port]) - if not client.nil? then - client.send "QUERY,JOB" - result1 = client.receive_data() - if result0.nil? then - client.terminate - exit(-1) - end - puts "* JOB *" - for item in result1 - tok = item.split(",").map { |x| x.strip } - puts "#{tok[1]} #{tok[0]} #{tok[2]}" - end + client = BuildCommClient.create( result[0], result[1], nil, 0 ) + if client.nil? then + puts "Can't access server #{result[0]}:#{result[1]}" + exit(-1) + end + client.send("UPLOAD") + result = client.send_file(ip, username, passwd, option[:file]) + client.terminate + if not result then + puts "Uploading file failed!.. #{option[:file]}" + exit(-1) + else + puts "Uploading file succeeded!" + end + + when "download" + result = Utils.parse_server_addr(option[:domain]) + if result.nil? then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + + # FTP INFO + client = BuildCommClient.create( result[0], result[1], nil, 0 ) + if client.nil? then + puts "Can't access server #{result[0]}:#{result[1]}" + exit(-1) end + client.send "QUERY|FTP" + result0 = client.receive_data() + if result0.nil? then + client.terminate + exit(-1) + end + result0 = result0[0].split(",").map { |x| x.strip } + ip = result0[0] + username = result0[1] + passwd = result0[2] + client.terminate + # download + client = BuildCommClient.create( result[0], result[1], nil, 0 ) + if client.nil? then + puts "Can't access server #{result[0]}:#{result[1]}" + exit(-1) + end + file_name = option[:file] + client.send("DOWNLOAD|#{file_name}") + result = client.receive_file(ip, username, passwd, "./#{file_name}") + client.terminate + if not result then + puts "Downloading file failed!.. #{option[:file]}" + exit(-1) + else + puts "Downloading file succeeded!" 
+ end else raise RuntimeError, "input option incorrect : #{option[:cmd]}" end diff --git a/build-svr b/build-svr index 66aed20..fd881ff 100755 --- a/build-svr +++ b/build-svr @@ -1,4 +1,4 @@ -#!/usr/bin/ruby -d +#!/usr/bin/ruby -d =begin @@ -32,6 +32,7 @@ require 'fileutils' $LOAD_PATH.unshift File.dirname(__FILE__)+"/src/common" $LOAD_PATH.unshift File.dirname(__FILE__)+"/src/build_server" require "utils" +require "log.rb" require "BuildServerOptionParser" require "BuildServerController" @@ -39,49 +40,113 @@ require "BuildServerController" begin option = option_parse rescue => e - puts "Option parse error" puts e.message exit 0 end +# check HOST OS +if not Utils.check_host_OS() then + puts "Error: Your host OS is not supported!" + exit 1 +end -# if "--os" is not specified, use host os type +# if "--os" is not specified, set it as default if option[:os].nil? then - host_os = `uname -s`.strip - case host_os - when "Linux" - option[:os] = "linux" - when /MINGW32.*/ - option[:os] = "windows" - when "Darwin" - option[:os] = "darwin" - else - if not option[:os] =~ /^(linux|windows|darwin)$/ then - puts "We have no plan to Buld OS \"#{option[:os]}\" \n please check your option OS " - exit 1 - end - end -else - if not option[:os] =~ /^(linux|windows|darwin)$/ then - puts "We have no plan to Buld OS \"#{option[:os]}\" \n please check your option OS " - exit 1 - end + option[:os] = "default" end - - begin - case option[:cmd] - when "create" - BuildServerController.create_server( option[:name], Utils::WORKING_DIR, option[:url], option[:domain], option[:pid] ) + case option[:cmd] + when "create" + svr_result = Utils.parse_server_addr(option[:domain]) + if svr_result.nil? or svr_result.length != 2 then + puts "Server address is incorrect. (#{option[:domain]})" + puts "Tune as following format." + puts " :" + exit 1 + end + ftp_result = Utils.parse_ftpserver_url(option[:fdomain]) + if ftp_result.nil? or ftp_result.length != 4 then + puts "FTP server url is incorrect. (#{option[:fdomain]})" + puts "Tune as following format." + puts " ftp://:@
:" + exit 1 + end + pkgsvr_addr = svr_result[0] + pkgsvr_port = svr_result[1] + ftpsvr_addr = ftp_result[0] + ftpsvr_port = ftp_result[1] + ftpsvr_username = ftp_result[2] + ftpsvr_passwd = ftp_result[3] + BuildServerController.create_server( option[:name], Utils::WORKING_DIR, option[:url], pkgsvr_addr, pkgsvr_port, option[:pid], ftpsvr_addr, ftpsvr_port, ftpsvr_username, ftpsvr_passwd ) when "remove" BuildServerController.remove_server( option[:name] ) when "start" - BuildServerController.start_server( option[:name], option[:port] ) + if( option[:child] ) then # Child Process + BuildServerController.start_server( option[:name], option[:port] ) + else # Parent Process + log = Log.new( "#{BuildServer::CONFIG_ROOT}/#{option[:name]}/main.log" ) + begin + while(true) + log.info "Build Server[#{option[:name]}] Start - PORT:[#{option[:port]}]" + # Start child process + cmd = Utils.execute_shell_generate("#{File.dirname(__FILE__)}/build-svr start -n #{option[:name]} -p #{option[:port]} --CHILD") + IO.popen(cmd) + pid = Process.wait + + # End chlid process + log.info "Child process terminated, pid = #{pid}, status = #{$?.exitstatus}" + if ($?.exitstatus == 0) then # SERVER STOP COMMAND + log.info "Down Build Server." + break + elsif ($?.exitstatus == 99) then # DIBS UPGRADE + cmd = "#{File.dirname(__FILE__)}/upgrade -l #{File.dirname(__FILE__)} -S -t BUILDSERVER -n #{option[:name]} -p #{option[:port]}" + cmd = Utils.execute_shell_generate(cmd) + puts cmd + Utils.spawn(cmd) + log.info cmd + log.info "Down Build Server for DIBS upgrade." + break + else + log.error "Down Build Server. Try reboot Build Server." + end + end + rescue => e + log.error( e.message, Log::LV_USER) + end + end when "stop" BuildServerController.stop_server( option[:name] ) - when "add" - BuildServerController.add_friend_server( option[:name], option[:domain], option[:port] ) + when "upgrade" + BuildServerController.upgrade_server( option[:name] ) + when "add-svr" + if not option[:domain].nil? then + svr_result = Utils.parse_server_addr(option[:domain]) + if svr_result.nil? or svr_result.length != 2 then + puts "Server address is incorrect. Tune as following format." + puts " :" + exit 1 + end + pkgsvr_addr = svr_result[0] + pkgsvr_port = svr_result[1] + BuildServerController.add_friend_server( option[:name], pkgsvr_addr, pkgsvr_port ) + elsif not option[:url].nil? then + BuildServerController.add_remote_package_server( option[:name], option[:url], option[:proxy] ) + end + when "add-prj" + if not option[:git].nil? 
then + BuildServerController.add_project( option[:name], option[:pid], + option[:git], option[:branch], option[:remote], option[:passwd], option[:os] ) + else + BuildServerController.add_binary_project( option[:name], option[:pid], + option[:package], option[:passwd], option[:os] ) + end + when "add-os" + BuildServerController.add_target_os( option[:name], option[:os] ) + when "fullbuild" + BuildServerController.build_all_projects( option[:name] ) + when "register" + BuildServerController.register_package( option[:name], option[:package] ) else raise RuntimeError, "input option incorrect : #{option[:cmd]}" end diff --git a/doc/DIBS_Advanced_Guide.pdf b/doc/DIBS_Advanced_Guide.pdf new file mode 100644 index 0000000..4e90588 Binary files /dev/null and b/doc/DIBS_Advanced_Guide.pdf differ diff --git a/doc/Tizen_SDK_Development_Guide.pdf b/doc/Tizen_SDK_Development_Guide.pdf new file mode 100644 index 0000000..74fba2e Binary files /dev/null and b/doc/Tizen_SDK_Development_Guide.pdf differ diff --git a/doc/Tizen_SDK_Package_Guide.pdf b/doc/Tizen_SDK_Package_Guide.pdf new file mode 100644 index 0000000..5fd43ac Binary files /dev/null and b/doc/Tizen_SDK_Package_Guide.pdf differ diff --git a/package/build.linux b/package/build.linux index 7ff849a..05ef573 100755 --- a/package/build.linux +++ b/package/build.linux @@ -15,20 +15,17 @@ build() # install install() { - BIN_DIR=$SRCDIR/package/dibs.package.${BUILD_TARGET_OS}/data/dev_tools - DOC_DIR=$SRCDIR/package/dibs.package.${BUILD_TARGET_OS}/data/dev_tools/doc + BIN_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/ + DOC_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/documents mkdir -p $BIN_DIR mkdir -p $DOC_DIR cp -f $SRCDIR/pkg-* $BIN_DIR/ cp -f $SRCDIR/build-* $BIN_DIR/ cp -rf $SRCDIR/src $BIN_DIR/ - cp -rf $SRCDIR/src $BIN_DIR/ + cp -f $SRCDIR/upgrade $BIN_DIR/ cp -f $SRCDIR/AUTHORS $DOC_DIR/ cp -f $SRCDIR/LICENSE $DOC_DIR/ cp -f $SRCDIR/NOTICE $DOC_DIR/ + cp -f $SRCDIR/doc/* $DOC_DIR/ + echo $VERSION > $BIN_DIR/VERSION } - - -$1 -echo "$1 success" - diff --git a/package/build.macos b/package/build.macos new file mode 100644 index 0000000..05ef573 --- /dev/null +++ b/package/build.macos @@ -0,0 +1,31 @@ +#!/bin/sh -xe +# clean +clean() +{ + rm -rf $SRCDIR/*.zip + rm -rf $SRCDIR/*.tar.gz +} + +# build +build() +{ + echo "build" +} + +# install +install() +{ + BIN_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/ + DOC_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/documents + mkdir -p $BIN_DIR + mkdir -p $DOC_DIR + cp -f $SRCDIR/pkg-* $BIN_DIR/ + cp -f $SRCDIR/build-* $BIN_DIR/ + cp -rf $SRCDIR/src $BIN_DIR/ + cp -f $SRCDIR/upgrade $BIN_DIR/ + cp -f $SRCDIR/AUTHORS $DOC_DIR/ + cp -f $SRCDIR/LICENSE $DOC_DIR/ + cp -f $SRCDIR/NOTICE $DOC_DIR/ + cp -f $SRCDIR/doc/* $DOC_DIR/ + echo $VERSION > $BIN_DIR/VERSION +} diff --git a/package/build.windows b/package/build.windows new file mode 100644 index 0000000..05ef573 --- /dev/null +++ b/package/build.windows @@ -0,0 +1,31 @@ +#!/bin/sh -xe +# clean +clean() +{ + rm -rf $SRCDIR/*.zip + rm -rf $SRCDIR/*.tar.gz +} + +# build +build() +{ + echo "build" +} + +# install +install() +{ + BIN_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/tools/dibs/ + DOC_DIR=$SRCDIR/package/dibs.package.${TARGET_OS}/data/documents + mkdir -p $BIN_DIR + mkdir -p $DOC_DIR + cp -f $SRCDIR/pkg-* $BIN_DIR/ + cp -f $SRCDIR/build-* $BIN_DIR/ + cp -rf $SRCDIR/src $BIN_DIR/ + cp -f $SRCDIR/upgrade $BIN_DIR/ + cp -f $SRCDIR/AUTHORS $DOC_DIR/ + cp -f $SRCDIR/LICENSE $DOC_DIR/ + cp -f $SRCDIR/NOTICE 
$DOC_DIR/ + cp -f $SRCDIR/doc/* $DOC_DIR/ + echo $VERSION > $BIN_DIR/VERSION +} diff --git a/package/pkginfo.manifest b/package/pkginfo.manifest index 8f8eda2..cf38c95 100644 --- a/package/pkginfo.manifest +++ b/package/pkginfo.manifest @@ -1,15 +1,8 @@ -Package : dibs -Version : 0.20.9 -Maintainer : taejun ha, jiil hyoun , , donghee yang< donghee.yang@samsung.com > -Description : Distribute Inteligent Build System -OS : linux -Build-host-os : linux Source : dibs +Version :1.0.6 +Maintainer : taejun ha, jiil hyoun , donghyuk yang , donghee yang , sungmin kim , jiil hyoun , , donghee yang< donghee.yang@samsung.com > +OS : ubuntu-32, ubuntu-64, windows-32, windows-64, macos-64 +Build-host-os : ubuntu-32 Description : Distribute Inteligent Build System -OS : windows -Build-host-os : linux -Source : dibs diff --git a/package/pkginfo.manifest.local b/package/pkginfo.manifest.local new file mode 100644 index 0000000..ce655de --- /dev/null +++ b/package/pkginfo.manifest.local @@ -0,0 +1,6 @@ +Include: pkginfo.manifest + +Package : dibs +OS : ubuntu-32, windows-32, macos-64, ubuntu-64, windows-64 +Build-host-os : windows-32, macos-64, ubuntu-64, windows-64 +Description : Distribute Inteligent Build System diff --git a/pkg-build b/pkg-build index 22b58f3..39e7882 100755 --- a/pkg-build +++ b/pkg-build @@ -36,18 +36,24 @@ require "packageServer" require "Builder" require "optionparser" -option = parse +begin + option = parse +rescue => e + puts e.message + exit 0 +end #generate server when local package server is not set +# check HOST OS +if not Utils.check_host_OS() then + puts "Error: Your host OS is not supported!" + exit 1 +end + # if "--os" is not specified, use host os type if option[:os].nil? then option[:os] = Utils::HOST_OS -else - if not option[:os] =~ /^(linux|windows|darwin)$/ then - puts "We have no plan to Buld OS \"#{option[:os]}\" \n please check your option OS " - exit 1 - end end path = Dir.pwd @@ -62,25 +68,27 @@ if not option[:url].nil? then builder = Builder.get("default") if builder.pkgserver_url != option[:url] then puts "Package server URL has been changed! Creating new builder..." - builder = Builder.create("default", option[:url], nil) + builder = Builder.create("default", option[:url], nil, nil, nil) end rescue puts "Default builder does not exist! Creating new builder..." - builder = Builder.create("default", option[:url], nil) + builder = Builder.create("default", option[:url], nil, nil, nil) end else # if url is not specified begin builder = Builder.get("default") rescue puts "Default builder does not exist! Creating new builder..." - builder = Builder.create("default", "http://172.21.111.132/pkgserver/unstable",nil) + builder = Builder.create("default", "http://172.21.111.132/pkgserver/unstable",nil, nil, nil) end end #build project -if not builder.build( Utils::WORKING_DIR, option[:os], option[:clean], option[:rev], [], []) then +if not builder.build( Utils::WORKING_DIR, option[:os], option[:clean], [], true) then puts "Build Failed!" + exit 1 else puts "Build Succeeded!" + exit 0 end diff --git a/pkg-clean b/pkg-clean index de48dce..9315d88 100755 --- a/pkg-clean +++ b/pkg-clean @@ -46,6 +46,12 @@ option = parse #generate server when local package server is not set +# check HOST OS +if not Utils.check_host_OS() then + puts "Error: Your host OS is not supported!" 
+ exit 1 +end + begin builder = Builder.get("default") rescue diff --git a/pkg-cli b/pkg-cli index 3a04581..df9a164 100755 --- a/pkg-cli +++ b/pkg-cli @@ -42,93 +42,73 @@ require "packageServer" #set global variable @WORKING_DIR = nil -$log = Logger.new('.log', 'monthly') - #option parsing begin option = option_parse rescue => e # if option parse error print help message - $log.error "option parsing error" - system "#{__FILE__} help" + puts e.message exit 0 end +# check HOST OS +if not Utils.check_host_OS() then + puts "Error: Your host OS is not supported!" + exit 1 +end #if "--os" is not specfied, use host os type if option[:os].nil? then - system_type = `uname -s` - case system_type.strip - when "Linux" then - option[:os] = "linux" - when /MINGW32.*/ then - option[:os] = "windows" - when "Darwin" then - option[:os] = "darwin" - else - raise RuntimeError, "Unknown OS type : #{system_type}" - end + option[:os] = Utils::HOST_OS end case option[:cmd] when "update" then client = Client.new( option[:url], nil, nil ) - client.update() + #client.update() when "clean" then client = Client.new( nil, option[:loc], nil ) client.clean(option[:f]) when "download" then client = Client.new( option[:url], option[:loc], nil ) - if not option[:url].nil? then - client.update() - end + #if not option[:url].nil? then + # client.update() + #end file_loc = client.download( option[:pkg], option[:os], option[:t] ) -when "upload" then - client = Client.new( nil, nil, nil ) - result = client.upload( option[:alias], option[:id], option[:binpkg], option[:srcpkg], false ) - if not result.nil? then - puts result - end -when "source" then - client = Client.new( option[:url], option[:loc], nil ) - if not option[:url].nil? then - client.update() - end - client.download_source( option[:pkg], option[:os] ) when "install" then client = Client.new( option[:url], option[:loc], nil ) - if not option[:url].nil? then - client.update() - end - client.install( option[:pkg], option[:os], option[:t], option[:f] ) + #if not option[:url].nil? then + # client.update() + #end + client.install( option[:pkg], option[:os], option[:t], option[:f] ) when "install-file" then - client = Client.new( nil, option[:loc], nil ) - client.install_local_pkg( option[:pkg], option[:f] ) + client = Client.new( option[:url], option[:loc], nil ) + client.install_local_pkg( option[:pkg], option[:t], option[:f] ) when "uninstall" then client = Client.new( nil, option[:loc], nil ) client.uninstall( option[:pkg], option[:t] ) when "upgrade" then client = Client.new( option[:url], option[:loc], nil ) - if not option[:url].nil? then - client.update() - end + #if not option[:url].nil? then + # client.update() + #end client.upgrade( option[:os], option[:t] ) when "check-upgrade" then client = Client.new( option[:url], option[:loc], nil ) - if not option[:url].nil? then - client.update() - end + #if not option[:url].nil? then + # client.update() + #end client.check_upgrade( option[:os] ) when "show-rpkg" then client = Client.new( option[:url], nil, nil ) - if not option[:url].nil? then - client.update() - end + #if not option[:url].nil? then + # client.update() + #end puts client.show_pkg_info( option[:pkg], option[:os] ) when "list-rpkg" then client = Client.new( option[:url], nil, nil ) - if not option[:url].nil? then - client.update() - end + #if not option[:url].nil? then + # client.update() + #end result = client.show_pkg_list( option[:os] ) if not result.nil? and not result.empty? 
then result.each do |i| @@ -175,6 +155,6 @@ when "install-dep" then ret[-3..-1] = "" puts ret else - raise RuntimeError, "input option incorrect : #{option[:cmd]}" + raise RuntimeError, "Input is incorrect : #{option[:cmd]}" end diff --git a/pkg-svr b/pkg-svr index 7a80d61..3e3d597 100755 --- a/pkg-svr +++ b/pkg-svr @@ -39,10 +39,7 @@ require "serverOptParser" begin option = option_parse rescue => e - puts "\n=============== Error occured ==============================" puts e.message - puts e.backtrace.inspect - puts "=============================================================\n" exit 0 end @@ -66,15 +63,15 @@ begin when "create" server.create( option[:id], option[:dist], option[:url], option[:loc] ) when "register" - server.register( option[:spkgs], option[:bpkgs], option[:dist], option[:gensnap], option[:test] ) + server.register( option[:pkgs], option[:dist], option[:gensnap], option[:test], false ) when "gen-snapshot" - server.generate_snapshot( option[:snap], option[:dist], option[:bsnap], option[:bpkgs] ) + server.generate_snapshot( option[:snaps][0], option[:dist], option[:bsnap] ) when "sync" server.sync( option[:dist], option[:force] ) when "add-dist" server.add_distribution( option[:dist], option[:url], option[:clone] ) - when "spkg-path" - server.find_source_package_path( option[:dist], option[:spkgs] ) + when "add-os" + server.add_os( option[:dist], option[:os] ) when "remove" if not option[:force] then puts "Do you want to really? then input \"YES\"" @@ -87,17 +84,35 @@ begin end end - server.remove_server( option[:id] ) + server.remove_server() + when "remove-dist" + if not option[:force] then + puts "Do you want to really? then input \"YES\"" + input = $stdin.gets.strip + if input.eql? "YES" then + puts "Remove server!" + else + puts "Remove is canceled by user input" + exit(0) + end + end + + server.remove_dist( option[:dist] ) when "remove-pkg" - server.remove_pkg( option[:id], option[:dist], option[:bpkgs], option[:os] ) + server.remove_pkg( option[:dist], option[:pkgs], option[:os] ) + when "remove-snapshot" + server.remove_snapshot( option[:dist], option[:snaps] ) + when "clean" + server.clean( option[:dist], option[:snaps] ) + when "start" + server.start( option[:port], option[:passwd] ) + when "stop" + server.stop( option[:port], option[:passwd] ) else raise RuntimeError, "input option incorrect : #{option[:cmd]}" end rescue => e - puts "\n=============== Error occured ==============================" puts e.message - puts e.backtrace.inspect - puts "=============================================================\n" end diff --git a/src/build_server/BinaryUploadProject.rb b/src/build_server/BinaryUploadProject.rb new file mode 100644 index 0000000..28f81ea --- /dev/null +++ b/src/build_server/BinaryUploadProject.rb @@ -0,0 +1,91 @@ +=begin + + BinaryUploadProject.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +Contributors: +- S-Core Co., Ltd +=end + +require 'fileutils' +$LOAD_PATH.unshift File.dirname(__FILE__) +require "CommonProject.rb" +require "RegisterPackageJob.rb" +require "Version.rb" +require "PackageManifest.rb" + + +class BinaryUploadProject < CommonProject + attr_accessor :pkg_name + + # initialize + def initialize( name, pkg_name, server, os_list ) + super(name, "BINARY", server, os_list) + @pkg_name = pkg_name + end + + + # create new job + def create_new_job( filename, dock = "0" ) + new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3') + pkg_name = new_name.split(",")[0] + os = new_name.split(",")[2] + + # check file name + if @pkg_name != pkg_name then return nil end + + # check os name + if not @server.supported_os_list.include? os then return nil end + + # check package info + file_path = "#{@server.transport_path}/#{dock}/#{filename}" + if not File.exist? file_path then return nil end + + pkginfo_dir = "#{@server.path}/projects/#{@name}/pkginfos" + if not File.exist? pkginfo_dir then FileUtils.mkdir_p pkginfo_dir end + if not Utils.extract_a_file(file_path, "pkginfo.manifest", pkginfo_dir) then + return nil + end + begin + pkginfo =PackageManifest.new("#{pkginfo_dir}/pkginfo.manifest") + rescue => e + puts e.message + return nil + end + pkgs = pkginfo.get_target_packages(os) + if pkgs.count != 1 then return nil end + if pkgs[0].package_name != @pkg_name then return nil end + + new_job = RegisterPackageJob.new( file_path, self, @server ) + + return new_job + end + + + def include_package?(name, version=nil, os=nil) + if name == @pkg_name then + return true + else + return false + end + end +end diff --git a/src/build_server/BuildClientOptionParser.rb b/src/build_server/BuildClientOptionParser.rb index 1adf0b0..8cc9e60 100644 --- a/src/build_server/BuildClientOptionParser.rb +++ b/src/build_server/BuildClientOptionParser.rb @@ -26,51 +26,151 @@ Contributors: - S-Core Co., Ltd =end +$LOAD_PATH.unshift File.dirname(__FILE__)+"/src/common" require 'optparse' +require 'utils' -def option_parse +def option_error_check( options ) + case options[:cmd] + + when "build" then + if options[:project].nil? or options[:project].empty? or + options[:domain].nil? or options[:domain].empty? then + raise ArgumentError, "Usage: build-cli build -N -d [-o ] [-w ] [--async]" + end + + when "resolve" then + if options[:project].nil? or options[:project].empty? or + options[:domain].nil? or options[:domain].empty? then + raise ArgumentError, "Usage: build-cli resolve -N -d [-o ] [-w ] [--async]" + end + + when "query" then + if options[:domain].nil? or options[:domain].empty? then + raise ArgumentError, "Usage: build-cli query -d " + end + + when "query-system" then + if options[:domain].nil? or options[:domain].empty? then + raise ArgumentError, "Usage: build-cli query-system -d " + end + + when "query-project" then + if options[:domain].nil? or options[:domain].empty? then + raise ArgumentError, "Usage: build-cli query-project -d " + end + + when "query-job" then + if options[:domain].nil? or options[:domain].empty? then + raise ArgumentError, "Usage: build-cli query-job -d " + end + + when "cancel" then + if options[:job].nil? or options[:job].empty? or + options[:domain].nil? or options[:domain].empty? then + raise ArgumentError, "Usage: build-cli cancel -j -d [-w ]" + end + when "register" then + if options[:package].nil? or options[:package].empty? or + options[:domain].nil? or options[:domain].empty? or + options[:fdomain].nil? or options[:fdomain].empty? 
then + raise ArgumentError, "Usage: build-cli register -P -d -t [-w ]" + end + + else + raise ArgumentError, "Input is incorrect : #{options[:cmd]}" + end + + if ARGV.length > 1 then + raise ArgumentError, "Unknown argument value : #{ARGV[1]}" + end +end + +def option_parse options = {} - banner = "Usage: build-cli {build|resolve|query} ..." + "\n" \ - + "\t" + "build-cli build -g -c [-d ] [-p ] [-o ] [-a ] " + "\n" \ - + "\t" + "build-cli resolve -g -c [-d ] [-p ] [-o ] [-a ] " + "\n" \ - + "\t" + "build-cli query [-d ] [-p ]" + "\n" + banner = "Requiest service to build-server command-line tool." + "\n" \ + + "\n" + "Usage: build-cli [OPTS] or build-cli (-h|-v)" + "\n" \ + + "\n" + "Subcommands:" + "\n" \ + + "\t" + "build Build and create package." + "\n" \ + + "\t" + "resolve Request change to resolve-status for build-conflict." + "\n" \ + + "\t" + "query Query information about build-server." + "\n" \ + + "\t" + "query-system Query system information about build-server." + "\n" \ + + "\t" + "query-project Query project information about build-server." + "\n" \ + + "\t" + "query-job Query job information about build-server." + "\n" \ + + "\t" + "cancel Cancel a building project." + "\n" \ + + "\t" + "register Register the package to the build-server." + "\n" \ + + "\n" + "Subcommand usage:" + "\n" \ + + "\t" + "build-cli build -N -d [-o ] [-w ] [--async]" + "\n" \ + + "\t" + "build-cli resolve -N -d [-o ] [-w ] [--async]" + "\n" \ + + "\t" + "build-cli query -d " + "\n" \ + + "\t" + "build-cli query-system -d " + "\n" \ + + "\t" + "build-cli query-project -d " + "\n" \ + + "\t" + "build-cli query-job -d " + "\n" \ + + "\t" + "build-cli cancel -j -d [-w ] " + "\n" \ + + "\t" + "build-cli register -P -d -t [-w ] " + "\n" \ + + "\n" + "Options:" + "\n" + + optparse = OptionParser.new(nil, 32, ' '*8) do|opts| - optparse = OptionParser.new do|opts| # Set a banner, displayed at the top # of the help screen. opts.banner = banner - opts.on( '-g', '--git ', 'git repository' ) do|git| - options[:git] = git - end - - opts.on( '-c', '--commit ', 'git commit id/tag' ) do|git| - options[:commit] = git + opts.on( '-N', '--project ', 'project name' ) do|project| + if not Utils.multi_argument_test( project, "," ) then + raise ArgumentError, "Project variable parsing error : #{project}" + end + options[:project] = project end options[:domain] = nil - opts.on( '-d', '--domain ', 'remote build server ip address. default 127.0.0.1' ) do|domain| + opts.on( '-d', '--address ', 'build server address: 127.0.0.1:2224' ) do|domain| options[:domain] = domain end - options[:port] = nil - opts.on( '-p', '--port ', 'remote build server port. 
default 2222' ) do|port| - options[:port] = port - end - options[:os] = nil - opts.on( '-o', '--os ', 'target operating system linux/windows/darwin' ) do|os| + opts.on( '-o', '--os ', 'target operating system: ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64' ) do |os| + if not Utils.multi_argument_test( os, "," ) then + raise ArgumentError, "OS variable parsing error : #{os}" + end options[:os] = os end options[:async] = "NO" - opts.on( '-a', '--async', 'asynchronous job' ) do + opts.on( '--async', 'asynchronous job' ) do options[:async] = "YES" + end + + options[:noreverse] = "NO" + opts.on( '--noreverse', 'do not check reverse build' ) do + options[:noreverse] = "YES" end - opts.on( '-h', '--help', 'display this information' ) do - puts opts + opts.on( '-j', '--job ', 'job number' ) do|job| + options[:job] = job + end + + options[:passwd] = "" + opts.on( '-w', '--passwd ', 'password for managing project' ) do|passwd| + options[:passwd] = passwd + end + + opts.on( '-P', '--pkg ', 'package file path' ) do|package| + options[:package] = package.strip + end + + opts.on( '-t', '--ftp ', 'ftp server url: ftp://dibsftp:dibsftp@127.0.0.1' ) do|domain| + options[:fdomain] = domain + end + + opts.on( '-h', '--help', 'display help' ) do + opts.help.split("\n").each {|op| puts op if not op.include? "--noreverse"} + exit + end + + opts.on( '-v', '--version', 'display version' ) do + puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version() exit end @@ -78,19 +178,26 @@ def option_parse cmd = ARGV[0] - if cmd.eql? "build" or cmd.eql? "resolve" or cmd.eql? "query" or - cmd =~ /(help)|(-h)|(--help)/ then + if cmd.eql? "build" or cmd.eql? "resolve" or + cmd.eql? "query" or cmd.eql? "query-system" or + cmd.eql? "query-project" or cmd.eql? "query-job" or + cmd.eql? "cancel" or + cmd.eql? "register" or + cmd =~ /(-v)|(--version)/ or + cmd =~ /(help)|(-h)|(--help)/ then - if cmd.eql? "help" then + if cmd.eql? "help" then ARGV[0] = "-h" end options[:cmd] = ARGV[0] else - raise ArgumentError, banner + raise ArgumentError, "Usage: build-cli [OPTS] or build-cli -h" end optparse.parse! + + option_error_check options return options end diff --git a/src/build_server/BuildComm.rb b/src/build_server/BuildComm.rb index 94f8bb1..647d755 100644 --- a/src/build_server/BuildComm.rb +++ b/src/build_server/BuildComm.rb @@ -30,16 +30,45 @@ Contributors: $LOAD_PATH.unshift File.dirname(__FILE__)+"/src/common" require "log" +require 'timeout' +require "fileTransfer" +require "net/ftp" +require 'thread' + +ATTEMPTS = ["first", "second", "third"] class BuildCommServer - VERSION = "1.2.0" + VERSION = "1.5.0" + + private_class_method :new - def initialize(port, log) + def initialize(port, log, ftp_url, cache_dir) @port = port - @tcp_server = TCPServer.open( port ) @log = log + @ftp_url = ftp_url + @cache_dir = cache_dir + @tcp_server = TCPServer.open( port ) + @download_cache_mutex = Mutex.new end + def self.create(port, log, ftp_url=nil, cache_dir=nil) + # checking port is available + if port_open? port then + raise "Port \"#{@port}\" is already in use." + end + + if log.nil? then + log = Log.new(nil) + end + + # create cache dir if not nil + if not cache_dir.nil? and not File.exist? cache_dir then + FileUtils.mkdir_p cache_dir + end + + return new(port, log, ftp_url, cache_dir) + end + # wait for connection and handle request def wait_for_connection(quit_loop) @@ -47,8 +76,9 @@ class BuildCommServer req = @tcp_server.accept begin - yield req + yield req if block_given? rescue + @log.error $! 
@log.error "Caught a connection exception" req.close end @@ -89,30 +119,275 @@ class BuildCommServer end + def send_file(req, src_file) + # 1. send "READY" + # 2. If "FTP,ip,username,passwd" is received, + # Upload the src file using server's ftp_url. + # if then ftp_url is nil, use the url on "FTP" message instead + # After uploading, send "UPLOADED,ftp_filepath" + # 3. If "SUCC" is received, remove the file on FTP server + + begin + if not File.exist? src_file then + @log.error "\"#{src_file}\" file does not exist" + req.puts "ERROR" + return false + end + + req.puts "READY" + @log.info "Ready to upload file" + while l = req.gets() + tok = l.split(",").map { |x| x.strip } + cmd = tok[0].strip + if cmd == "FTP" then + if tok.count < 5 then + @log.error "Server received wrong REQ : #{l.strip}" + req.puts "ERROR" + return false + end + + # get ftp connection info + if @ftp_url.nil? then + ip = tok[1].strip + port = tok[2].strip + username = tok[3].strip + passwd = tok[4].strip + @log.info "Server received ftp server infomation from client : [#{ip}, #{port}]" + else + url_contents = Utils.parse_ftpserver_url(@ftp_url) + ip = url_contents[0] + port = url_contents[1] + username = url_contents[2] + passwd = url_contents[3] + end + + # upload to ftp server + ftp_filepath = nil + for attempt in ATTEMPTS + ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file, @log) + if !ftp_filepath.nil? then break; + else @log.info "Server is the #{attempt} upload attempt fails" end + end + if ftp_filepath.nil? then + req.puts "ERROR" + return false + else + @log.info "Server is the #{attempt} successful attempt to upload file: [#{File.basename(src_file)}]" + end + req.puts "UPLOADED,#{ftp_filepath}" + elsif cmd == "SUCC" then + @log.info "Client downloaded file successfully" + FileTransfer.cleandir(ip, port, username, passwd, ftp_filepath, @log) + @log.info "Cleaned temporary dir on FTP server: #{ftp_filepath}" + break + elsif cmd == "ERROR" then + @log.error "Client failed to download file" + return false + end + end + rescue => e + puts "[BuildCommServer] Exception" + @log.error e.message + @log.error e.backtrace.inspect + return false + end + + return true + end + + + # NOTE. dst_file can be directory + def receive_file(req, dst_file) + # 1. send "READY" + # 2. If "UPLOADED,ip,port,file_path,username,passwd" is received, + # Download the file using my ftp_url. + # If ftp_url is nil, use the url on "UPLOADED" messge instead + # After downloading it, send "SUCC" + + begin + req.puts "READY" + while l = req.gets() + tok = l.split(",").map { |x| x.strip } + cmd = tok[0].strip + if cmd == "CHECK_CACHE" then + file_name = tok[1] + file_size = tok[2].to_i + checksum = tok[3] + + # check download cache + if File.exist? dst_file and File.directory? dst_file then + target_file = File.join(dst_file,file_name) + else + target_file = dst_file + end + if not @cache_dir.nil? and + check_download_cache( target_file, file_size, checksum ) then + + @log.info "Download cache hit! Copied from cache.: #{file_name}" + req.puts "CACHED" + break + else + @log.info "Cached file not found!#{file_name}" + req.puts "NOT_CACHED" + end + elsif cmd == "UPLOADED" then + @log.info "Client uploaded file to ftp server successful" + if tok.count < 6 then + @log.error "Server received wrong REQ : #{l.strip}" + req.puts "ERROR" + return false + end + filepath = tok[3].strip + + # get ftp connection info + if @ftp_url.nil? 
then + ip = tok[1].strip + port = tok[2].strip + username = tok[4].strip + passwd = tok[5].strip + @log.info "Client sent ftp server infomations [#{ip}, #{port}]" + else + url_contents = Utils.parse_ftpserver_url(@ftp_url) + ip = url_contents[0] + port = url_contents[1] + username = url_contents[2] + passwd = url_contents[3] + end + + # download from ftp server + dst_filepath = nil + for attempt in ATTEMPTS + dst_filepath = FileTransfer.getfile(ip, port, username, passwd, filepath, dst_file, @log) + if not dst_filepath.nil? then break + else + @log.warn "Server is the #{attempt} download attempt fails" + end + end + if dst_filepath.nil? then + req.puts "ERROR" + return false + else @log.info " Server is the #{attempt} successful attempt to download" end + + # add to cache + if not @cache_dir.nil? then + if File.exist? dst_file and File.directory? dst_file then + target_file = File.join(dst_file,File.basename(dst_filepath)) + else + target_file = dst_file + end + add_download_cache(target_file) + end + + req.puts "SUCC" + break + elsif cmd == "ERROR" then + @log.error "Client failed to upload the file" + return false + else + @log.warn "Unhandled message: #{l}" + end + end + rescue => e + puts "[BuildCommServer] Exception" + @log.error e.message + @log.error e.backtrace.inspect + return false + end + + return true + end + + def self.disconnect( req ) begin req.close rescue end end + + def self.port_open?( port ) + Timeout::timeout(1) do + begin + TCPSocket.new("127.0.0.1",port).close + true + rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH + false + end + end + rescue Timeout::Error + false + end + + + private + def check_download_cache(dst_file, file_size, checksum ) + file_name = File.basename(dst_file) + cache_file = "#{@cache_dir}/#{file_name}" + + @download_cache_mutex.synchronize { + found = false + # check file exist + if File.exist? cache_file and + File.size(cache_file) == file_size and + Utils.checksum(cache_file) == checksum then + + # if hit , touch and copy + FileUtils.touch cache_file + FileUtils.copy_file(cache_file, dst_file) + + found = true + end + + # refresh cache dir + curr_time = Time.now + Dir.entries(@cache_dir).each { |fname| + if fname == "." or fname == ".." then next end + file_path = "#{@cache_dir}/#{fname}" + if File.mtime(file_path) + 3600 < curr_time then + FileUtils.rm_rf file_path + end + } + + return found + } + end + + + private + def add_download_cache(dst_file) + file_name = File.basename(dst_file) + cache_file = "#{@cache_dir}/#{file_name}" + @download_cache_mutex.synchronize { + # copy & touch + FileUtils.copy_file(dst_file, cache_file) + FileUtils.touch cache_file + } + end end class BuildCommClient - VERSION = "1.2.0" + VERSION = "1.5.0" private_class_method :new - def initialize(socket) + def initialize(socket, log) + @log = log @socket = socket end # create - def self.create(ip, port) + # if sec 0 or nil then not set timeout. it's timeout spec + def self.create(ip, port, log = nil, sec = 5) # open socket + socket = nil begin - socket = TCPSocket.open( ip, port ) + timeout(sec) do + socket = TCPSocket.open( ip, port ) + end + rescue Timeout::Error + return nil rescue # unknown exception return nil @@ -120,10 +395,14 @@ class BuildCommClient # refused if socket.nil? then - return nil - end + return nil + end + + if log.nil? then + log = Log.new(nil) + end - return new(socket) + return new(socket, log) end @@ -140,7 +419,7 @@ class BuildCommClient begin l = @socket.gets() - if @socket.nil? then + if l.nil? 
then puts "Connection refused" return false end @@ -173,9 +452,12 @@ class BuildCommClient begin # get first line - l = @socket.gets() + l = nil + timeout(5) do + l = @socket.gets() + end - if @socket.nil? then + if l.nil? then return false end @@ -190,9 +472,13 @@ class BuildCommClient if line.strip == "=CHK" then next end # print - yield line.strip + yield line.strip if block_given? end - rescue + rescue Timeout::Error + puts "WARN: Connection timed out" + return false + rescue => e + puts e.message return false end @@ -207,7 +493,7 @@ class BuildCommClient begin l = @socket.gets() - if @socket.nil? then + if l.nil? then puts "Connection refused" return nil end @@ -235,6 +521,133 @@ class BuildCommClient end + def send_file(ip, port, username, passwd, src_file) + begin + l = @socket.gets() + if l.nil? then + @log.error "[BuildCommClient] Connection refused" + return false + end + + # check protocol + if not protocol_matched? l.strip then + @log.error "[BuildCommClient] Comm. Protocol version is mismatched! #{VERSION}" + return false + end + + # 1. If "READY" is received, upload src file to FTP server + # After uploading it, send "UPLOADED,ip,file_path,username,passwd" + # 2. If "SUCC" is received, remove the file on FTP server + while line = @socket.gets() + if line.strip == "READY" then + @log.info "Server is ready to receive file" + file_name = File.basename(src_file) + file_size = File.size(src_file) + checksum = Utils.checksum(src_file) + send "CHECK_CACHE,#{file_name},#{file_size},#{checksum}" + elsif line.strip == "CACHED" then + @log.info "Server already has cached file" + elsif line.strip == "NOT_CACHED" then + @log.info "Server doest not have cached file" + ftp_filepath = nil + for attempt in ATTEMPTS + ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file, @log) + if !ftp_filepath.nil? then break; + else @log.info "Client is the #{attempt} upload attempt fails" end + end + if ftp_filepath.nil? then + send "ERROR" + return false + else @log.info "Client is the #{attempt} successful attempt to upload file" end + send "UPLOADED,#{ip},#{port},#{ftp_filepath},#{username},#{passwd}" + elsif line.strip == "SUCC" then + @log.info "Server downloaded file sucessfully" + FileTransfer.cleandir(ip, port, username, passwd, ftp_filepath, @log) + @log.info "Client cleaned temporary dir on ftp server: #{ftp_filepath}" + elsif line.strip == "ERROR" then + @log.error "Server failed to download the file. Please check server log" + return false + elsif line.strip == "=END" then + break + end + end + rescue => e + puts "[BuildCommClient] Exception" + @log.error e.message + @log.error e.backtrace.inspect + return false + end + + return true + end + + + # return file + def receive_file(ip, port, username, passwd, dst_file) + begin + l = @socket.gets() + + if l.nil? then + @log.error "[BuildCommClient] Connection refused" + return false + end + + # check protocol + if not protocol_matched? l.strip then + @log.error "[BuildCommClient] Comm. Protocol version is mismatched! #{VERSION}" + return false + end + + # 1. If "READY" is received, send "FTP,ip,port,username,passwd" + # 2. if "UPLOADED,ftp_file_path" is received, + # Download the file + # Send "SUCC" + # 3. 
If "SUCC" is received, remove the file on FTP server + while line = @socket.gets() + cmd = line.split(",")[0].strip + #@log.info "[BuildCommClient] Received \"#{cmd}\" message from BuildCommServer" + if cmd == "READY" then + send "FTP,#{ip},#{port},#{username},#{passwd}" + @log.info "Client sent ftp server infomation to server : [#{ip}, #{port}]" + elsif cmd == "UPLOADED" then + tok = line.split(",") + if tok.length < 2 then + @log.error "Client received wrong REQ : #{line.strip}" + return false + end + ftp_filepath = tok[1].strip + @log.info "Server uploaded file sucessfully" + dst_filepath = nil + for attempt in ATTEMPTS + dst_filepath = FileTransfer.getfile(ip, port, username, passwd, ftp_filepath, dst_file, @log) + if not dst_filepath.nil? then break + else + @log.warn "Client is the #{attempt} download attempt fails" + end + end + if dst_filepath.nil? then + send "ERROR" + return false + else @log.info "Client is the #{attempt} successful attempt to download" end + send "SUCC" + elsif cmd == "ERROR" then + @log.error "Server failed to upload file. Check server log" + return false + elsif cmd == "=END" then + break + end + end + rescue => e + puts "[BuildCommServer] Exception" + @log.error e.message + @log.error e.backtrace.inspect + return false + end + + return true + end + + def terminate @socket.close end diff --git a/src/build_server/BuildJob.rb b/src/build_server/BuildJob.rb index f29bdaf..223d752 100644 --- a/src/build_server/BuildJob.rb +++ b/src/build_server/BuildJob.rb @@ -1,5 +1,5 @@ =begin - + BuildJob.rb Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. @@ -35,64 +35,318 @@ require "client.rb" require "PackageManifest.rb" require "Version.rb" require "Builder.rb" +require "RemoteBuilder.rb" require "BuildServer.rb" require "JobLog.rb" require "mail.rb" +require "utils.rb" +require "ReverseBuildChecker.rb" class BuildJob - attr_accessor :blocked_by + attr_accessor :id, :server, :pre_jobs, :os, :type + attr_accessor :status, :pkginfo, :log, :source_path + attr_accessor :pkgsvr_client, :thread + attr_accessor :rev_fail_projects, :rev_success_jobs + attr_accessor :pending_ancestor, :cancel_state + attr_accessor :no_reverse # initialize - def initialize () - @blocked_by = [] + def initialize (id, project, os, server) + @id = id + @project = project + @os = os + @server = server + @type = "BUILD" + + @status = "JUST_CREATED" + @cancel_state = "NONE" + @resolve = false + @host_os = Utils::HOST_OS + @pkgserver_url = @server.pkgserver_url + @job_root = "#{@server.path}/jobs/#{@id}" + @source_path = @job_root+"/temp" + @job_working_dir=@job_root+"/works" + @buildroot_dir = "#{@job_root}/buildroot" + @pre_jobs = [] #pre-requisite jobs + + # this item will be initialized on pre-verify + @pkginfo = nil + @pkgsvr_client = nil + @thread = nil + @log = nil + @parent = nil # for job hierachy + + #for cancel operation + @pending_ancestor = nil # for cancel pending job + @remote_id = nil # for cancel remote_working job + @build_dep_prjs = nil # for cacnel pending job + + # for resolving build-break + @rev_fail_projects = [] # list of [project,os] + @rev_success_jobs = [] # list of job + + # remote build + @remote_server = nil + + # job type + @is_rev_build_check_job = false + @is_remote_job = false + + # for internal(transferred) job + @is_internal_job = false + @dock_num = "0" + + @external_pkgs = [] + @force_rebuild = false + + @no_reverse = false + end + + + def get_project() + return @project + end + + + # set parent + def set_parent_job( parent ) + # if parent 
exists, share build-root + @parent = parent + end + + # get parent + def get_parent_job() + return @parent + end + + + def is_sub_job? + return (not @parent.nil?) + end + + + def get_sub_jobs + return [] + end + + + def get_buildroot() + return @buildroot_dir + end + + + # set reverse build check job + def set_rev_build_check_job( parent ) + @is_rev_build_check_job = true + + # if parent exists, share build-root + if not parent.nil? then + set_parent_job( parent ) + end + end + + + def is_rev_build_check_job() + return @is_rev_build_check_job + end + + + def set_remote_job(server) + @is_remote_job = true + @remote_server=server + end + + def set_no_reverse() + @no_reverse = true + end + + + def set_internal_job( dock_num ) + @is_internal_job = true + @dock_num = dock_num + end + + + # set option for waiting for resolve + def set_resolve_flag() + @resolve = true + end + + + # set force rebuild + # This make project to build + # even though there is a package of same version on pkg-server + def set_force_rebuild(value) + @force_rebuild = value + end + + + # set logger + def set_logger( logger ) + @log = logger + end + + + # add external packages to overwrite before build + def add_external_package( file_name ) + @external_pkgs.push "#{@job_root}/external_pkgs/#{file_name}" end + # execute - def execute + def execute(sync=false) @log.info( "Invoking a thread for building Job #{@id}", Log::LV_USER) if @status == "ERROR" then return end @thread = Thread.new { - # main - thread_main() - - # close - terminate() - } + begin + thread_main() + if not is_sub_job? then terminate() end + rescue => e + @log.error e.message + @log.error e.backtrace.inspect + end + } + + if sync then + @thread.join + end + + return true end - # remote - def execute_remote(server) - @log.info( "Invoking a thread for remote-building Job #{@id}", Log::LV_USER) - if @status == "ERROR" then return end - @thread = Thread.new { - # main - remote_thread_main( server ) - - # close - terminate() - } + #terminate + def terminate() + #do noting + end + + + #cancel + def cancel() + # cancel all its reverse job + @server.jobmgr.reverse_build_jobs.each do |job| + if job.get_parent_job() == self and job.cancel_state == "NONE" then + job.cancel_state = "INIT" + end + end + + # cancel log print + if not @log.nil? then + @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER) + end + + case @status + when "REMOTE_WORKING" then + client = BuildCommClient.create( @remote_server.ip, @remote_server.port, @log ) + if not client.nil? then + client.send "CANCEL|#{@remote_id}|#{self.get_project.passwd}" + result1 = client.receive_data() + if result1.nil? then + @log.info( "cancel operation failed [connection error] !!", Log::LV_USER) + else + @log.info(result1, Log::LV_USER) + end + client.terminate + end + when "PENDING" then + if @pending_ancestor.nil? then + #resolve pending job + pending_descendants = @server.jobmgr.jobs.select do |j| + (not j.pending_ancestor.nil?) and "#{j.pending_ancestor.id}" == "#{@id}" + end + pending_descendants.each do |pd| + pd.cancel_state = "INIT" + end + else + # remove myself from success job if exist + # and add myself into rev_fail_project list if not exist + @pending_ancestor.remove_rev_success_job(self) + @pending_ancestor.add_rev_fail_project( @project, @os ) + + # remove the project that depends on me if exist + # and add it into rev_fail_project list if not exist + p_sub_jobs = @server.jobmgr.jobs.select do |j| + ( not j.pending_ancestor.nil? 
and + "#{j.pending_ancestor.id}" == "#{@pending_ancestor.id}" and + j.is_build_dependent_project(@project, @os) ) + end + p_sub_jobs.each do |d| + @pending_ancestor.remove_rev_success_job(d) + @pending_ancestor.add_rev_fail_project( d.get_project, d.os ) + + if not d.thread.nil? then d.thread.terminate end + d.status = "WAITING" + end + end + when "WORKING", "WAITING" , "INITIALIZING" , "JUST_CREATED" then + #just log + else # ERROR | FINISHED | RESOLVED + #do noting + end end # check building is possible def can_be_built_on?(host_os) - for pkg in @pkginfo.packages - if pkg.os == @os and pkg.build_host_os.include? host_os then + if @pkginfo.nil? then return false end + + @pkginfo.packages.each do |pkg| + if pkg.os_list.include? @os and pkg.build_host_os.include? host_os then return true end end - return false end + def get_packages() + return @pkginfo.packages + end + + + def get_build_dependencies(target_os) + return @pkginfo.get_build_dependencies(target_os) + end + + + def get_source_dependencies(target_os,host_os) + return @pkginfo.get_source_dependencies(target_os,host_os) + end + + + def is_compatible_with?(o) + if type != o.type then return false end + + my_project = get_project() + other_project = o.get_project() + + # check project name + if my_project.nil? or other_project.nil? or + my_project.name != other_project.name then + return false + end + + # check version + if @pkginfo.nil? or o.pkginfo.nil? or + not (Version.new(@pkginfo.get_version()) == Version.new(o.pkginfo.get_version())) then + return false + end + + # check compat os + @pkginfo.get_target_packages(@os).each do |p| + if not p.os_list.include?(o.os) then return false end + end + + return true + end + + def has_build_dependency?(other_job) if has_same_packages?(other_job) or - does_depend_on?(other_job) or - does_depended_by?(other_job) then + does_depend_on?(other_job) or + does_depended_by?(other_job) then return true else @@ -102,35 +356,53 @@ class BuildJob def has_same_packages?( wjob ) - for pkg in @pkginfo.packages - for wpkg in wjob.pkginfo.packages + + # same package must have same os + if not @os.eql? wjob.os then + return false + end + + # check package name + get_packages().each do |pkg| + wjob.get_packages().each do |wpkg| if pkg.package_name == wpkg.package_name then #puts "Removed from candiated... A == B" return true end end end + return false end def does_depend_on?( wjob ) - for dep in @pkginfo.get_build_dependencies(@os, BuildServer::HOST_OS) - for wpkg in wjob.pkginfo.packages - if dep.package_name == wpkg.package_name then + + # compare build dependency + get_build_dependencies(@os).each do |dep| + wjob.get_packages().each do |wpkg| + # dep packages of my job must have same name and target os + # with packages in working job + if dep.package_name == wpkg.package_name and + dep.target_os_list.include? wjob.os then #puts "Removed from candiated... A -> B" return true end end end + return false end def does_depended_by?( wjob ) - for pkg in @pkginfo.packages - for dep in wjob.pkginfo.get_build_dependencies(@os, BuildServer::HOST_OS) - if pkg.package_name == dep.package_name then + + get_packages().each do |pkg| + wjob.get_build_dependencies(wjob.os).each do |dep| + # dep package of working job must have same name and target os + # with packages in my job + if dep.package_name == pkg.package_name and + dep.target_os_list.include? @os then #puts "Checking... A <- B" return true end @@ -141,31 +413,135 @@ class BuildJob def is_connected? + return @log.is_connected? + end - # nil? 
then false - if @outstream.nil? then + + # return the job is asyncronous job + def is_asynchronous_job? + if not @log.has_second_out? then + return true + else return false end + end - # send chk signal - begin - BuildCommServer.send_chk( @outstream ) - rescue - return false + + # remove job from reverse success job + def remove_rev_success_job( job ) + @rev_success_jobs.delete job if @rev_success_jobs.include? job + end + + + # check [project,os] is in reverse fail project list + def is_rev_fail_project( prj, os ) + # check the project already exist + @rev_fail_projects.each do |p| + if p[0] == prj and p[1] == os then + return true + end end - return true + return false end - # return the job is asyncronous job - def is_asynchronous_job? - if @outstream.nil? then - return true + # add [project,os] to reverse fail project list + def add_rev_fail_project( prj, os ) + # check the project already exist + @rev_fail_projects.each do |p| + if p[0] == prj and p[1] == os then + return + end + end + # if not, add it + @rev_fail_projects.push [prj,os] + end + + + # remove [project,os] from reverse fail project list + def remove_rev_fail_project( prj, os ) + remove_list = [] + + # check project and os name + @rev_fail_projects.each do |p| + if p[0] == prj and p[1] == os then + remove_list.push p + end + end + + # remove + remove_list.each do |r| + @rev_fail_projects.delete r + end + end + + + # get project that my job is dependent on + def get_build_dependent_projects() + if @build_dep_prjs.nil? then + deps = @pkginfo.get_build_dependencies(@os) + pkgs = deps.map{|x| + # if "os" is not specified, use my "os" + if x.target_os_list.nil? or x.target_os_list.empty? then + os = @os + else + os = x.target_os_list[0] + end + + # package as item + @pkgsvr_client.get_pkg_from_list(x.package_name, os) + } + prjs = @server.prjmgr.get_projects_from_pkgs(pkgs) + @build_dep_prjs = prjs + end + + return @build_dep_prjs + end + + + # check if the project is my dependent project + def is_build_dependent_project( prj, os ) + dep_list = get_build_dependent_projects() + dep_list.each do |dep| + if dep[0] == prj and dep[1] == os then + return true + end + end + + return false + end + + + def progress + if not @log.nil? then + if @project.nil? or @project.get_latest_log_cnt.nil? then + return "--% (#{log.cnt.to_s} lines) " + else + return ( ( @log.cnt * 100 ) / @project.get_latest_log_cnt ).to_s + "%" + end + end + # if log is nil then can't figure progress out + return "" + end + + + def get_log_url() + # only when server support log url + if @server.job_log_url.empty? then + return "","" + end + + url = "#{@server.job_log_url}/#{@id}/log" + # if remote, the file existence must be checked + if File.exist? 
"#{@job_root}/remote_log" then + return url,"#{@server.job_log_url}/#{@id}/remote_log" else - return false + return url,"" end end + + # # PROTECTED METHODS # @@ -176,52 +552,70 @@ class BuildJob def thread_main @log.info( "New Job #{@id} is started", Log::LV_USER) - # update local package server - @server.local_pkgsvr.sync( @server.local_pkgsvr.get_default_dist_name(), false ) - - # checking version - if not check_package_version() - @status = "ERROR" - - return - end - # checking build dependency - if not check_build_dependency() + if not @is_remote_job and not @is_internal_job and + not check_build_dependency() then + if @is_internal_job then copy_result_files_to_master() end @status = "ERROR" - return + return false end # clean build - if not build() + if not build() then + if @is_internal_job then copy_result_files_to_master() end + @status = "ERROR" - return + return false end # upload - if not upload() + if not @is_rev_build_check_job and not @is_internal_job and + @parent.nil? and + not upload() then @status = "ERROR" - return + return false + end + + # copy result files to transport path + if @is_internal_job then + copy_result_files_to_master() + elsif not @parent.nil? and not @is_rev_build_check_job then + copy_result_files(@parent.source_path) end # INFO. don't change this string @log.info( "Job is completed!", Log::LV_USER) @status = "FINISHED" + return true end # check if local package version is greater than server - def check_package_version() + def check_package_version( source_info ) @log.info( "Checking package version ...", Log::LV_USER) - # package update - @pkgsvr_client.update + # check if version is same and source_info is different + ver_local = @pkginfo.packages[0].version + old_source_info = @project.get_source_info( ver_local ) + if not old_source_info.nil? and old_source_info != source_info then + @log.error( "Source code has been changed without increasing version!", Log::LV_USER) + @log.error( " * Version : #{ver_local}", Log::LV_USER) + @log.error( " * Before : #{old_source_info}", Log::LV_USER) + @log.error( " * Current : #{source_info}", Log::LV_USER) + + return false + end + + # compare with package version in package server + @pkginfo.packages.each do |pkg| + # check all supported os + ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version") + # ignore if package does not exist + if ver_svr.nil? then next end - for pkg in @pkginfo.packages - ver_local = pkg.version - #ver_svr = @pkgsvr_client.get_package_version( pkg.package_name, @os ) - ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version") - if not ver_svr.nil? and Version.new(ver_local) <= Version.new(ver_svr) then + # compare version + if Version.new(ver_local) < Version.new(ver_svr) or + ( not @force_rebuild and Version.new(ver_local) == Version.new(ver_svr) ) then @log.error( "Version must be increased : #{ver_local} <= #{ver_svr}", Log::LV_USER) return false end @@ -232,127 +626,422 @@ class BuildJob # build dependency version + # make sure that package server has all dependency packages of job def check_build_dependency() @log.info( "Checking build dependency ...", Log::LV_USER) + @pkgsvr_client.update + unmet_bdeps = [] + @pkginfo.get_build_dependencies( @os ).each do |dep| + # if parent exist, search parent source path first + # if not found, check package server + ver_svr = nil + if not @parent.nil? then + local_pkg = get_local_path_of_dependency( dep, @parent ) + if not local_pkg.nil? 
then + ver_svr = Utils.get_version_from_package_file( local_pkg ) + else + ver_svr = nil + end + end + if not ver_svr.nil? then next end - for dep in @pkginfo.get_build_dependencies( @os, @host_os ) - #ver_svr = @pkgsvr_client.get_package_version( dep.package_name, @os ) - if dep.target_os_list.count != 0 then - dep_target_os = dep.target_os_list[0] - else - dep_target_os = @os + if not remote_package_of_dependency_exist?(dep) then + unmet_bdeps.push dep end - ver_svr = @pkgsvr_client.get_attr_from_pkg( dep.package_name, dep_target_os, "version") + end - if ver_svr.nil? - @log.error( "The package \"#{dep.package_name}\" for build-dependency is not found}", Log::LV_USER) - return false + @log.info( "Checking install dependency ...", Log::LV_USER) + unmet_ideps = [] + @pkginfo.get_install_dependencies( @os ).each do |dep| + # if parent exist, search pkginfos for all sub jobs + # if not found, check package server + found = false + if not @parent.nil? and @parent.type == "MULTIBUILD" then + @parent.sub_jobs.each { |j| + os = (dep.target_os_list.empty?) ? @os : dep.target_os_list[0] + if j.pkginfo.pkg_exist?(dep.package_name, dep.base_version, os) then + found = true; break + end + } end + if found then next end - if not dep.match? ver_svr - @log.error( "Version for build-dependency in not matched : server version => #{ver_svr}", Log::LV_USER) - return false + if not remote_package_of_dependency_exist?(dep) then + unmet_ideps.push dep end - end - - return true + end + + # unmet dependencies found , report the errors + if not unmet_bdeps.empty? or not unmet_ideps.empty? then + @log.error( "Unmet dependency found!", Log::LV_USER) + unmet_bdeps.each { |d| + os = (d.target_os_list.empty?) ? @os : d.target_os_list[0] + @log.error( " * #{d.package_name}(#{os}) for build-dependency", Log::LV_USER) + } + unmet_ideps.each { |d| + os = (d.target_os_list.empty?) ? @os : d.target_os_list[0] + @log.error( " * #{d.package_name}(#{os}) for install-dependency", Log::LV_USER) + } + + return false + else + return true + end end # build clean def build() - if @resolve then - @log.info( "Resolving job...", Log::LV_USER) + + # check there are pending pacakges which wait for me + # it will return nil if not exist + # this process must be skip if it is sub-job + if not @is_rev_build_check_job and not @is_internal_job then + @server.cancel_lock.synchronize{ + @pending_ancestor = get_pending_ancestor_job() + } + end + + if not @pending_ancestor.nil? then + # resolve other pending job + resolve() + elsif @resolve then + # wait for being resolved by other jobs + # this condition must be placed after checking pending status + wait_resolve() else - @log.info( "Building job...", Log::LV_USER) - end + # build + build_normal() + end + end - # create builder - builder = Builder.create( "JB#{@id}", @pkgserver_url,@log.path ) - if builder.nil? 
- @log.error( "Creating job builder failed", Log::LV_USER) - return false + + # return pending job that wait for me + def get_pending_ancestor_job() + @server.jobmgr.get_pending_jobs.each do |job| + if job.is_rev_fail_project(@project,@os) then + return job + end + end + + return nil + end + + + # check whether build this job or not + # if not build, then return its compat pkgs list + def check_compatable_packages + compat_pkgs = [] # [ package name, os, local_path ] + + @pkginfo.get_target_packages(@os).each do |p| + # if package has only os then must build + if p.os_list.count <= 1 then return [] end + + compat_found = false + p.os_list.each do |o| + # check parent pkgs first + if not @parent.nil? then + parent_pkgs = Dir.glob("#{@parent.source_path}/#{p.package_name}_*_*.zip") + parent_pkgs.each do |lp| + lpname = Utils.get_package_name_from_package_file( lp ) + lver = Utils.get_version_from_package_file(lp) + los = Utils.get_os_from_package_file( lp ) + if lpname == p.package_name and o == los and lver == p.version then + compat_pkgs.push [p.package_name,o,lp] + compat_found = true + break + end + end + end + if compat_found then break end + + # check other package already in package server + ver_svr = @pkgsvr_client.get_attr_from_pkg( p.package_name, o, "version") + if not ver_svr.nil? and p.version.eql? ver_svr then + compat_pkgs.push [p.package_name,o,nil] + compat_found = true + break + end + end + + # if there is no compat pkgs for one pkg, then must build + if not compat_found then return [] end end - @log.info( "JobBuilder##{@id} is created", Log::LV_USER) + + return compat_pkgs + end + + + def build_normal() + @log.info( "Started to build this job...", Log::LV_USER) + + # create builder + if @is_remote_job then + builder = RemoteBuilder.new("JB#{@id}", @remote_server, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd) + @log.info( "JobBuilder##{@id} is created", Log::LV_USER) + @log.info( " - Remote Server : #{@remote_server.ip}:#{@remote_server.port}" ) + @log.info( " - FTP Server : #{@server.ftp_addr}" ) + else + builder = Builder.create( "JB#{@id}", @pkgserver_url, @log.path, + "#{@buildroot_dir}", @server.build_cache_dir ) + if builder.nil? + @log.error( "Creating job builder failed", Log::LV_USER) + return false + end + @log.info( "JobBuilder##{@id} is created", Log::LV_USER) + @log.info( " - Package Server : #{@pkgserver_url}" ) + @log.info( " - Build Cache Path : #{@server.build_cache_dir}" ) + end + @log.info( " - Log Path : #{@log.path}" ) # set log output builder.log.close builder.log = @log - #make pending_pkg_dir_list - pending_pkg_dir_list = [] - ignore_rev_dep_build_list = [] - @pkginfo.packages.each do |i| - @server.jobmgr.get_pending_jobs.each do |pj| - if pj.rev_fail_list.include? i.package_name then - pending_pkg_dir_list.push pj.source_path - pending_pkg_dir_list += pj.rev_success_list.map {|pjs| pjs.source_path} - ignore_rev_dep_build_list = pj.rev_fail_list - break - end - end - if not pending_pkg_dir_list.empty? then break end - end - dependency_package_exist = (not pending_pkg_dir_list.empty?) + # if sub job, install dependent packages of parent-pkgs and not clean + use_clean = true + local_pkgs = [] + local_pkgs += @external_pkgs + if not @parent.nil? then + use_clean = false + # get local packages to install + deps = @pkginfo.get_build_dependencies(@os) + local_pkgs += get_local_paths_of_chained_dependencies( deps, @parent ) + end + local_pkgs.uniq! 
+ + #compatable os support + compat_ok = true + compat_pkgs = check_compatable_packages + if compat_pkgs.size > 0 and not @is_rev_build_check_job then + # bring package from server for reverse check + compat_pkgs.each do |p| + pkg_name = p[0]; cos = p[1]; local_path = p[2] + + if not local_path.nil? then + ext = File.extname(local_path) + base_package_name= File.basename(local_path, "#{cos}#{ext}") + @log.info( "Copying compatible package:#{local_path}", Log::LV_USER) + @log.info( "Creating package file ... #{base_package_name}#{@os}#{ext}", Log::LV_USER) + FileUtils.cp local_path, "#{@source_path}/#{base_package_name}#{@os}#{ext}" + else + @log.info( "Downloading compatible package:#{pkg_name}(#{cos})", Log::LV_USER) + loc = @pkgsvr_client.download(pkg_name, cos, false) + if loc.nil? or loc.count != 1 then + @log.warn( "Downloading compatible package failed!:#{pkg_name}(#{cos})", Log::LV_USER) + compat_ok = false + break + end + ext = File.extname(loc[0]) + base_package_name= File.basename(loc[0], "#{cos}#{ext}") + @log.info( "Creating package file ... #{base_package_name}#{@os}#{ext}", Log::LV_USER) + FileUtils.mv loc[0], "#{@source_path}/#{base_package_name}#{@os}#{ext}" + end + end + else + compat_ok = false + end + + # if compat check failed + if not compat_ok then + # build + if @is_remote_job then + result = builder.build(@project.repository, @source_path, @os, + @is_rev_build_check_job, @git_commit, @no_reverse, local_pkgs) + else + result = builder.build(@source_path, @os, use_clean, local_pkgs, false ) + end + if not result then + @log.error( "Building job failed", Log::LV_USER) + write_log_url() + return false + end + end + + # check reverse dependecy if not sub jobs + + if not @no_reverse then + if not @is_rev_build_check_job and not @is_internal_job and + not ReverseBuildChecker.check( self, true ).empty? then + @log.error( "Reverse-build-check failed!" ) + return false + end + end + + return true + end + + + # wait to be resolved by other jobs + def wait_resolve() + @log.info( "Started to build this job and wait for being resolved...", Log::LV_USER) + + # create builder + if @is_remote_job then + builder = RemoteBuilder.new("JB#{@id}", @remote_server, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd) + @log.info( "JobBuilder##{@id} is created", Log::LV_USER) + @log.info( " - Remote Server : #{@remote_server.ip}:#{@remote_server.port}" ) + @log.info( " - FTP Server : #{@server.ftp_addr}" ) + else + builder = Builder.create( "JB#{@id}", @pkgserver_url, @log.path, + "#{@buildroot_dir}/#{@os}", @server.build_cache_dir ) + if builder.nil? + @log.error( "Creating job builder failed", Log::LV_USER) + return false + end + @log.info( "JobBuilder##{@id} is created", Log::LV_USER) + @log.info( " - Package Server : #{@pkgserver_url}" ) + @log.info( " - Build Cache Path : #{@server.build_cache_dir}" ) + end + @log.info( " - Log Path : #{@log.path}" ) + + # set log output + builder.log.close + builder.log = @log # build - if @resolve then - @rev_fail_list = builder.build_resolve(@source_path, @os, [], []) + if @is_remote_job then + result = builder.build(@project.repository, @source_path, @os, + false, @git_commit, @no_reverse, []) + else + result = builder.build(@source_path, @os, true, [], false ) + end + if not result then + @log.error( "Building job failed", Log::LV_USER) + write_log_url() + return false + end - # clean build failed - if @rev_fail_list.nil? 
then - @log.error( "Resolve building job failed", Log::LV_USER) - return false - end + # check reverse dependecy + @rev_fail_projects = ReverseBuildChecker.check(self, false) + if @rev_fail_projects.empty? then + # if no problem?, it OK + return true + end - # pending - @status = "PENDING" + # pending + @status = "PENDING" + @log.info( "Entered the PENDING state ...", Log::LV_USER) + old_msg = "" + while @status == "PENDING" + new_msg = @rev_fail_projects.map {|p| "#{p[0].name}(#{p[1]})"}.join(", ") + if old_msg != new_msg then + @log.error( " * Waiting for building next projects: #{new_msg}", Log::LV_USER) + old_msg = new_msg + end + sleep 1 + end - # rev build successed - if @rev_fail_list.empty? then - @rev_success_list.each do |s| - s.status = "" - end - @status = "" - end + return true + end - @log.info "Enters the PENGING state ..." - while @status == "PENDING" - sleep 1 - end - return true - else - if not builder.build(@source_path, @os, true, true, pending_pkg_dir_list, ignore_rev_dep_build_list ) - @log.error( "Building job failed", Log::LV_USER) - return false + + # resolve other pending job + def resolve() + + # wait for other build-dependent projects are resolved + old_msg = "" + wait_prjs = @pending_ancestor.rev_fail_projects.select {|p| is_build_dependent_project(p[0], p[1])} + @log.info("Checking build dependency before RESOLVE", Log::LV_USER) + while not wait_prjs.empty? + @status = "PENDING" + new_msg = wait_prjs.map {|p| "#{p[0].name}(#{p[1]})"}.join(", ") + if new_msg != old_msg then + @log.info(" * Waiting for building next projects: #{new_msg}", Log::LV_USER) + old_msg = new_msg + end + sleep 1 + wait_prjs = @pending_ancestor.rev_fail_projects.select {|p| is_build_dependent_project(p[0], p[1])} + end + + # return back to "WORKING" + @status = "WORKING" + + @log.info( "Started to build this job and resolve other pending job...", Log::LV_USER) + + # create builder + if @is_remote_job then + builder = RemoteBuilder.new("JB#{@id}", @remote_server, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd) + @log.info( "JobBuilder##{@id} is created", Log::LV_USER) + @log.info( " - Remote Server : #{@remote_server.ip}:#{@remote_server.port}" ) + @log.info( " - FTP Server : #{@server.ftp_addr}" ) + else + builder = Builder.create( "JB#{@id}", @pkgserver_url, @log.path, + "#{@buildroot_dir}/#{@os}", @server.build_cache_dir ) + if builder.nil? 
+ @log.error( "Creating job builder failed", Log::LV_USER) + return false + end + @log.info( "JobBuilder##{@id} is created", Log::LV_USER) + @log.info( " - Package Server : #{@pkgserver_url}" ) + @log.info( " - Build Cache Path : #{@server.build_cache_dir}" ) + end + @log.info( " - Log Path : #{@log.path}" ) + + # set log output + builder.log.close + builder.log = @log + + # get local packages to overwite + # they must be composed of packages of pending jobs and its success list + local_pkgs=[] + local_pkgs += @external_pkgs + src_path = @pending_ancestor.source_path + ver = @pending_ancestor.pkginfo.get_version() + @pending_ancestor.pkginfo.get_target_packages(@os).each do |pkg| + local_pkgs.push "#{src_path}/#{pkg.package_name}_#{ver}_#{@os}.zip" + end + @pending_ancestor.rev_success_jobs.each do |job| + src_path = job.source_path + ver = job.pkginfo.get_version() + job.pkginfo.get_target_packages(@os).each do |pkg| + local_pkgs.push "#{src_path}/#{pkg.package_name}_#{ver}_#{@os}.zip" + end + end + + # build + if @is_remote_job then + result = builder.build(@project.repository, @source_path, @os, + false, @git_commit, @no_reverse, local_pkgs) + else + result = builder.build(@source_path, @os, true, local_pkgs, false ) + end + if not result then + @log.error( "Building job failed", Log::LV_USER) + write_log_url() + return false + end + + # check reverse dependecy and update parent rev_fail_project list + new_fail_projects = ReverseBuildChecker.check(self, false) + new_fail_projects.each do |p| + @pending_ancestor.add_rev_fail_project(p[0], p[1]) + end + + # update the status of pending job + @status = "PENDING" + @pending_ancestor.remove_rev_fail_project(@project, @os) + @pending_ancestor.rev_success_jobs.push self + if @pending_ancestor.rev_fail_projects.empty? then + @pending_ancestor.status = "RESOLVED" + @pending_ancestor.rev_success_jobs.each do |job| + job.status = "RESOLVED" end + else + @log.info( "Entered the PENDING state ...", Log::LV_USER) + old_msg = "" + while @status == "PENDING" + new_msg = @pending_ancestor.rev_fail_projects.map {|p| "#{p[0].name}(#{p[1]})"}.join(", ") + + if new_msg != old_msg then + @log.info(" * Waiting for building next projects: #{new_msg}", Log::LV_USER) + old_msg = new_msg + end - if dependency_package_exist then - @server.jobmgr.get_pending_jobs.each do |j| - if j.source_path == pending_pkg_dir_list[0] then - j.rev_fail_list -= @pkginfo.packages.map{|p| p.package_name} - j.rev_success_list.push self - if j.rev_fail_list.empty? then - j.rev_success_list.each do |s| - s.status = "" - end - j.status = "" - else - @status = "PENDING" - @log.info "Enters the PENGING state ..." - while @status == "PENDING" - sleep 1 - end - end - break - end - end - end + sleep 1 + end end - # remove builder - Builder.remove( "builder_#{@id}" ) - return true end @@ -362,13 +1051,10 @@ class BuildJob # get package path list binpkg_path_list = Dir.glob("#{@source_path}/*_*_#{@os}.zip") - srcpkg_path_list = Dir.glob("#{@source_path}/*.tar.gz") # upload u_client = Client.new( @server.pkgserver_url, nil, @log ) - u_client.update - snapshot = u_client.upload( @server.pkgserver_addr, - @server.pkgserver_id, binpkg_path_list, srcpkg_path_list, true) + snapshot = u_client.upload( @server.pkgserver_addr, @server.pkgserver_port, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd, binpkg_path_list) if snapshot.nil? then @log.info( "Upload failed...", Log::LV_USER) @@ -379,46 +1065,157 @@ class BuildJob # update local @log.info( "Upload succeeded. 
Sync local pkg-server again...", Log::LV_USER) @pkgsvr_client.update - @server.local_pkgsvr.sync( @server.local_pkgsvr.get_default_dist_name(), false ) @log.info("Snapshot: #{snapshot}", Log::LV_USER) return true end - # remote main module - def remote_thread_main(server) - @log.info( "Job #{@id} is requested to be built on remote server ", Log::LV_USER) - - # open - client = BuildCommClient.create( server.ip, server.port ) - if client.nil? then - @status = "ERROR" - return + def copy_result_files(dst_path) + @log.info( "Copying result files to #{dst_path}", Log::LV_USER) + + # get package path list + binpkg_path_list = Dir.glob("#{@source_path}/*_*_#{@os}.zip") + + binpkg_path_list.each do |file| + @log.info( " * #{file}", Log::LV_USER) + FileUtils.cp(file,"#{dst_path}/") end - - # send & receive - if client.send("BUILD,GIT,#{@git_repos},#{@git_commit},#{@os},,NO") then - result = client.read_lines do |l| - if l.include? "Job is stopped by ERROR" then - @status = "ERROR" - end - # ddd list - @log.output( l.strip, Log::LV_USER) + + return true + end + + + # copy binary package files and log file to transport dir + def copy_result_files_to_master() + outgoing_dir = "#{@server.transport_path}/#{@dock_num}" + + @log.info( "Copying log to #{outgoing_dir}", Log::LV_USER) + file = "#{@source_path}/../log" + FileUtils.copy_file(file, "#{outgoing_dir}/remote_log") + + # copy result files, if not reverse build + if not @is_rev_build_check_job then + return copy_result_files( outgoing_dir ) + else + return true + end + end + + + protected + def get_local_path_of_dependency( dep, parent ) + dep_target_os = get_os_of_dependency(dep) + + # search my parent job and its parent job + binpkgs = Dir.glob("#{parent.source_path}/#{dep.package_name}_*_#{dep_target_os}.zip") + if binpkgs.count == 0 and not parent.get_parent_job().nil? then + binpkgs = Dir.glob("#{parent.get_parent_job().source_path}/#{dep.package_name}_*_#{dep_target_os}.zip") + end + + if binpkgs.count > 0 then + pkg = binpkgs[0] + version = Utils.get_version_from_package_file(pkg) + if dep.match? version then + return pkg + else + return nil end - if not result then @status = "ERROR" end + else + return nil end + end - # close socket - client.terminate - # INFO. don't change this string - if @status != "ERROR" then - @log.info( "Job is just finished", Log::LV_USER) - @status = "FINISHED" + protected + def get_local_paths_of_chained_dependencies( deps, parent ) + pkg_paths = [] + + # get packages names that is related my build dependency + chained_deps = get_local_chained_dependencies( deps, parent ) + + # get all local path of dependencies + chained_deps.each { |dep| + new_path = get_local_path_of_dependency(dep, parent) + if not new_path.nil? then + pkg_paths.push new_path + end + } + + # remove duplicates + pkg_paths.uniq! + + return pkg_paths + end + + + protected + def get_local_chained_dependencies( deps, parent ) + + chained_deps = [] + chained_deps += deps + + # if parent is multi build job, gether all install dependency of dependency. + if parent.type == "MULTIBUILD" then + begin + old_deps_count = chained_deps.count + new_deps = [] + chained_deps.each { |dep| + dep_target_os = get_os_of_dependency(dep) + + parent.sub_jobs.each { |j| + new_deps += j.pkginfo.get_install_dependencies(dep_target_os, dep.package_name) + } + } + chained_deps += new_deps + chained_deps.uniq! {|d| d.package_name } + end while chained_deps.count != old_deps_count + end + + # check parent of parent + if not parent.get_parent_job().nil? 
then + chained_deps = get_local_chained_dependencies(chained_deps, parent.get_parent_job()) end - return + return chained_deps + end + + + protected + def remote_package_of_dependency_exist?(dep) + dep_target_os = get_os_of_dependency(dep) + + # search + ver_svr = @pkgsvr_client.get_attr_from_pkg( dep.package_name, dep_target_os, "version") + if ver_svr.nil? then return false end + if not dep.match? ver_svr then return false end + + return true + end + + + # write web url for log + protected + def write_log_url() + url,remote_url = get_log_url() + if not url.empty? then + @log.info( " ** Log1: #{url}", Log::LV_USER) + end + if not remote_url.empty? then + @log.info( " ** Log2: #{remote_url}", Log::LV_USER) + end end + + # get target os of dependency + protected + def get_os_of_dependency(dep) + # use the target os if not specified + if dep.target_os_list.count != 0 then + dep_target_os = dep.target_os_list[0] + else + dep_target_os = @os + end + end + end diff --git a/src/build_server/BuildServer.rb b/src/build_server/BuildServer.rb index d1d993d..0c7e3c6 100644 --- a/src/build_server/BuildServer.rb +++ b/src/build_server/BuildServer.rb @@ -28,36 +28,52 @@ Contributors: require 'fileutils' $LOAD_PATH.unshift File.dirname(__FILE__) -$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server" require "SocketJobRequestListener.rb" require "RemoteBuildJob.rb" -require "LocalBuildJob.rb" -require "packageServer.rb" require "JobManager.rb" +require "JobClean.rb" require "RemoteBuildServer.rb" +require "PackageSync.rb" +require "ProjectManager.rb" class BuildServer - attr_accessor :id, :path, :pkgserver_url, :pkgserver_addr, :pkgserver_id, :port, :status, :friend_servers, :host_os, :log + attr_accessor :id, :path, :pkgserver_url, :pkgserver_addr, :pkgserver_port, :pkgserver_id, :port, :status, :friend_servers, :host_os, :log attr_accessor :git_server_url, :git_bin_path attr_accessor :job_log_url attr_accessor :allowed_git_branch - attr_accessor :pkgsvr_cache_path, :local_pkgsvr attr_accessor :send_mail attr_accessor :jobmgr attr_accessor :test_time attr_accessor :password attr_accessor :finish + attr_accessor :build_cache_dir + attr_accessor :keep_time + attr_accessor :ftp_addr + attr_accessor :ftp_port + attr_accessor :ftp_username + attr_accessor :ftp_passwd + attr_accessor :cleaner + attr_accessor :prjmgr + attr_accessor :transport_path + attr_accessor :cancel_lock + attr_accessor :supported_os_list + attr_accessor :upgrade + attr_accessor :remote_pkg_servers + attr_accessor :pkg_sync_period + CONFIG_ROOT = "#{Utils::HOME}/.build_tools/build_server" HOST_OS = Utils::HOST_OS # initialize - def initialize (id, path, pkgserver_url, pkgserver_addr, pkgserver_id) + def initialize (id, path, pkgsvr_url, pkgsvr_addr, pkgsvr_port, pkgsvr_id, ftpsvr_addr, ftpsvr_port, ftpsvr_username, ftpsvr_passwd) @id = id @path = path - @pkgserver_url = pkgserver_url - @pkgserver_addr = pkgserver_addr - @pkgserver_id = pkgserver_id + @pkgserver_url = pkgsvr_url + @pkgserver_addr = pkgsvr_addr + @pkgserver_port = pkgsvr_port + @pkgserver_id = pkgsvr_id @friend_servers = [] + @remote_pkg_servers = [] @req_listener = [] @finish = false # port number @@ -75,11 +91,24 @@ class BuildServer @send_mail = "NO" # local package server @pkgsvr_cache_path = nil - @local_pkgsvr = nil # Job Manager @jobmgr = JobManager.new(self) @test_time=0 #test time in mili-seconds @password="0000" + @keep_time=86400 + @ftp_addr = ftpsvr_addr + @ftp_port = ftpsvr_port + @ftp_username = ftpsvr_username + @ftp_passwd = ftpsvr_passwd + 
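		# A minimal usage sketch (the id, path, and package-server id are hypothetical
		# values): a server object is normally built through
		# BuildServerController.create_server, which forwards the package-server and
		# FTP settings straight into this constructor, e.g.:
		#
		#   BuildServer.new("myserver", "/home/dibs/build-server",
		#                   "http://127.0.0.1/dibs/unstable", "127.0.0.1", "3333",
		#                   "pkgsvr01", "127.0.0.1", "1024", "dibsftp", "dibsftp")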
@cleaner=nil + @prjmgr = ProjectManager.new(self) + # + @transport_path = "#{@path}/transport" + @cancel_lock = Mutex.new + @supported_os_list = [] + + @pkg_sync_period=600 + @upgrade = false end @@ -88,57 +117,74 @@ class BuildServer # start @log = Log.new( "#{BuildServer::CONFIG_ROOT}/#{@id}/log" ) - # set local package server for cache - @log.info "Setting local package server..." - pkgsvr_id = @id - pkgsvr_dist = @pkgserver_url.split("/")[-1] - @local_pkgsvr = PackageServer.new( pkgsvr_id ) - if @local_pkgsvr.location.empty? then - FileUtils.mkdir_p @pkgsvr_cache_path - @local_pkgsvr.create(pkgsvr_id, pkgsvr_dist, @pkgserver_url, @pkgsvr_cache_path ) - else - # check path is changed, recreate it - if @local_pkgsvr.location != "#{@pkgsvr_cache_path}/#{pkgsvr_id}" then - # remove - @local_pkgsvr.remove_server( pkgsvr_id ) - # create - FileUtils.mkdir_p @pkgsvr_cache_path - @local_pkgsvr.create(pkgsvr_id, pkgsvr_dist, @pkgserver_url, @pkgsvr_cache_path ) - end + # set build cache dir + @build_cache_dir="#{BuildServer::CONFIG_ROOT}/#{@id}/build_cache" + if not File.exist? @build_cache_dir then + FileUtils.mkdir_p @build_cache_dir end + # init transport path + if not File.exist? @transport_path then FileUtils.mkdir_p @transport_path end + + # init project mgr + @log.info "Setting Project Manager..." + @prjmgr.load() + + # init job mgr + @log.info "Intializing Job Manager..." + @jobmgr.init() + # set job request listener @log.info "Setting listener..." listener2 = SocketJobRequestListener.new(self) listener2.start @req_listener.push listener2 + + # set job cleaner + @log.info "Setting Job Cleaner..." + @cleaner = JobCleaner.new(self) + @cleaner.start + + # set package server synchrontizer + if not @remote_pkg_servers.empty? then + @log.info "Setting Package Server Synchronizer..." + @pkg_sync = PackageServerSynchronizer.new(self) + @pkg_sync.start + end # main loop @log.info "Entering main loop..." - if @test_time > 0 then start_time = Time.now end - while( not @finish ) - - # update friend server status - for server in @friend_servers - # update state - server.update_state - end - - # handle jobs - @jobmgr.handle() - - # sleep - if @test_time > 0 then - curr_time = Time.now - if (curr_time - start_time).to_i > @test_time then - puts "Test time is elapsed!" - break + begin + if @test_time > 0 then start_time = Time.now end + while( not @finish ) + + # update friend server status + @friend_servers.each do |server| + # update state + server.update_state + end + + # handle jobs + @jobmgr.handle() + + # sleep + if @test_time > 0 then + curr_time = Time.now + if (curr_time - start_time).to_i > @test_time then + puts "Test time is elapsed!" + break + end + else + sleep 1 end - else - sleep 1 end + rescue => e + @log.error( e.message, Log::LV_USER) end + if(@upgrade) + exit(99) + end # TODO: something should be done for server down end @@ -152,6 +198,10 @@ class BuildServer # check the job can be built on this server def can_build?(job) + # check max allowed jobs + if @jobmgr.max_working_jobs <= 0 then + return false + end # check me if job.can_be_built_on? @host_os then @@ -166,39 +216,78 @@ class BuildServer def add_remote_server( ip, port ) # if already exit, return false - for svr in @friend_servers + @friend_servers.each do |svr| if svr.ip.eql? 
ip and svr.port == port then return false end end # create new one, and add it into list - new_server = RemoteBuildServer.new( ip, port ) + new_server = RemoteBuildServer.new( ip, port, self ) @friend_servers.push new_server return true end + # add new remote pkg server + def add_remote_package_server( url, proxy ) + + # if already exit, return false + @remote_pkg_servers.each do |entry| + u = entry[0] + + if u == url then + return false + end + end + + @remote_pkg_servers.push [url, proxy] + + return true + end + + + # add new target OS. + # If already exist, return false , otherwise true + def add_target_os( os_name ) + + # if already exit, return false + @supported_os_list.each do |os| + if os.eql? os_name then + return false + end + end + + # add it into list + @supported_os_list.push os_name + + return true + end + + # get remote server def get_available_server ( job ) candidates = [] - # check local - if @jobmgr.get_number_of_empty_room > 0 and can_build?(job) then + # calculate empty rooms + # if sub job, his parent should be excluded + local_empty_rooms = @jobmgr.get_number_of_empty_room + + if local_empty_rooms > 0 and can_build?(job) then candidates.push self end - # if Local build job, just check local - if job.instance_of? LocalBuildJob then return candidates[0] end - - # get availables - for server in @friend_servers - # select only "RUNNING" & possible one - if ( server.status == "RUNNING" and server.can_build?( job ) and - not server.has_waiting_jobs and - server.get_number_of_empty_room > 0 ) - candidates.push server + # get availables server + # but, job must not be "REGISTER" and "MULTIBUILD" job + if job.type != "REGISTER" and job.type != "MULTIBUILD" then + @friend_servers.each do |server| + if ( server.status == "RUNNING" and server.can_build?( job ) and + not server.has_waiting_jobs and + server.get_file_transfer_cnt() == 0 and + server.get_number_of_empty_room > 0 ) + candidates.push server + end end end @@ -208,7 +297,7 @@ class BuildServer # get best # it is better if working jobs count is less max_empty_room = best_server.get_number_of_empty_room - for server in candidates + candidates.each do |server| # check whether idle, use it if not server.has_working_jobs then return server end @@ -234,7 +323,7 @@ class BuildServer if can_build? job then return true end #if not found, check friends - for server in @friend_servers + @friend_servers.each do |server| if server.status == "RUNNING" and job.can_be_built_on? 
server.host_os then return true @@ -261,5 +350,6 @@ class BuildServer def has_waiting_jobs return @jobmgr.has_waiting_jobs end + end diff --git a/src/build_server/BuildServerController.rb b/src/build_server/BuildServerController.rb index a58734d..4be6726 100644 --- a/src/build_server/BuildServerController.rb +++ b/src/build_server/BuildServerController.rb @@ -34,7 +34,7 @@ class BuildServerController @@instance_map = {} # create - def self.create_server (id, path, pkgsvr_url, pkgsvr_addr, pkgsvr_id) + def self.create_server (id, path, pkgsvr_url, pkgsvr_addr, pkgsvr_port, pkgsvr_id, ftpsvr_addr, ftpsvr_port, ftpsvr_username, ftpsvr_passwd) # check server config root check_build_server_root @@ -45,11 +45,11 @@ class BuildServerController end # create new instance and return it - @@instance_map[id] = BuildServer.new( id, path, pkgsvr_url, pkgsvr_addr, pkgsvr_id ) + @@instance_map[id] = BuildServer.new( id, path, pkgsvr_url, pkgsvr_addr, pkgsvr_port, pkgsvr_id, ftpsvr_addr, ftpsvr_port, ftpsvr_username, ftpsvr_passwd) # set default @@instance_map[id].git_server_url="gerrithost:" - if Utils::HOST_OS == "windows" then + if Utils.is_windows_like_os(Utils::HOST_OS) then @@instance_map[id].git_bin_path="/c/Program\\ Files/Git/bin/git.exe" else @@instance_map[id].git_bin_path="/usr/bin/git" @@ -58,7 +58,6 @@ class BuildServerController @@instance_map[id].jobmgr.max_working_jobs= 2 @@instance_map[id].job_log_url="" @@instance_map[id].send_mail="NO" - @@instance_map[id].pkgsvr_cache_path="#{path}/pkgsvr_cache" # write config @@ -79,12 +78,9 @@ class BuildServerController FileUtils.rm_rf "#{BuildServer::CONFIG_ROOT}/#{id}" @@instance_map[id] = nil puts "Removed the server \"#{id}\"" + else + puts "The server \"#{id}\" does not exist!" end - - # remove local package server - local_pkgsvr = PackageServer.new( id ) - local_pkgsvr.remove_server(id) - end @@ -98,7 +94,7 @@ class BuildServerController # check server config if not File.exist? "#{BuildServer::CONFIG_ROOT}/#{id}/server.cfg" - raise RuntimeError, "The server \"#{id}\" does not exist." + raise RuntimeError, "The server \"#{id}\" does not exist!" end # get server config and return its object @@ -137,7 +133,7 @@ class BuildServerController # send request stop_ok = false - if client.send "STOP,#{server.password}" then + if client.send "STOP|#{server.password}" then # recevie & print mismatched = false result = client.read_lines do |l| @@ -161,6 +157,94 @@ class BuildServerController return true end + # upgrade server + def self.upgrade_server( id ) + + # server + server = get_server(id) + client = BuildCommClient.create( "127.0.0.1", server.port ) + if client.nil? then + puts "Server is not running!" + return false + end + + # send request + upgrade_ok = false + if client.send "UPGRADE|#{server.password}" then + # recevie & print + mismatched = false + result = client.read_lines do |l| + puts l + if l.include? "Password mismatched!" then + mismatched = true + end + end + if result and not mismatched then + upgrade_ok = true + end + end + + # terminate + client.terminate + + if not upgrade_ok then + puts "Server upgrade failed!" + end + + return true + end + + # request upgrade friends build server + def self.request_upgrade_server( id ) + + server = get_server(id) + server_dir = "#{BuildServer::CONFIG_ROOT}/#{id}" + + if File.exist? 
"#{server_dir}/friends" then + File.open( "#{server_dir}/friends", "r" ) do |f| + f.each_line do |l| + if l.split(",").count < 2 then next end + ip = l.split(",")[0].strip + port = l.split(",")[1].strip + + client = BuildCommClient.create( ip, port ) + if client.nil? then + puts "Friend Server #{ip}:#{port} is not running!" + next + end + # send request + upgrade_ok = false + if client.send "UPGRADE|#{server.password}" then + # recevie & print + mismatched = false + result = client.read_lines do |l| + puts l + if l.include? "Password mismatched!" then + mismatched = true + end + end + if result and not mismatched then + upgrade_ok = true + end + end + + # terminate + client.terminate + + if upgrade_ok then + puts "Friend Server #{ip}:#{port} upgrade requested!" + else + puts "Friend Server #{ip}:#{port} upgrade failed!" + end + end + end + else + puts "No Friend Server." + end + + return true + end + # add friend server def self.add_friend_server( id, ip, port ) @@ -185,82 +269,210 @@ class BuildServerController end - # build git repository and upload - def self.build_git( id, repository, commit, os, url, resolve ) - - # server + # add remote package server + def self.add_remote_package_server(id, url, proxy ) server = get_server(id) - client = BuildCommClient.create( "127.0.0.1", server.port ) - if client.nil? then return false end + + # add + if server.add_remote_package_server( url, proxy ) then - # send request - client.send "BUILD,GIT,#{repository},#{commit},#{os}" + # write config + server_dir = "#{BuildServer::CONFIG_ROOT}/#{server.id}" + f = File.open( "#{server_dir}/remote_pkg_servers", "a" ) + if not proxy.nil? then + f.puts "#{url}|#{proxy}" + else + f.puts "#{url}|" + end + f.close + + puts "Remote package server is added!" + + return true + else + puts "The server already exists in list!" - # recevie & print - client.print_stream + return false + end + end - # terminate - client.terminate - return true + # add supported target os + def self.add_target_os( id, os_name ) + # TODO:check os foramt + if os_name == "default" then + puts "Cannot use \"default\" as target OS name!" + return false + end + + # get server + server = get_server(id) + + # add + if server.add_target_os( os_name ) then + + # write config + server_dir = "#{BuildServer::CONFIG_ROOT}/#{server.id}" + f = File.open( "#{server_dir}/supported_os_list", "a" ) + f.puts "#{os_name}" + f.close + + puts "Target OS is added successfully!" + + return true + else + puts "Target OS already exists in list!" + return false + end end - # resolve git and build it and upload - def resolve_git( id, repository, commit, os, url ) - # server + # add project + def self.add_project( id, project_name, git_repos, git_branch, remote_server_id, passwd, os_string ) + # get server server = get_server(id) - client = BuildCommClient.create( "127.0.0.1", server.port ) - if client.nil? then return false end + + # get supported os for project. + # if not specified, all supported os of the server will be used + if os_string.nil? or os_string == "default" then + os_list = server.supported_os_list + else + os_list = os_string.strip.split(",") + end + + # check OS name + os_list.each do |os| + if not server.supported_os_list.include? os then + puts "Unsupported OS name \"#{os}\" is used!" 
+ puts "Check the following supported OS list:" + server.supported_os_list.each do |s_os| + puts " * #{s_os}" + end - # send request - client.send "RESOLVE,GIT,#{repository},#{commit},#{os}" + return false + end + end - # recevie & print - client.print_stream + # add + if not git_repos.nil? and not git_branch.nil? then + result = server.prjmgr.add_git_project( project_name, git_repos, git_branch, passwd, os_list ) + elsif not remote_server_id.nil? then + result = server.prjmgr.add_remote_project( project_name, remote_server_id, passwd, os_list) + else + result = false + end + + if result then + puts "Adding project succeeded!" + return true + else + puts "Adding project failed!" + return false + end + end - # terminate - client.terminate - return true + # add binary project + def self.add_binary_project( id, project_name, pkg_name, passwd, os_string ) + # get server + server = get_server(id) + + # get supported os for project. + # if not specified, all supported os of the server will be used + if os_string.nil? or os_string == "default" then + os_list = server.supported_os_list + else + os_list = os_string.strip.split(",") + end + + # add + result = server.prjmgr.add_binary_project( project_name, pkg_name, passwd, os_list ) + + if result then + puts "Adding project succeeded!" + return true + else + puts "Adding project failed!" + return false + end end - # build local project and upload - def self.build_local( id, local_path, os, url, resolve ) + # full build + def self.build_all_projects( id ) + # server server = get_server(id) client = BuildCommClient.create( "127.0.0.1", server.port ) - if client.nil? then return false end + if client.nil? then + puts "Server is not running!" + return false + end # send request - client.send "BUILD,LOCAL,#{local_path},#{os}" - - # recevie & print - client.print_stream + fullbuild_ok = false + if client.send "FULLBUILD|#{server.password}" then + # recevie & print + mismatched = false + result = client.read_lines do |l| + puts l + if l.include? "Password mismatched!" then + mismatched = true + end + end + if result and not mismatched then + fullbuild_ok = true + end + end # terminate client.terminate + + if not fullbuild_ok then + puts "Full build failed!" + end return true end - # resolve local project and build it and upload - def resolve_local( path, os ) + def self.register_package(id, file_path) # server server = get_server(id) client = BuildCommClient.create( "127.0.0.1", server.port ) - if client.nil? then return false end + if client.nil? then + puts "Server is not running!" + return false + end - # send request - client.send "RESOLVE,LOCAL,#{local_path},#{os}" + if not File.exist? file_path then + puts "File not found!" + return false + end - # recevie & print - client.print_stream + file_path = File.expand_path(file_path) + # send request + success = false + if client.send "REGISTER|BINARY-LOCAL|#{file_path}|#{server.password}" then + # recevie & print + mismatched = false + result = client.read_lines do |l| + puts l + if l.include? "Password mismatched!" then + mismatched = true + end + end + if result and not mismatched then + success = true + end + end # terminate client.terminate + + if not success then + puts "Registering package failed!" + end return true end @@ -281,7 +493,9 @@ class BuildServerController def self.write_server_config( server ) # create config folder server_dir = "#{BuildServer::CONFIG_ROOT}/#{server.id}" - FileUtils.mkdir_p( "#{server_dir}" ) + if not File.exist? 
server_dir then + FileUtils.mkdir_p( server_dir ) + end # write configuration File.open( "#{server_dir}/server.cfg", "w" ) do |f| @@ -289,8 +503,8 @@ class BuildServerController f.puts "PATH=#{server.path}" f.puts "PSERVER_URL=#{server.pkgserver_url}" f.puts "PSERVER_ADDR=#{server.pkgserver_addr}" + f.puts "PSERVER_PORT=#{server.pkgserver_port}" f.puts "PSERVER_ID=#{server.pkgserver_id}" - f.puts "PSERVER_CACHE_PATH=#{server.pkgsvr_cache_path}" f.puts "GIT_SERVER_URL=#{server.git_server_url}" f.puts "GIT_BIN_PATH=#{server.git_bin_path}" f.puts "ALLOWED_GIT_BRANCH=#{server.allowed_git_branch}" @@ -299,6 +513,12 @@ class BuildServerController f.puts "SEND_MAIL=#{server.send_mail}" f.puts "TEST_TIME=#{server.test_time}" if server.test_time > 0 f.puts "PASSWORD=#{server.test_time}" if server.password != "0000" + f.puts "JOB_KEEP_TIME=#{server.keep_time}" + f.puts "FTP_ADDR=#{server.ftp_addr}" + f.puts "FTP_PORT=#{server.ftp_port}" + f.puts "FTP_USERNAME=#{server.ftp_username}" + f.puts "FTP_PASSWD=#{server.ftp_passwd}" + f.puts "PKG_SYNC_PERIOD=#{server.pkg_sync_period}" end end @@ -308,8 +528,8 @@ class BuildServerController path="" pkgsvr_url="" pkgsvr_addr="" + pkgsvr_port="3333" pkgsvr_id="" - pkgsvr_cache_path="" git_server_url="gerrithost:" git_bin_path="/usr/bin/git" allowed_git_branch="" @@ -318,6 +538,12 @@ class BuildServerController send_mail="NO" test_time=0 password="0000" + keep_time=86400 + ftp_addr="" + ftp_port="21" + ftp_username="" + ftp_passwd="" + pkg_sync_period=600 # read configuration server_dir = "#{BuildServer::CONFIG_ROOT}/#{id}" @@ -332,10 +558,10 @@ class BuildServerController pkgsvr_url = l[idx,length].strip elsif l.start_with?("PSERVER_ADDR=") pkgsvr_addr = l[idx,length].strip + elsif l.start_with?("PSERVER_PORT=") + pkgsvr_port = l[idx,length].strip elsif l.start_with?("PSERVER_ID=") pkgsvr_id = l[idx,length].strip - elsif l.start_with?("PSERVER_CACHE_PATH=") - pkgsvr_cache_path = l[idx,length].strip elsif l.start_with?("GIT_SERVER_URL=") git_server_url = l[idx,length].strip elsif l.start_with?("GIT_BIN_PATH=") @@ -352,6 +578,18 @@ class BuildServerController test_time = l[idx,length].strip.to_i elsif l.start_with?("PASSWORD=") password = l[idx,length].strip.to_i + elsif l.start_with?("JOB_KEEP_TIME=") + keep_time = l[idx,length].strip.to_i + elsif l.start_with?("FTP_ADDR=") + ftp_addr = l[idx,length].strip + elsif l.start_with?("FTP_PORT=") + ftp_port = l[idx,length].strip + elsif l.start_with?("FTP_USERNAME=") + ftp_username = l[idx,length].strip + elsif l.start_with?("FTP_PASSWD=") + ftp_passwd = l[idx,length].strip + elsif l.start_with?("PKG_SYNC_PERIOD=") + pkg_sync_period = l[idx,length].strip.to_i else next end @@ -359,7 +597,7 @@ class BuildServerController end # create server object - obj = BuildServer.new( id, path, pkgsvr_url, pkgsvr_addr, pkgsvr_id ) + obj = BuildServer.new( id, path, pkgsvr_url, pkgsvr_addr, pkgsvr_port, pkgsvr_id, ftp_addr, ftp_port, ftp_username, ftp_passwd ) # check running port if File.exist? "#{server_dir}/run" then @@ -381,6 +619,28 @@ class BuildServerController end end + # check remote package server + if File.exist? "#{server_dir}/remote_pkg_servers" then + File.open( "#{server_dir}/remote_pkg_servers", "r" ) do |f| + f.each_line do |l| + if l.split("|").count < 2 then next end + url = l.split("|")[0].strip + proxy = l.split("|")[1].strip + obj.add_remote_package_server( url, proxy ) + end + end + end + + # check supported os + if File.exist? 
"#{server_dir}/supported_os_list" then + File.open( "#{server_dir}/supported_os_list", "r" ) do |f| + f.each_line do |l| + os_name = l.strip + obj.add_target_os( os_name ) + end + end + end + # set git server url obj.git_server_url = git_server_url @@ -399,16 +659,24 @@ class BuildServerController # set allowed git branch name obj.allowed_git_branch = allowed_git_branch - # set package server path - pkgsvr_cache_path = (pkgsvr_cache_path.empty? ? "#{path}/pkgsvr_cache":pkgsvr_cache_path) - obj.pkgsvr_cache_path= pkgsvr_cache_path - # set test time obj.test_time = test_time # set password obj.password = password + # set password + obj.keep_time = keep_time + + # set ftp infomation + obj.ftp_addr = ftp_addr + obj.ftp_port = ftp_port + obj.ftp_username = ftp_username + obj.ftp_passwd = ftp_passwd + + # pkg synchronization + obj.pkg_sync_period = pkg_sync_period + # save config write_server_config( obj ) diff --git a/src/build_server/BuildServerOptionParser.rb b/src/build_server/BuildServerOptionParser.rb index f14f69c..4c1ba9a 100644 --- a/src/build_server/BuildServerOptionParser.rb +++ b/src/build_server/BuildServerOptionParser.rb @@ -26,56 +26,192 @@ Contributors: - S-Core Co., Ltd =end +$LOAD_PATH.unshift File.dirname(__FILE__)+"/src/common" require 'optparse' +require 'utils' + +def option_error_check( options ) + case options[:cmd] + + when "create" + if options[:name].nil? or options[:name].empty? or + options[:url].nil? or options[:url].empty? or + options[:domain].nil? or options[:domain].empty? or + options[:fdomain].nil? or options[:fdomain].empty? then + raise ArgumentError, "Usage: build-svr create -n -u -d -t " + end + + when "remove" + if options[:name].nil? or options[:name].empty? then + raise ArgumentError, "Usage: build-svr remove -n " + end + + when "start" + if options[:name].nil? or options[:name].empty? or + options[:port].nil? then + raise ArgumentError, "Usage: build-svr start -n -p " + end + + when "stop" + if options[:name].nil? or options[:name].empty? then + raise ArgumentError, "Usage: build-svr stop -n " + end + + when "upgrade" + if options[:name].nil? or options[:name].empty? then + raise ArgumentError, "Usage: build-svr upgrade -n " + end + + when "add-svr" + if options[:name].nil? or options[:name].empty? or + ((options[:domain].nil? or options[:domain].empty?) and + (options[:url].nil? or options[:url].empty?)) then + raise ArgumentError, "Usage: build-svr add-svr -n (-d |-u ) [--proxy ]" + end + + when "add-prj" + if options[:name].nil? or options[:name].empty? or + options[:pid].nil? or options[:pid].empty? then + raise ArgumentError, "Usage: build-svr add-prj -n -N (-g -b |-P ) [-w ] [-o ]" + end + + when "add-os" + if options[:name].nil? or options[:name].empty? or + options[:os].nil? or options[:os].empty? then + raise ArgumentError, "Usage: build-svr add-os -n -o " + end + + when "fullbuild" + if options[:name].nil? or options[:name].empty? then + raise ArgumentError, "Usage: build-svr fullbuild -n " + end + + when "register" + if options[:name].nil? or options[:name].empty? or + options[:package].nil? or options[:package].empty? then + raise ArgumentError, "Usage: build-svr register -n -P " + end + else + raise ArgumentError, "Input is incorrect : #{options[:cmd]}" + end +end def option_parse options = {} - banner = "Usage: build-svr {create|remove|start|build|help} ..." 
+ "\n" \ - + "\t" + "build-svr create -n -u -d -i " + "\n" \ - + "\t" + "build-svr remove -n " + "\n" \ - + "\t" + "build-svr start -n [-p " + "\n" \ - + "\t" + "build-svr add -n [-d -p ]" + "\n" - - optparse = OptionParser.new do|opts| + banner = "Build-server administer service command-line tool." + "\n" \ + + "\n" + "Usage: build-svr [OPTS] or build-svr (-h|-v)" + "\n" \ + + "\n" + "Subcommands:" + "\n" \ + + "\t" + "create Create the build-server." + "\n" \ + + "\t" + "remove Remove the build-server." + "\n" \ + + "\t" + "start Start the build-server." + "\n" \ + + "\t" + "stop Stop the build-server." + "\n" \ + + "\t" + "upgrade Upgrade the build-server include friends." + "\n" \ + + "\t" + "add-svr Add remote build/package server for support multi-OS or distribute build job." + "\n" \ + + "\t" + "add-prj Register information for project what you want build berfore building a project." + "\n" \ + + "\t" + "register Register the package to the build-server." + "\n" \ + + "\t" + "fullbuild Build all your projects and upload them to package server." + "\n" \ + + "\n" + "Subcommand usage:" + "\n" \ + + "\t" + "build-svr create -n -u -d -t " + "\n" \ + + "\t" + "build-svr remove -n " + "\n" \ + + "\t" + "build-svr start -n -p " + "\n" \ + + "\t" + "build-svr stop -n " + "\n" \ + + "\t" + "build-svr upgrade -n " + "\n" \ + + "\t" + "build-svr add-svr -n (-d |-u ) [--proxy ]" + "\n" \ + + "\t" + "build-svr add-prj -n -N (-g -b |-P ) [-w ] [-o ]" + "\n" \ + + "\t" + "build-svr add-os -n -o " + "\n" \ + + "\t" + "build-svr register -n -P " + "\n" \ + + "\t" + "build-svr fullbuild -n " + "\n" \ + + "\n" + "Options:" + "\n" + + optparse = OptionParser.new(nil, 32, ' '*8) do|opts| # Set a banner, displayed at the top # of the help screen. opts.banner = banner - opts.on( '-n', '--name ', 'build server name' ) do|name| + opts.on( '-n', '--name ', 'build server name' ) do|name| options[:name] = name end - opts.on( '-u', '--url ', 'package server URL: http://xxx/yyy/zzz' ) do|url| + opts.on( '-u', '--url ', 'package server url: http://127.0.0.1/dibs/unstable' ) do|url| options[:url] = url end - opts.on( '-d', '--domain ', 'package svr or friend svr ip or ssh alias' ) do|domain| + options[:proxy] = nil + opts.on( '--proxy ', 'proxy url: http://172.21.111.100:2222' ) do|proxy| + options[:proxy] = proxy + end + + opts.on( '-d', '--address ', 'server address: 127.0.0.1:2224' ) do|domain| options[:domain] = domain end - opts.on( '-i', '--id ', 'package server id' ) do|pid| + options[:port] = 2222 + opts.on( '-p', '--port ', 'server port number: 2224' ) do|port| + options[:port] = port.strip.to_i + end + + opts.on( '-P', '--pkg ', 'package file path or name' ) do|package| + options[:package] = package.strip + end + + options[:os] = nil + opts.on( '-o', '--os ', 'ex) ubuntu-32,windows-32' ) do|os| + if not Utils.multi_argument_test( os, "," ) then + raise ArgumentError, "OS variable parsing error : #{os}" + end + options[:os] = os + end + + opts.on( '-N', '--pname ', 'project name' ) do|pid| options[:pid] = pid end - options[:port] = 2222 - opts.on( '-p', '--port ', 'port' ) do|port| - options[:port] = port.strip.to_i + opts.on( '-g', '--git ', 'git repository' ) do|git| + options[:git] = git + end + + opts.on( '-b', '--branch ', 'git branch' ) do|branch| + options[:branch] = branch + end + + #opts.on( '-r', '--remote ', 'remote server id' ) do|remote| + # options[:remote] = remote + #end + + options[:passwd] = "" + opts.on( '-w', '--passwd ', 'password for managing project' ) do|passwd| + options[:passwd] 
= passwd + end + + opts.on( '-t', '--ftp ', 'ftp server url: ftp://dibsftp:dibsftp@127.0.0.1:1024' ) do|domain| + options[:fdomain] = domain end opts.on( '-h', '--help', 'display this information' ) do - puts opts + opts.help.split("\n").each {|op| puts op if not op.include? "--CHILD"} + exit + end + + opts.on( '-v', '--version', 'display version' ) do + puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version() exit end + + opts.on( '-C', '--CHILD', 'child process' ) do + options[:child] = true + end end cmd = ARGV[0] - - if cmd.eql? "create" or cmd.eql? "remove" or cmd.eql? "start" or - cmd.eql? "stop" or cmd.eql? "add" or - cmd =~ /(help)|(-h)|(--help)/ then + if cmd.eql? "create" or cmd.eql? "remove" or + cmd.eql? "start" or cmd.eql? "upgrade" or + cmd.eql? "stop" or cmd.eql? "add-svr" or + cmd.eql? "add-prj" or cmd.eql? "add-os" or + cmd.eql? "fullbuild" or cmd.eql? "register" or + cmd =~ /(-v)|(--version)/ or + cmd =~ /(help)|(-h)|(--help)/ then if cmd.eql? "help" then ARGV[0] = "-h" @@ -83,10 +219,12 @@ def option_parse options[:cmd] = ARGV[0] else - raise ArgumentError, banner + raise ArgumentError, "Usage: build-svr [OPTS] or build-svr -h" end optparse.parse! + + option_error_check options return options end diff --git a/src/build_server/CommonProject.rb b/src/build_server/CommonProject.rb new file mode 100644 index 0000000..10caaaf --- /dev/null +++ b/src/build_server/CommonProject.rb @@ -0,0 +1,102 @@ +=begin + + CommonProject.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require 'fileutils' +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" + +class CommonProject + attr_accessor :name, :type, :passwd, :os_list + + # initialize + def initialize( name, type, server, os_list ) + @name = name + @type = type + @passwd = "" + @os_list = os_list + @server = server + @extra_infos = {} + read_ext_info + end + + + #return passwd + def is_passwd_set?() + return ( not @passwd.empty? ) + end + + + def passwd_match?(word) + if not is_passwd_set? then return true end + + if word.eql? @passwd then + return true + else + return false + end + end + + + def write_ext_info + # write to file + info_file = "#{@server.path}/projects/#{@name}/extra" + File.open( info_file, "w" ) do |f| + @extra_infos.each { |key,value| + f.puts "#{key} : #{value}" + } + end + end + + + # set extra info + def read_ext_info + info_file = "#{@server.path}/projects/#{@name}/extra" + if not File.exists? info_file then return end + File.open( info_file, "r" ) do |f| + while (not f.gets and line = f.gets.split(":")) + if not line[1].nil? 
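# The "extra" file written by write_ext_info above holds one "key : value" pair
# per line (e.g. "Latest_log_count : 12"). A minimal, self-contained sketch of
# that round trip; the file path here is hypothetical:
extra_file = "/tmp/dibs-extra-example"

infos = { "Latest_log_count" => "12" }
File.open( extra_file, "w" ) do |f|
  infos.each { |key, value| f.puts "#{key} : #{value}" }
end

restored = {}
File.open( extra_file, "r" ) do |f|
  f.each_line do |l|
    key, value = l.split( ":", 2 )
    if not value.nil? then restored[key.strip] = value.strip end
  end
end
puts restored.inspect    # => {"Latest_log_count"=>"12"}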
then + @extra_infos[line[0].strip] = line[1].strip + end + end + end + end + + + def set_log_cnt( cnt ) + @extra_infos["Latest_log_count"] = cnt.to_s + end + + + def get_latest_log_cnt + result = @extra_infos["Latest_log_count"] + if not result.nil? then + return result.to_i + end + return nil + end +end diff --git a/src/build_server/FullBuildJob.rb b/src/build_server/FullBuildJob.rb new file mode 100644 index 0000000..5c83fee --- /dev/null +++ b/src/build_server/FullBuildJob.rb @@ -0,0 +1,255 @@ +=begin + + FullBuildJob.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require "fileutils" +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/builder" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server" +require "client.rb" +require "PackageManifest.rb" +require "Version.rb" +require "Builder.rb" +require "RemoteBuilder.rb" +require "BuildServer.rb" +require "JobLog.rb" +require "mail.rb" + +class FullBuildJob + + attr_accessor :id, :server, :pre_jobs, :os, :type + attr_accessor :status, :log, :source_path + attr_accessor :pkgsvr_client, :thread + attr_accessor :is_fullbuild_job + + # initialize + def initialize (server) + @server = server + @id = server.jobmgr.get_new_job_id() + @log = nil + @type = "FULLBUILD" + + @status = "JUST_CREATED" + @host_os = Utils::HOST_OS + @pkgserver_url = @server.pkgserver_url + @job_root = "#{@server.path}/jobs/#{@id}" + @source_path = @job_root+"/temp" + @job_working_dir=@job_root+"/works" + @buildroot_dir = "#{@job_root}/buildroot" + @pre_jobs = [] #pre-requisite jobs + + @is_fullbuild_job = false + end + + + # execute + def execute(sync=false) + @log.info( "Invoking a thread for FULL-BUILD Job #{@id}", Log::LV_USER) + if @status == "ERROR" then return end + @thread = Thread.new { + begin + thread_main() + terminate() + rescue => e + @log.error e.message + @log.error e.backtrace.inspect + end + } + + if sync then + @thread.join + end + + return true + end + + + # + def init + # mkdir + if not File.exist? @job_root then + FileUtils.mkdir_p @job_root + end + + # create logger + if @log.nil? then + @log = JobLog.new(self, nil ) + end + + @log.info( "Initializing job...", Log::LV_USER) + + # create dummy source path + if not File.exist? 
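# FullBuildJob#execute above runs thread_main on a worker thread and joins it only
# when sync is true. A stripped-down, runnable sketch of the same pattern; TinyJob
# is a stand-in class, not part of DIBS:
class TinyJob
  def execute( sync=false )
    @thread = Thread.new {
      begin
        thread_main()
      rescue => e
        puts e.message
      end
    }
    if sync then @thread.join end
    return true
  end

  def thread_main
    sleep 1
    puts "job finished"
  end
end

TinyJob.new.execute(true)    # blocks until thread_main returns
TinyJob.new.execute(false)   # returns at once; the process may exit before this job ends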
@source_path then + FileUtils.mkdir_p @source_path + end + + # set up pkgsvr_client + @pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log) + @pkgsvr_client.update + + return true + end + + + #terminate + def terminate() + # report error + if @status == "ERROR" then + @log.error( "Job is stopped by ERROR" , Log::LV_USER) + @server.cleaner.clean_afterwards(@id) + else + # clean up + @server.cleaner.clean(@id) + end + + # close logger + @log.close + end + + + #cancel + def cancel() + #TODO + end + + + # check building is possible + def can_be_built_on?(host_os) + return true + end + + + def has_build_dependency?(other_job) + return true + end + + + def has_same_packages?( wjob ) + return self.eql? wjob + end + + + def does_depend_on?( wjob ) + return true + end + + + def does_depended_by?( wjob ) + return true + end + + + def is_connected? + return true + end + + + # return the job is asyncronous job + def is_asynchronous_job? + return false + end + + # set logger + def set_logger( logger ) + @log = logger + end + + + # + # PROTECTED METHODS + # + protected + + + # main module + def thread_main + @log.info( "New Job #{@id} is started", Log::LV_USER) + + # check passwd + + # create sub jobs + build_jobs = [] + @server.prjmgr.projects.each do |prj| + if prj.type != "GIT" then next end + build_jobs += @server.prjmgr.create_new_jobs_for_all_os( prj.name ) + end + + # set full build job flag + build_jobs.each do |job| + job.is_fullbuild_job = true + job.set_parent_job( self ) + end + + # add all jobs to jobmanager + job_status_map = {} # for tracking job status changes + build_jobs.each do |job| + @server.jobmgr.add_internal_job( job ) + + @log.info( "Added new job \"#{job.id}\"(#{job.get_project().name}) for #{job.os}!", + Log::LV_USER) + + if not @server.job_log_url.empty? then + @log.info( " * Log URL : #{@server.job_log_url}/#{job.id}/log", Log::LV_USER) + end + + # set satus + job_status_map[job.id] = job.status + end + + # show job status changes + all_jobs_finished = false + error_exist = false + while not all_jobs_finished + all_jobs_finished = true + build_jobs.each do |job| + + # check status chanaged, if then print + if job_status_map[ job.id ] != job.status then + @log.info("Job #{job.id}(#{job.get_project().name},#{job.os}) is #{job.status}", Log::LV_USER) + job_status_map[ job.id ] = job.status + end + if job.status != "ERROR" and job.status != "FINISHED" then + all_jobs_finished = false + end + if job.status == "ERROR" then error_exist = true end + end + sleep 1 + end + + # check error + if error_exist then + @status = "ERROR" + return + end + + # INFO. 
don't change this string + @log.info( "Job is completed!", Log::LV_USER) + @status = "FINISHED" + end + +end diff --git a/src/build_server/GitBuildJob.rb b/src/build_server/GitBuildJob.rb index 75fc502..5b1b696 100644 --- a/src/build_server/GitBuildJob.rb +++ b/src/build_server/GitBuildJob.rb @@ -27,194 +27,159 @@ Contributors: =end require "fileutils" +require "thread" $LOAD_PATH.unshift File.dirname(__FILE__) $LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" require "BuildJob.rb" require "utils.rb" + class GitBuildJob < BuildJob - attr_accessor :id, :status, :pkginfo, :pkgsvr_client, :thread, :log, :rev_fail_list, :rev_success_list, :source_path # initialize - def initialize ( repos, commit, os, pkgsvr_url, options, server, parent, outstream, resolve) - super() - @rev_fail_list = [] - @rev_success_list = [] - @id = server.jobmgr.get_new_job_id() - @server = server - @parent = parent - @git_repos = repos - @git_commit = commit - @os = os - @host_os = Utils::HOST_OS - if not pkgsvr_url.nil? and not pkgsvr_url.empty? then - @pkgserver_url = pkgsvr_url - else - local_pkgsvr = @server.local_pkgsvr - @pkgserver_url = local_pkgsvr.location + "/" + local_pkgsvr.get_default_dist_name - end - @options = options - @resolve = resolve - @outstream = outstream - - @status = "JUST_CREATED" - @sub_jobs = [] - @job_root = "#{@server.path}/jobs/#{@id}" - @source_path = @job_root+"/temp" - @pkginfo = nil - @job_working_dir=@job_root+"/works" - - @thread = nil - - # mkdir - FileUtils.rm_rf "#{@server.path}/jobs/#{@id}" - FileUtils.mkdir_p "#{@server.path}/jobs/#{@id}" - - # create logger - @log = JobLog.new(self,"#{@server.path}/jobs/#{@id}/log", outstream ) + def initialize( project, os, server ) + super(server.jobmgr.get_new_job_id(), project, os, server) + @git_repos = project.repository + @git_branch = project.branch + @git_commit = nil end def terminate() - # report error if @status == "ERROR" then @log.error( "Job is stopped by ERROR" , Log::LV_USER) + @server.cleaner.clean_afterwards(@id) + elsif @status == "CANCELED" then + @log.error( "Job is CANCELED" , Log::LV_USER) + @server.cleaner.clean_afterwards(@id) else - # if succeeded, clean up - FileUtils.rm_rf "#{@source_path}" - FileUtils.rm_rf "#{@job_working_dir}" + @log.info( "Job is FINISHED successfully!" , Log::LV_USER) + + # if succeeded, register source info and copy pkginfo.manifest + @log.info( "Updating the source info for project \"#{@project.name}\"" , Log::LV_USER) + @project.add_source_info( @pkginfo.get_version(), @git_commit) + @project.copy_package_info( @pkginfo.get_version(), + "#{@source_path}/package/pkginfo.manifest") + @project.set_log_cnt( @log.cnt ) + @project.write_ext_info + + # clean up + @server.cleaner.clean(@id) + end + + # clean up builder directory if exist? + if Builder.exist? "JB#{@id}" then + Builder.remove("JB#{@id}") end # send mail if ( @server.send_mail.eql? "YES" ) and ( not @pkginfo.nil? ) and ( not @pkginfo.packages.nil? ) then mail_list = [] contents = [] - done_pkg_list = [] contents.push " " contents.push " Git information : #{@git_commit} " contents.push " " contents.push "%-30s| %10s | %10s" % ["package name", "version", "os"] contents.push "---------------------------------------------------------------" - for pkg in @pkginfo.packages + @pkginfo.packages.each do |pkg| if not pkg.os.eql? 
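# The mail body assembled above is a fixed-width table built from format strings.
# The same formatting in isolation, with made-up package data:
rows = [ ["libfoo", "0.1.2", "ubuntu-32"],
         ["libfoo-dev", "0.1.2", "ubuntu-32"] ]
contents = []
contents.push "%-30s| %10s | %10s" % ["package name", "version", "os"]
contents.push "---------------------------------------------------------------"
rows.each { |r| contents.push( "%-30s| %10s | %10s" % r ) }
puts contents.join("\n")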
@os then next end mail_list = mail_list | Mail.parse_email( pkg.maintainer ) contents.push("%-30s| %10s | %10s" % [ pkg.package_name, pkg.version, pkg.os] ) end - - if @status == "ERROR" then - subject = "[DIBS] Build fail" + + if @status == "ERROR" then + subject = "[DIBS] Build fail" contents.push " " contents.push "check log file" contents.push "* Log : #{@server.job_log_url}/#{@id}/log" + elsif @status == "CANCELED" then + subject = "[DIBS] Build canceled" else - subject = "[DIBS] Build success" + subject = "[DIBS] Build success" end Mail.send_mail(mail_list, subject, contents.join("\n")) - end + end # close logger @log.close - - # send END signal , if connectionn is valid - if @status != "ERROR" and not @outstream.nil? then - BuildCommServer.send_end(@outstream) - end - - # close outstream - if not @outstream.nil? then - BuildCommServer.disconnect( @outstream ) - end end # verify - def pre_verify - @log.info( "Verifying job input...", Log::LV_USER) + def init + # mkdir job root + if not File.exist? @job_root then FileUtils.mkdir_p @job_root end - # git clone - if not git_cmd("clone #{@server.git_server_url}#{@git_repos} temp", @job_root) then - @log.error( "Failed on \"git clone #{@server.git_server_url}/#{@git_repos}\"", Log::LV_USER) - @status = "ERROR" - return false + # create logger + if @log.nil? then + @log = JobLog.new(self, nil ) end - # git reset - if not git_cmd("reset --hard #{@git_commit}", @source_path) then - @log.error( "Failed on \"git reset --hard #{@git_commit}\"", Log::LV_USER) - @status = "ERROR" - return false - end + @log.info( "Initializing job...", Log::LV_USER) - # check branch name if ALLOWED_GIT_BRANCH is not empty - if not @server.allowed_git_branch.empty? then - is_correct_branch = false - - # origin/{branch_name} - if @git_commit == "origin/#{@server.allowed_git_branch}" then - is_correct_branch = true - else - # get commit id - commit_id = "" - result_line = git_cmd_return("log -1",@source_path) - if result_line != nil then - result_line.each do |l| - if l.start_with?("commit ") then - commit_id = l.split(" ")[1].strip - end - end - end - - # check branch - if not commit_id.empty? and commit_id.length == 40 then - result_line = git_cmd_return("branch --contains=#{commit_id} -r", @source_path) - result_line.each do |l| - if l.include? "origin/#{@server.allowed_git_branch}" then - is_correct_branch = true - end - end - end - end + # if internal job, copy external_pkgs + if @is_internal_job then + @log.info( "Copying external dependent pkgs...", Log::LV_USER) + ext_pkgs_dir = "#{@job_root}/external_pkgs" - if not is_correct_branch then - @log.error( "Wrong branch is used! Check your commit-id again", Log::LV_USER) - @status = "ERROR" - return false + incoming_dir = "#{@server.transport_path}/#{@dock_num}" + if File.exist? incoming_dir then + FileUtils.mv "#{incoming_dir}", "#{ext_pkgs_dir}" end + + FileUtils.mkdir_p incoming_dir + end + + # download source code + @git_commit = @project.get_source_code(@git_repos, @git_branch, @git_commit, @source_path, @log) + if @git_commit.nil? then + @status = "ERROR" + return false end - # check pkginfo.manifest if not File.exist? 
"#{@source_path}/package/pkginfo.manifest" - @log.error( "package/pkginfo.manifest doest not exist", Log::LV_USER) + @log.error( "package/pkginfo.manifest does not exist", Log::LV_USER) @status = "ERROR" return false end # set up pkg info + begin @pkginfo = PackageManifest.new("#{@source_path}/package/pkginfo.manifest") + rescue => e + @log.error( e.message, Log::LV_USER) + return false + end # set up pkgsvr_client @pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log) - @pkgsvr_client.update - return true - end + # checking version if not reverse-build job or not internal-job + if not @is_rev_build_check_job and not @is_internal_job then + # check availabiltiy + if not @server.check_job_availability( self ) then + @log.error( "No servers that are able to build your packages.", Log::LV_USER) + @log.error( "Host-OS (#{@os}) is not supported in build server.", Log::LV_USER) + @status = "ERROR" + @server.log.info "Adding the job \"#{@id}\" is canceled" + return false + end + if not check_package_version(@git_commit) then + @status = "ERROR" + return false + end + end - def git_cmd(cmd, working_dir) - build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}" - ret = Utils.execute_shell_with_log(build_command,@log) - - return ret + return true end - def git_cmd_return(cmd, working_dir) - build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}" - ret = Utils.execute_shell_return(build_command) - - return ret + def set_git_commit( commit ) + @git_commit = commit end + end diff --git a/src/build_server/GitBuildProject.rb b/src/build_server/GitBuildProject.rb new file mode 100644 index 0000000..d06af21 --- /dev/null +++ b/src/build_server/GitBuildProject.rb @@ -0,0 +1,264 @@ +=begin + + GitProject.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require 'fileutils' +require "thread" +$LOAD_PATH.unshift File.dirname(__FILE__) +require "CommonProject.rb" +require "GitBuildJob.rb" +require "Version.rb" +require "PackageManifest.rb" + +# mutax for git operation +$git_mutex = Mutex.new + + +class GitBuildProject < CommonProject + attr_accessor :repository, :branch + + # initialize + def initialize( name, repos, branch, server, os_list ) + super(name, "GIT", server, os_list) + @repository = repos + @branch = branch + @source_infos = {} + @package_infos = {} + end + + + # get commid-id of specified package version + def get_commit_id( version ) + return get_source_info( version ) + end + + + # create new job + # if this project cannot support os, return nil + def create_new_job( os ) + if @os_list.include? os then + return GitBuildJob.new( self, os, @server ) + else + return nil + end + end + + + # create new job + def create_new_job_from_version( os, version=nil ) + new_job=create_new_job( os ) + + # set commit id + if version.nil? 
then + version = get_latest_version() + end + + commit = get_commit_id( version ) + if not commit.nil? then + new_job.set_git_commit( commit ) + end + + return new_job + end + + # get latest package version + def get_latest_version() + versions = @package_infos.keys + if not versions.empty? then + versions.sort! {|x,y| Version.new(x).compare(Version.new(y)) } + return versions[-1] + else + return nil + end + end + + + # get all package version + def get_all_versions() + return @package_infos.keys + end + + + # add source source info + def add_source_info( version, info ) + @source_infos[version] = info + + # write to file + sources_file = "#{@server.path}/projects/#{@name}/sources" + File.open( sources_file, "w" ) do |f| + @source_infos.each { |key,value| + f.puts "#{key},#{value}" + } + end + end + + + # get source info + def get_source_info( version ) + return @source_infos[version] + end + + + # add package info + def add_package_info( version, path ) + begin + pkginfo =PackageManifest.new(path) + rescue => e + puts e.message + return + end + @package_infos[version] = pkginfo + end + + + # get package info + def get_package_info( version ) + return @package_infos[version] + end + + + # copy package info + def copy_package_info(version, file_path) + # check pkginfo directory + pkginfo_dir = "#{@server.path}/projects/#{@name}/pkginfos" + if not File.exist? pkginfo_dir then + FileUtils.mkdir_p pkginfo_dir + end + + # copy + pkginfo_file = "#{pkginfo_dir}/#{version}.manifest" + FileUtils.cp(file_path, pkginfo_file) + + add_package_info(version, pkginfo_file) + end + + + def include_package?(name, version=nil, os=nil) + # check version first + if not version.nil? then + version = get_latest_version() + end + + if version.nil? or @package_infos[version].nil? then return false end + + # check supported os + if not os.nil? and not @os_list.include? os then return false end + + # check name and version + pkginfo=@package_infos[version] + pkg_list = os.nil? ? pkginfo.packages : pkginfo.get_target_packages(os) + pkg_list.each do |pkg| + if pkg.package_name.eql? name then return true end + end + + return false + end + + + # download source code to "source_path" and return its commit-id + def get_source_code( git_repos, git_branch, git_commit, source_path, log ) + $git_mutex.synchronize { + # check git directory + git_path = "#{@server.path}/projects/#{@name}/cache/git" + cache_path = "#{@server.path}/projects/#{@name}/cache" + if not File.exist? cache_path then + FileUtils.mkdir_p cache_path + end + + # check branch name + if File.exist? git_path then + current_branch = git_cmd_return( "branch", git_path).select{|x| x.start_with?("*")}[0].split(" ")[1].strip + if current_branch != git_branch then + log.warn( "Branch name is changed.", Log::LV_USER) + FileUtils.rm_rf git_path + end + end + + # git pull operation + if File.exist? git_path and not git_cmd("pull", git_path,log) then + log.warn( "Failed on \"git pull\"", Log::LV_USER) + FileUtils.rm_rf git_path + end + + # if no git, clone it + if not File.exist? git_path then + # if "git pull" failed, try to "git clone" + if not git_cmd("clone #{git_repos} git", cache_path, log) then + log.error( "Failed on \"git clone #{git_repos}\"", Log::LV_USER) + return nil + end + # git checkout + if not git_cmd("checkout #{git_branch}", git_path, log) then + log.error( "Failed on \"git checkout #{git_branch}\"", Log::LV_USER) + return nil + end + end + + if git_commit.nil? 
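# get_latest_version above sorts the known version strings with the project's
# Version class and returns the highest one. The same idea, using Ruby's built-in
# Gem::Version as a stand-in comparator and made-up version numbers:
versions = [ "0.9.5", "0.20.0", "0.20.1" ]
latest = versions.sort { |x,y| Gem::Version.new(x) <=> Gem::Version.new(y) }[-1]
puts latest    # => "0.20.1"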
then + # get git commit-id + commit_id = "" + result_line = git_cmd_return("log -1", git_path) + if result_line != nil then + result_line.each do |l| + if l.start_with?("commit ") then + commit_id = l.split(" ")[1].strip + end + end + end + + git_commit = commit_id + else + # git reset + if not git_cmd("reset --hard #{git_commit}", git_path, log) then + log.error( "Failed on \"git reset --hard #{git_commit}\"", Log::LV_USER) + return nil + end + end + + # copy to source path + FileUtils.cp_r(git_path, source_path) + } + + return git_commit + end + + + def git_cmd(cmd, working_dir, log) + build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}" + ret = Utils.execute_shell_with_log(build_command,log) + + return ret + end + + + def git_cmd_return(cmd, working_dir) + build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}" + ret = Utils.execute_shell_return(build_command) + + return ret + end +end diff --git a/src/build_server/JobClean.rb b/src/build_server/JobClean.rb new file mode 100644 index 0000000..64265d7 --- /dev/null +++ b/src/build_server/JobClean.rb @@ -0,0 +1,192 @@ +=begin + + JobClean.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require "fileutils" +require "thread" +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +require "BuildServer.rb" +require "Action.rb" +require "ScheduledActionHandler.rb" + +$access_listfile = Mutex.new + +class JobCleanAction < Action + + def initialize( time, job_path, list_file, server ) + super(time,0) + + @job_path = job_path + @job_id = @job_path.split("/")[-1] + @list_file = list_file + @server = server + end + + + def init + $access_listfile.synchronize { + File.open(@list_file, "a") do |f| + f.puts "#{@job_id},#{time.year},#{time.month},#{time.day},#{time.hour},#{time.min},#{time.sec}" + end + } + end + + + def execute + # Start to clean job + @server.log.info "Executing clean action for the job #{@job_id}" + begin + execute_internal() + rescue => e + @server.log.error e.message + @server.log.error e.backtrace.inspect + end + end + + + private + def execute_internal() + # remove directories + if File.exist? "#{@job_path}/buildroot" then + FileUtils.rm_rf "#{@job_path}/buildroot" + end + if File.exist? "#{@job_path}/temp" then + FileUtils.rm_rf "#{@job_path}/temp" + end + if File.exist? "#{@job_path}/external_pkgs" then + FileUtils.rm_rf "#{@job_path}/external_pkgs" + end + + # remove line for the job + $access_listfile.synchronize { + lines = [] + # get all lines + if File.exist? @list_file then + File.open(@list_file,"r") do |f| + f.each_line do |l| + lines.push l + end + end + end + + # write the line except my job_id + File.open(@list_file,"w") do |f| + lines.each do |l| + if l.split(",")[0].eql? 
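# JobCleanAction above keeps one line per scheduled clean-up in the form
# "<job_id>,<year>,<month>,<day>,<hour>,<min>,<sec>"; the timestamp is rebuilt
# with Time.mktime when the list is reloaded (see JobCleaner#start below).
# A small, runnable round trip; the file path and job id are hypothetical:
list_file = "/tmp/dibs-clean-example"
t = Time.now
File.open( list_file, "a" ) do |f|
  f.puts "42,#{t.year},#{t.month},#{t.day},#{t.hour},#{t.min},#{t.sec}"
end

File.open( list_file, "r" ) do |f|
  f.each_line do |l|
    id = l.split(",")[0]
    time = Time.mktime( *l.strip.split(",")[1..6].map { |x| x.to_i } )
    puts "job #{id} will be cleaned at #{time}"
  end
end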
@job_id then next end + f.puts l + end + end + } + end +end + + +class JobCleaner + attr_accessor :quit + + # init + def initialize( server ) + @server = server + @handler = ScheduledActionHandler.new + @list_file = "#{BuildServer::CONFIG_ROOT}/#{@server.id}/clean" + end + + + # start thread + def start() + + list_file2 = "#{BuildServer::CONFIG_ROOT}/#{@server.id}/clean_backup" + jobs_path = "#{@server.path}/jobs" + if not File.exist? jobs_path then + FileUtils.mkdir_p jobs_path + end + + # read clean list + clean_list = [] + if File.exist? @list_file then + FileUtils.mv(@list_file,list_file2) + File.open(list_file2, "r") do |f| + f.each_line do |l| + id = l.split(",")[0] + year = l.split(",")[1] + month = l.split(",")[2] + day = l.split(",")[3] + hour = l.split(",")[4] + min = l.split(",")[5] + sec = l.split(",")[6] + + # create job and register + job_path = "#{jobs_path}/#{id}" + time = Time.mktime(year.to_i, month.to_i, day.to_i, hour.to_i, min.to_i, sec.to_i) + @server.log.info "Registered clean-action for the job in list : #{id}" + @handler.register(JobCleanAction.new(time,job_path,@list_file, @server)) + + # add clean list + clean_list.push id + end + end + end + + + # scan all jobs + Dir.new(jobs_path).entries.each do |id| + # skip . or .. + if id.eql? "." or id.eql? ".." then next end + + if not clean_list.include? id then + job_path = "#{jobs_path}/#{id}" + time = Time.now + @server.log.info "Registered clean-action for old job : #{id}" + @handler.register(JobCleanAction.new(time,job_path,@list_file, @server)) + end + end + + # start handler + @handler.start + end + + + # clean after some time + def clean_afterwards(job_id) + time = Time.now + @server.keep_time + job_path = "#{@server.path}/jobs/#{job_id}" + @handler.register(JobCleanAction.new(time, job_path, @list_file, @server)) + + @server.log.info "Registered delayed clean-action for the job #{job_id}" + end + + + # clean directly + def clean(job_id) + time = Time.now + job_path = "#{@server.path}/jobs/#{job_id}" + @handler.register(JobCleanAction.new(time, job_path, @list_file, @server)) + + @server.log.info "Registered clean-action for the job #{job_id}" + end +end diff --git a/src/build_server/JobLog.rb b/src/build_server/JobLog.rb index c46db48..e005207 100644 --- a/src/build_server/JobLog.rb +++ b/src/build_server/JobLog.rb @@ -34,13 +34,67 @@ require "BuildComm.rb" class JobLog < Log - def initialize(job, path, stream_out) - super(path) + def initialize(job, stream_out) + if job.nil? then + super(nil) + else + if not File.exist? "#{job.server.path}/jobs/#{job.id}" then + FileUtils.mkdir_p "#{job.server.path}/jobs/#{job.id}" + end + super("#{job.server.path}/jobs/#{job.id}/log") + end @parent_job=job @second_out = stream_out end + def set_second_out( out ) + @second_out = out + end + + + def init + # comm-begin + if not @second_out.nil? and not @second_out.closed? then + BuildCommServer.send_begin(@second_out) + end + end + + + def close + # close communication + if not @second_out.nil? then + begin + if not @second_out.closed? then + BuildCommServer.send_end(@second_out) + end + rescue + end + BuildCommServer.disconnect(@second_out) + end + + @second_out = nil + end + + + def is_connected? + if @second_out.nil? or @second_out.closed? then + return false + else + return true + end + end + + + def has_second_out? + if @second_out.nil? 
then + return false + else + return true + end + end + + protected # overide @@ -50,21 +104,17 @@ class JobLog < Log BuildCommServer.send( @second_out, msg ) end rescue - @parent_job.status="ERROR" - close() - error "Connection closed by remote client" + # close second_out + @second_out.close + @second_out = nil - # terminate job - @parent_job.terminate + error "Connection closed by remote client" - # exit thread if independent worker thread - if @parent_job.thread == Thread.current then - error "Thread wiil be terminated" - @parent_job.thread=nil - Thread.exit + # cancel parent job + if not @parent_job.nil? and @parent_job.cancel_state == "NONE" then + @parent_job.cancel_state = "INIT" end end end - end diff --git a/src/build_server/JobManager.rb b/src/build_server/JobManager.rb index 853c74c..e876dc1 100644 --- a/src/build_server/JobManager.rb +++ b/src/build_server/JobManager.rb @@ -27,46 +27,73 @@ Contributors: =end require 'fileutils' +require 'thread' $LOAD_PATH.unshift File.dirname(__FILE__) $LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server" require "SocketJobRequestListener.rb" require "RemoteBuildJob.rb" -require "LocalBuildJob.rb" +require "RegisterPackageJob.rb" require "packageServer.rb" + class JobManager - attr_accessor :max_working_jobs, :jobs + attr_accessor :max_working_jobs, :jobs, :internal_jobs, :reverse_build_jobs + attr_accessor :internal_job_schedule # initialize def initialize( parent ) @parent = parent @jobs = [] + @internal_jobs = [] + @reverse_build_jobs = [] @max_working_jobs=2 @new_job_index = 0 + @internal_job_schedule = Mutex.new + @latest_job_touch = Mutex.new + end + + + # initialize + def init() + # load latest job idx if exist + file_path = "#{BuildServer::CONFIG_ROOT}/#{@parent.id}/latest_job" + if File.exist? file_path then + latest_idx = -1 + File.open( file_path, "r" ) { |f| + f.each_line { |l| + latest_idx = l.strip.to_i + break + } + } + if latest_idx < 0 then latest_idx = -1 end + @new_job_index = latest_idx + 1 + else + @new_job_index = 0 + end end # get new id def get_new_job_id - # check file - server_dir = "#{BuildServer::CONFIG_ROOT}/#{@parent.id}" - if File.exist? 
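# JobManager#init above reads the persisted job counter, and get_new_job_id below
# writes the allocated id back to the "latest_job" file under a mutex so numbering
# survives a server restart. The same pattern in isolation; the counter file path
# is hypothetical:
require 'thread'

counter_file = "/tmp/dibs-latest-job-example"
latest_job_touch = Mutex.new
new_job_index = File.exist?( counter_file ) ? File.read( counter_file ).strip.to_i + 1 : 0

new_id = nil
latest_job_touch.synchronize {
  new_id = new_job_index
  File.open( counter_file, "w" ) { |f| f.puts new_id }
  new_job_index += 1
}
puts "allocated job id #{new_id}"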
"#{server_dir}/latest_job" then - f = File.open( "#{server_dir}/latest_job", "r" ) - @new_job_index = f.gets.strip.to_i + 1 - f.close - end - - # get new id - new_id = @new_job_index - - # save it - f = File.open( "#{server_dir}/latest_job", "w" ) - f.puts "#{new_id}" - f.close - - return new_id + new_idx = 0 + @latest_job_touch.synchronize { + new_idx = @new_job_index + + file_path = "#{BuildServer::CONFIG_ROOT}/#{@parent.id}/latest_job" + File.open( file_path, "w" ) { |f| + f.puts "#{@new_job_index}" + } + + @new_job_index += 1 + } + + return new_idx end + + def create_new_register_job( file_path ) + return RegisterPackageJob.new( file_path, nil, @parent ) + end # add a normal job def add_job ( new_job ) @@ -75,31 +102,51 @@ class JobManager @jobs.push( new_job ) end + # add internal job for multi-build job + def add_internal_job( new_job ) + @parent.log.info "Added new job \"#{new_job.id}\"" + @internal_jobs.push( new_job ) + end + + # add reverse build chek job + def add_reverse_build_job( new_job ) + @parent.log.info "Added new job \"#{new_job.id}\"" + @reverse_build_jobs.push( new_job ) + end + + # stop internal job selection + def stop_internal_job_schedule() + @internal_job_schedule.lock + end + + + # start internal job selection + def resume_internal_job_schedule() + @internal_job_schedule.unlock + end # intialize normal job def initialize_job ( job ) job.status = "INITIALIZING" Thread.new { - # pre-verifiy - if not job.pre_verify or job.status == "ERROR" then - job.status = "ERROR" - @parent.log.info "Adding the job \"#{job.id}\" is canceled" - job.terminate() - Thread.current.exit - end - - # check availabiltiy - if not @parent.check_job_availability( job ) then - job.log.error( "No servers that are able to build your packages.", Log::LV_USER) - job.status = "ERROR" - @parent.log.info "Adding the job \"#{job.id}\" is canceled" - job.terminate() - Thread.current.exit + begin + # init + if not job.init or job.status == "ERROR" then + if job.cancel_state == "NONE" then job.status = "ERROR" end + @parent.log.info "Adding the job \"#{job.id}\" is canceled" + job.terminate() + Thread.current.exit + end + if job.status != "FINISHED" then + job.status = "WAITING" + end + @parent.log.info "Checking the job \"#{job.id}\" was finished!" + rescue => e + @parent.log.error e.message + @parent.log.error e.backtrace.inspect end - - job.status = "WAITING" - @parent.log.info "Checking the job \"#{job.id}\" was finished!" } + @parent.log.info "Job \"#{job.id}\" entered INITIALIZING status" end @@ -117,7 +164,8 @@ class JobManager def execute_remote(job, rserver) # start build - if job.execute_remote( rserver) then + job.set_remote_job(rserver) + if job.execute() then # status change & job control job.status = "REMOTE_WORKING" @parent.log.info "Moved the job \"#{job.id}\" to remote job list" @@ -126,35 +174,119 @@ class JobManager end end + def cancel_job( job) + job.cancel_state = "WORKING" + @parent.log.info "Creating thread for canceling the job \"#{job.id}\"" + Thread.new { + begin + #terminate job thread + if not job.thread.nil? 
then + job.thread.terminate + job.thread = nil + end + + # job cacncel + job.cancel + + # cancel finished + job.status = "CANCELED" + + # call terminate process for job + job.terminate + rescue => e + @parent.log.error e.message + @parent.log.error e.backtrace.inspect + end + } + end # handle def handle() + # for cancel jobs + (@jobs + @internal_jobs + @reverse_build_jobs).select {|j| j.cancel_state == "INIT" }.each do |job| + cancel_job( job ) + end + + # for reverse build jobs + job_list = @reverse_build_jobs + job_list.each do |job| + # if "ERROR", "FINISHED", "CANCELED" remove it from list + if job.status == "ERROR" + @parent.log.info "Job \"#{job.id}\" is stopped by ERROR" + @reverse_build_jobs.delete job + elsif job.status == "FINISHED" + @parent.log.info "Job \"#{job.id}\" is removed by FINISH status" + @reverse_build_jobs.delete job + elsif job.status == "CANCELED" + @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status" + @reverse_build_jobs.delete job + end + + # if "JUST_CREATED", initialize it + if job.status == "JUST_CREATED" then + initialize_job( job ) + end + end + + # for internal jobs + job_list = @internal_jobs + job_list.each do |job| + # if "ERROR", "FINISHED", "CANCELED" remove it from list + if job.status == "ERROR" + @parent.log.info "Job \"#{job.id}\" is stopped by ERROR" + @internal_jobs.delete job + elsif job.status == "FINISHED" + @parent.log.info "Job \"#{job.id}\" is removed by FINISH status" + @internal_jobs.delete job + elsif job.status == "CANCELED" + @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status" + @internal_jobs.delete job + end - # if "ERROR", "FINISHED", remove it from list - for job in @jobs + # if "JUST_CREATED", initialize it + if job.status == "JUST_CREATED" then + initialize_job( job ) + end + end + + # for normal job + job_list = @jobs + job_list.each do |job| + # if "ERROR", "FINISHED", "CANCELED" remove it from list if job.status == "ERROR" @parent.log.info "Job \"#{job.id}\" is stopped by ERROR" @jobs.delete job elsif job.status == "FINISHED" + @parent.log.info "Job \"#{job.id}\" is removed by FINISH status" + @jobs.delete job + elsif job.status == "CANCELED" + @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status" @jobs.delete job end - end - # if "JUST_CREATED", initialize it - for job in @jobs - if job.status != "JUST_CREATED" then next end - initialize_job( job ) + # if "JUST_CREATED", initialize it + if job.status == "JUST_CREATED" then + initialize_job( job ) + end + + # check the connection if job is not asynchronous job + if ( job.status == "WAITING" or job.status == "REMOTE_WORKING" or job.status == "PENDING") and + not job.is_asynchronous_job? and + not job.is_connected? then + + job.status = "ERROR" + @jobs.delete( job ) + @parent.log.info "Job \"#{job.id}\" is disconnected by user. Removed!" + end end - # get available job + # reverse build job -> internal job -> normal job job = get_available_job # available job not exist?, continue if not job.nil? then # oherwise, check remote server rserver = @parent.get_available_server( job ) - - # request for build if rserver != nil and rserver == @parent then execute(job) elsif rserver != nil then @@ -164,92 +296,70 @@ class JobManager end end - # check the connection if job is not asynchronous job - for job in @jobs - if ( job.status == "WAITING" or job.status == "REMOTE_WORKING") and - not job.is_asynchronous_job? and - not job.is_connected? then - - @jobs.delete( job ) - @parent.log.info "Job \"#{job.id}\" is disconnected by user. Removed!" 
- end - end end - # select the job whith no build-dependency problem + # select the job whith no build-dependency problem def get_available_job - - # gather all working jobs - all_working_jobs = [] - for job in @jobs - if job.status == "WORKING" or job.status == "REMOTE_WORKING" then - all_working_jobs.push job + # select reverse build job with round-robin method + selected_job = nil + @reverse_build_jobs.each do |job| + if job.status == "WAITING" then + selected_job = job + break end end - - # for waiting jobs - for job in @jobs - if job.status != "WAITING" then next end - - blocked_by = [] - is_changed = false - - # should not have same packages with workings - # should not depend on workings - # should not be depended by workings - for wjob in all_working_jobs - if job.has_build_dependency?( wjob ) then - - # if there are some changes, check it - blocked_by.push wjob - if not job.blocked_by.include? wjob then is_changed = true end - end + # rotate array + if @reverse_build_jobs.count > 0 then + @reverse_build_jobs.push @reverse_build_jobs.shift + end + if not selected_job.nil? then return selected_job end + + # if no reverse build job exist! + @internal_job_schedule.synchronize { + # internal job first + ret = nil + if @internal_jobs.count > 0 then + ret = get_available_job_in_list(@internal_jobs, true) end - - # if available , then FIFO - if blocked_by.empty? then - job.blocked_by = [] - return job - else - # check count - if blocked_by.count != job.blocked_by.count then is_changed = true end - # if somthing changed, print it and save it - if is_changed then - job.log.info( "Waiting for finishing following jobs:", Log::LV_USER) - for bjob in blocked_by - job.log.info( " * #{bjob.id} #{bjob.pkginfo.packages[0].source}", Log::LV_USER) - end - job.blocked_by = blocked_by - end + # not found, select normal job + if ret.nil? then + ret = get_available_job_in_list(@jobs, false) end - end - - return nil + + return ret + } end # return "max_working_jobs_cnt - current_working_jobs_cnt" def get_number_of_empty_room working_cnt = 0 - for job in @jobs + parent_list = [] + (@jobs + @internal_jobs + @reverse_build_jobs).each do |job| if job.status == "WORKING" then working_cnt = working_cnt + 1 end + + # must exclude parent job + if not job.get_parent_job().nil? then + parent_list.push job.get_parent_job() + end end - return @max_working_jobs - working_cnt + parent_list.uniq! 
+ + return @max_working_jobs - working_cnt + parent_list.count end # check there are working jobs def has_working_jobs - working_cnt = 0 - for job in @jobs + (@jobs + @internal_jobs + @reverse_build_jobs).each do |job| if job.status == "WORKING" then return true - end + end end return false @@ -258,11 +368,10 @@ class JobManager # check there are waiting jobs def has_waiting_jobs - waiting_cnt = 0 - for job in @jobs + (@jobs + @internal_jobs + @reverse_build_jobs).each do |job| if job.status == "WAITING" then return true - end + end end return false @@ -271,44 +380,124 @@ class JobManager def get_working_jobs result = [] - for job in @jobs + (@jobs + @internal_jobs + @reverse_build_jobs).each do |job| if job.status == "WORKING" then result.push job end end + return result end def get_waiting_jobs result = [] - for job in @jobs + (@jobs + @internal_jobs + @reverse_build_jobs).each do |job| if job.status == "WAITING" then result.push job end end + return result end def get_remote_jobs result = [] - for job in @jobs + (@jobs + @internal_jobs + @reverse_build_jobs).each do |job| if job.status == "REMOTE_WORKING" then result.push job end end + return result end def get_pending_jobs result = [] - for job in @jobs + @jobs.each do |job| if job.status == "PENDING" then result.push job end end return result end + + + protected + # select the job whith no build-dependency problem + # if "check_dep_wait" is true, it will check the build dependency + # among items of "WAIT" status in list + def get_available_job_in_list( jobs, check_dep_wait=false ) + + # gather all working jobs and full-build jobs + check_dep_jobs = [] + jobs.each do |job| + if job.cancel_state != "NONE" then next end + + if job.status == "WORKING" or job.status == "REMOTE_WORKING" or job.status == "PENDING" then + check_dep_jobs.push job + elsif ( check_dep_wait and job.status == "WAITING") then + check_dep_jobs.push job + end + end + + # for waiting jobs + jobs.each do |job| + if job.cancel_state != "NONE" then next end + if job.status != "WAITING" then next end + + # check build dependency against working job + pre_jobs = [] + check_dep_jobs.each do |cjob| + if job == cjob then next end + # In case that "WORKING/REMOTE_WORKING" job has build dependency on me + if (cjob.status == "WORKING" or cjob.status == "REMOTE_WORKING" ) and + (job.has_build_dependency?( cjob ) or job.is_compatible_with?( cjob)) then + pre_jobs.push cjob + # In case that "PENDING" job is depends on me (not depended ) + elsif cjob.status == "PENDING" and (not job.does_depend_on? cjob) and + (job.has_build_dependency?( cjob ) or job.is_compatible_with?( cjob)) then + pre_jobs.push cjob + elsif check_dep_wait and cjob.status == "WAITING" and + (job.does_depend_on? cjob or + (job.id > cjob.id and job.is_compatible_with? cjob) ) then + pre_jobs.push cjob + end + end + + # check pre-requisite jobs are changed, notify to user + is_changed = false + if pre_jobs.count != job.pre_jobs.count then + is_changed=true + else + pre_jobs.each do |pjob| + if not job.pre_jobs.include? 
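# The scheduling test above reduces to: a WAITING job keeps waiting while any
# WORKING/PENDING job produces a package it build-depends on (or the same package)
# for the same OS. A toy, runnable illustration of the does_depend_on? check with
# plain structs; package and OS names are made up:
ToyPkg = Struct.new( :package_name, :target_os_list )
ToyJob = Struct.new( :os, :packages, :build_deps ) do
  def does_depend_on?( wjob )
    build_deps.each do |dep|
      wjob.packages.each do |wpkg|
        if dep.package_name == wpkg.package_name and
           dep.target_os_list.include? wjob.os then
          return true
        end
      end
    end
    return false
  end
end

base = ToyJob.new( "ubuntu-32", [ ToyPkg.new("libfoo", ["ubuntu-32"]) ], [] )
app  = ToyJob.new( "ubuntu-32", [ ToyPkg.new("bar-app", ["ubuntu-32"]) ],
                   [ ToyPkg.new("libfoo", ["ubuntu-32"]) ] )
puts app.does_depend_on?( base )   # => true, so "bar-app" stays WAITING until libfoo is built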
pjob then + is_changed = true + break + end + end + end + if pre_jobs.count > 0 and is_changed then + job.log.info( "Waiting for finishing following jobs:", Log::LV_USER) + pre_jobs.each do |bjob| + if bjob.type == "BUILD" then + job.log.info( " * #{bjob.id} #{bjob.pkginfo.packages[0].source}", Log::LV_USER) + elsif bjob.type == "MULTIBUILD" then + job.log.info( " * #{bjob.id} (Multi Build Job)", Log::LV_USER) + end + end + end + job.pre_jobs = pre_jobs + + # no pre-requisite jobs, return its job + if job.pre_jobs.count == 0 then + return job + end + end + + return nil + end + end diff --git a/src/build_server/LocalBuildJob.rb b/src/build_server/LocalBuildJob.rb deleted file mode 100644 index e178616..0000000 --- a/src/build_server/LocalBuildJob.rb +++ /dev/null @@ -1,148 +0,0 @@ -=begin - - LocalBuildJob.rb - -Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. - -Contact: -Taejun Ha -Jiil Hyoun -Donghyuk Yang -DongHee Yang - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -Contributors: -- S-Core Co., Ltd -=end - -$LOAD_PATH.unshift File.dirname(__FILE__) -$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" -require "BuildJob.rb" -require "utils.rb" - -class LocalBuildJob < BuildJob - attr_accessor :id, :status, :pkginfo, :pkgsvr_client, :thread, :log, :rev_fail_list, :rev_success_list, :source_path - - # initialize - def initialize (local_path, os, pkgserver_url, options, server, parent, outstream, resolve ) - super() - @rev_fail_list = [] - @rev_success_list = [] - @id = server.jobmgr.get_new_job_id() - @server = server - @parent = parent - @local_path = local_path - @os = os - @host_os = Utils::HOST_OS - if not pkgserver_url.nil? then - @pkgserver_url = pkgsvr_url - else - local_pkgsvr = @server.local_pkgsvr - @pkgserver_url = local_pkgsvr.location + "/" + local_pkgsvr.get_default_dist_name - end - @options = options - @resolve = resolve - @outstream = outstream - - @status = "JUST_CREATED" - @sub_jobs = [] - @job_root = "#{@server.path}/jobs/#{@id}" - @source_path = @local_path - @pkginfo = nil - @pkgsvr_client = nil - @job_working_dir=@job_root+"/works" - - @thread = nil - - # mkdir - FileUtils.rm_rf "#{@server.path}/jobs/#{@id}" - FileUtils.mkdir_p "#{@server.path}/jobs/#{@id}" - - # create logger - @log = JobLog.new( self, "#{@server.path}/jobs/#{@id}/log", outstream ) - end - - - def terminate() - - # report error - if @status == "ERROR" then - @log.error( "Job is stopped by ERROR", Log::LV_USER) - else - # if succeeded, clean up - FileUtils.rm_rf "#{@job_working_dir}" - end - - # send mail - if ( @server.send_mail.eql? "YES" ) and ( not @pkginfo.nil? ) and ( not @pkginfo.packages.nil? ) then - mail_list = [] - contents = [] - done_pkg_list = [] - contents.push " " - contents.push "%-30s| %10s | %10s" % ["package name", "version", "os"] - contents.push "---------------------------------------------------------------" - for pkg in @pkginfo.packages - if not pkg.os.eql? 
@os then next end - mail_list = mail_list | Mail.parse_email( pkg.maintainer ) - contents.push("%-30s| %10s | %10s" % [ pkg.package_name, pkg.version, pkg.os] ) - end - - if @status == "ERROR" then - subject = "[DIBS] Build fail" - contents.push " " - contents.push "check log file" - contents.push "* Log : #{@server.job_log_url}/#{@id}/log" - else - subject = "[DIBS] Build success" - end - Mail.send_mail(mail_list, subject, contents.join("\n")) - end - - # close logger - @log.close - - # send END signal , if connectionn is valid - if @status != "ERROR" and not @outstream.nil? then - BuildCommServer.send_end(@outstream) - end - - # close outstream - if not @outstream.nil? then - BuildCommServer.disconnect( @outstream ) - end - end - - - # verify - def pre_verify - @log.info( "Verifying job input...", Log::LV_USER) - - # check pkginfo.manifest - if not File.exist? "#{@source_path}/package/pkginfo.manifest" - @log.error( "#{@source_path}/package/pkginfo.manifest doest not exist", Log::LV_USER) - @status = "ERROR" - return false - end - - # set pkginfo - @pkginfo = PackageManifest.new("#{@source_path}/package/pkginfo.manifest") - - # set up pkgsvr_client - @pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log) - @pkgsvr_client.update - - return true - end - -end diff --git a/src/build_server/MultiBuildJob.rb b/src/build_server/MultiBuildJob.rb new file mode 100644 index 0000000..9e28c9e --- /dev/null +++ b/src/build_server/MultiBuildJob.rb @@ -0,0 +1,489 @@ +=begin + + MultiBuildJob.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +Contributors: +- S-Core Co., Ltd +=end + +require "fileutils" +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/builder" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server" +require "client.rb" +require "PackageManifest.rb" +require "Version.rb" +require "Builder.rb" +require "RemoteBuilder.rb" +require "BuildServer.rb" +require "JobLog.rb" +require "mail.rb" + +class MultiBuildJob + + attr_accessor :id, :server, :pre_jobs, :os, :type + attr_accessor :status, :log, :source_path, :cancel_state + attr_accessor :pkgsvr_client, :thread, :sub_jobs + + # initialize + def initialize (server) + @server = server + @id = server.jobmgr.get_new_job_id() + @log = nil + @type = "MULTIBUILD" + @os = "Unknown" + + @status = "JUST_CREATED" + @host_os = Utils::HOST_OS + @pkgserver_url = @server.pkgserver_url + @job_root = "#{@server.path}/jobs/#{@id}" + @source_path = @job_root+"/temp" + @job_working_dir=@job_root+"/works" + @buildroot_dir = "#{@job_root}/buildroot" + @pre_jobs = [] #pre-requisite jobs + @cancel_state = "NONE" + + # children + @sub_jobs = [] + end + + + def get_buildroot() + return @buildroot_dir + end + + + def get_parent_job() + return nil + end + + + def is_rev_build_check_job() + return false + end + + # execute + def execute(sync=false) + @log.info( "Invoking a thread for MULTI-BUILD Job #{@id}", Log::LV_USER) + if @status == "ERROR" then return end + @thread = Thread.new { + begin + # main + thread_main() + + # close + terminate() + rescue => e + @log.error e.message + @log.error e.backtrace.inspect + end + } + + if sync then + @thread.join + end + + return true + end + + # cnacel + def cancel() + @sub_jobs.select{|x| x.cancel_state == "NONE"}.each do |sub| + sub.cancel_state = "INIT" + end + if not @log.nil? then + @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER) + end + end + + # + def init + # mkdir + if not File.exist? @job_root then + FileUtils.mkdir_p @job_root + end + + # create logger + if @log.nil? then + @log = JobLog.new(self, nil ) + end + + @log.info( "Initializing job...", Log::LV_USER) + + # create source path + if not File.exist? @source_path then + FileUtils.mkdir_p @source_path + end + + # initialize all sub jobs and add them to "internal_jobs" + @sub_jobs.each do |job| + # initialize job + if not job.init or job.status == "ERROR" then + job.status = "ERROR" + @log.info( "Failed to initialize sub-job \"#{job.get_project().name}\" for #{job.os}. 
(#{job.id})", Log::LV_USER) + job.terminate() + end + + if job.status != "ERROR" then + job.status = "WAITING" + else + job.status = "ERROR" + @status = "ERROR" + end + end + if @status == "ERROR" then + return false + end + + + # set up pkgsvr_client + @pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log) + + return true + end + + + #terminate + def terminate() + + # report error + if @status == "ERROR" then + # register delayed clean action for sub jobs + @sub_jobs.each do |job| + @server.cleaner.clean_afterwards(job.id) + end + + # register delayed clean action for me + @log.error( "Job is stopped by ERROR" , Log::LV_USER) + @server.cleaner.clean_afterwards(@id) + + elsif @status == "CANCELED" then + # register delayed clean action for sub jobs + @sub_jobs.each do |job| + @server.cleaner.clean_afterwards(job.id) + end + + # register delayed clean action for me + @log.error( "Job is stopped by CANCEL" , Log::LV_USER) + @server.cleaner.clean_afterwards(@id) + + else + # terminate all sub jobs + @sub_jobs.each do |job| + if not job.log.nil? then job.terminate() end + end + + # register direct clean action for me + @server.cleaner.clean(@id) + end + + # close logger + @log.close + end + + + def is_sub_job? + return false + end + + + def get_sub_jobs() + return @sub_jobs + end + + + # check building is possible + def can_be_built_on?(host_os) + return true + end + + + def get_packages() + packages = [] + @sub_jobs.each do |job| + packages = packages + job.get_packages() + end + packages.uniq! + + return packages + end + + + def get_build_dependencies(target_os) + deps = [] + @sub_jobs.each do |job| + deps = deps + job.get_build_dependencies(target_os) + end + deps.uniq! + + return deps + end + + + def get_source_dependencies(target_os, host_os) + deps = [] + @sub_jobs.each do |job| + deps = deps + job.get_source_dependencies(target_os,host_os) + end + deps.uniq! + + return deps + end + + + def is_compatible_with?(o) + return false + end + + def has_build_dependency?(other_job) + + if has_same_packages?(other_job) or + does_depend_on?(other_job) or + does_depended_by?(other_job) then + + return true + else + return false + end + end + + + def has_same_packages?( wjob ) + + # same package must have same os + if not @os.eql? wjob.os then + return false + end + + # check package name + get_packages.each do |pkg| + wjob.get_packages().each do |wpkg| + if pkg.package_name == wpkg.package_name then + #puts "Removed from candiated... A == B" + return true + end + end + end + + return false + end + + + def does_depend_on?( wjob ) + + # compare build dependency + get_build_dependencies(@os).each do |dep| + wjob.get_packages().each do |wpkg| + # dep packages of my job must have same name and target os + # with packages in working job + if dep.package_name == wpkg.package_name and + dep.target_os_list.include? wjob.os then + #puts "Removed from candiated... A -> B" + return true + end + end + end + + return false + end + + + def does_depended_by?( wjob ) + + get_packages().each do |pkg| + wjob.get_build_dependencies(wjob.os).each do |dep| + # dep package of working job must have same name and target os + # with packages in my job + if dep.package_name == pkg.package_name and + dep.target_os_list.include? @os then + #puts "Checking... A <- B" + return true + end + end + end + return false + end + + + def is_connected? + return true + end + + + # return the job is asyncronous job + def is_asynchronous_job? 
+ return false + end + + # set logger + def set_logger( logger ) + @log = logger + end + + + # add sub job + def add_sub_job( job ) + @sub_jobs.push job + # this will make sub-job to share build-root of parent + job.set_parent_job( self ) + end + + + def progress + # do noting + return "" + end + + + def get_log_url() + # only when server support log url + if @server.job_log_url.empty? then + return "","" + end + + return "#{@server.job_log_url}/#{@id}/log","" + end + + # + # PROTECTED METHODS + # + protected + + + # main module + def thread_main + @log.info( "New Job #{@id} is started", Log::LV_USER) + + # initialize status map + job_status_map = {} + @sub_jobs.each do |job| + job_status_map[job.id] = job.status + end + + # add to internal job + @server.jobmgr.internal_job_schedule.synchronize { + @sub_jobs.each do |job| + # init finished, add internal_jobs + @server.jobmgr.add_internal_job(job) + @log.info( "Added new job \"#{job.get_project().name}\" for #{job.os}! (#{job.id})", + Log::LV_USER) + if not @server.job_log_url.empty? then + @log.info( " * Log URL : #{@server.job_log_url}/#{job.id}/log", Log::LV_USER) + end + end + } + + # show job status changes + all_jobs_finished = false + stop_status = "FINISHED" + while not all_jobs_finished + all_jobs_finished = true + @sub_jobs.each do |job| + # check status chanaged, if then print + if job_status_map[ job.id ] != job.status then + @log.info(" * Sub-Job \"#{job.get_project().name}(#{job.os})\" has entered \"#{job.status}\" state. (#{job.id})", Log::LV_USER) + job_status_map[ job.id ] = job.status + end + # check all jobs are finished + if job.status != "ERROR" and job.status != "FINISHED" and job.status != "CANCELED" then + all_jobs_finished = false + end + # check there is some error or cancel + if stop_status == "FINISHED" and + (job.status == "ERROR" or job.status == "CANCELED") then + # write url + write_log_url(job) + # cancel all other un-finished jobs + @sub_jobs.each do |sub| + if sub.status != "ERROR" and sub.status != "FINISHED" and + sub.status != "CANCELED" and sub.cancel_state == "NONE" then + @log.info(" * Sub-Job \"#{sub.get_project().name}(#{sub.os})\" has entered \"CANCELING\" state. (#{sub.id})", Log::LV_USER) + sub.cancel_state = "INIT" + end + end + + stop_status = job.status + break + end + end + + # + sleep 1 + end + + if stop_status == "ERROR" or stop_status == "CANCELED" then + @status = stop_status + return + end + + # upload + if not upload() then + @status = "ERROR" + return + end + + # INFO. don't change this string + @log.info( "Job is completed!", Log::LV_USER) + @status = "FINISHED" + end + + + def upload() + @log.info( "Uploading ...", Log::LV_USER) + + # get package path list + binpkg_path_list = Dir.glob("#{@source_path}/*_*_*.zip") + + # upload + u_client = Client.new( @server.pkgserver_url, nil, @log ) + snapshot = u_client.upload( @server.pkgserver_addr, @server.pkgserver_port, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd, binpkg_path_list) + + if snapshot.nil? then + @log.info( "Upload failed...", Log::LV_USER) + + return false + end + + # update local + @log.info( "Upload succeeded. Sync local pkg-server again...", Log::LV_USER) + @pkgsvr_client.update + @log.info("Snapshot: #{snapshot}", Log::LV_USER) + + return true + end + + + # write web url for log + private + def write_log_url(job) + url,remote_url = job.get_log_url() + if not url.empty? then + @log.info( " ** Log1: #{url}", Log::LV_USER) + end + if not remote_url.empty? 
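# upload() above collects every built package matching "*_*_*.zip" under the job's
# source path before handing the list to the package-server client. A sketch of
# that glob; the directory and the name_version_os.zip naming are assumptions here:
Dir.glob( "/tmp/dibs-job-example/*_*_*.zip" ).each do |pkg|
  puts "would upload #{File.basename(pkg)}"
end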
then + @log.info( " ** Log2: #{remote_url}", Log::LV_USER) + end + end + +end diff --git a/src/build_server/PackageSync.rb b/src/build_server/PackageSync.rb new file mode 100644 index 0000000..f395856 --- /dev/null +++ b/src/build_server/PackageSync.rb @@ -0,0 +1,187 @@ +=begin + + PackageSync.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require "fileutils" +require "thread" +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +require "BuildServer.rb" +require "Action.rb" +require "ScheduledActionHandler.rb" + + +class PackageSyncAction < Action + @@new_id = 0 + + def initialize( time, url, proxy, server ) + super(time, server.pkg_sync_period) + my_id = @@new_id + @@new_id += 1 + @pkgsvr_url = url + @proxy = proxy + @server = server + @pkgsvr_client = nil + @main_client = nil + @sync_root = "#{@server.path}/sync/#{my_id}" + @download_path = "#{@sync_root}/remote" + @original_path = "#{@sync_root}/main" + end + + + def init + # create directory + if File.exist? @download_path then + FileUtils.rm_rf @download_path + FileUtils.rm_rf @original_path + else + FileUtils.mkdir_p @download_path + FileUtils.mkdir_p @original_path + end + + # create client + @pkgsvr_client = Client.new( @pkgsvr_url, @download_path, @server.log ) + @main_client = Client.new( @server.pkgserver_url, @original_path, @server.log ) + end + + + def execute + @server.log.info "Executing package-sync action for server \"#{@pkgsvr_url}\"" + + begin + execute_internal() + rescue => e + @server.log.error e.message + @server.log.error e.backtrace.inspect + end + end + + + private + def execute_internal() + # check update + pkgs = check_package_update + + # if updates are found, download them + downloaded_files = [] + pkgs.each { |pkg| + pkg_name=pkg[0]; os=pkg[1] + + files = @pkgsvr_client.download(pkg_name, os, false) + downloaded_files += files + } + + # request to register + registered_jobs = [] + downloaded_files.each { |file_path| + @server.log.info "Creating new job for registering \"#{file_path}\"" + new_job = @server.jobmgr.create_new_register_job( file_path ) + logger = JobLog.new( new_job, nil ) + new_job.set_logger(logger) + logger.init + + # add + @server.jobmgr.add_job( new_job ) + registered_jobs.push new_job + } + + # wait for finish all jobs + all_jobs_finished = false + while not all_jobs_finished + unfinished_jobs = registered_jobs.select { |j| + (j.status != "ERROR" and j.status != "FINISHED" and j.status != "CANCELED") + } + if unfinished_jobs.empty? 
then + all_jobs_finished = true + else + sleep 10 + end + end + + # remove files + downloaded_files.each { |file_path| + @server.log.info "Removed downloaded file: \"#{file_path}\"" + FileUtils.rm_rf file_path + } + end + + + protected + def check_package_update + pkgs = [] + + # update + @pkgsvr_client.update() + @main_client.update() + + # for all BINARY project + bin_prjs = @server.prjmgr.projects.select { |p| (p.type == "BINARY") } + bin_prjs.each { |p| + pkg_name = p.pkg_name + p.os_list.each { |os| + # get pkg version in server + main_ver = @main_client.get_attr_from_pkg(pkg_name, os, "version") + if main_ver.nil? then next end + remote_ver = @pkgsvr_client.get_attr_from_pkg(pkg_name, os, "version") + if remote_ver.nil? then next end + + if Version.new(main_ver) < Version.new(remote_ver) then + pkgs.push [pkg_name, os] + end + } + } + + return pkgs + end + +end + + +class PackageServerSynchronizer + attr_accessor :quit + + # init + def initialize( server ) + @server = server + @handler = ScheduledActionHandler.new + end + + + # start thread + def start() + + time = Time.new + 60 + @server.remote_pkg_servers.each { |entry| + url=entry[0]; proxy=entry[1] + @handler.register( PackageSyncAction.new(time, url, proxy, @server) ) + @server.log.info "Registered package-sync action for server \"#{url}\"" + } + + # start handler + @handler.start + end +end diff --git a/src/build_server/ProjectManager.rb b/src/build_server/ProjectManager.rb new file mode 100644 index 0000000..9d61df1 --- /dev/null +++ b/src/build_server/ProjectManager.rb @@ -0,0 +1,352 @@ +=begin + + ProjectManager.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require 'fileutils' +$LOAD_PATH.unshift File.dirname(__FILE__) +require "GitBuildProject.rb" +require "BinaryUploadProject.rb" +require "MultiBuildJob.rb" +require "PackageManifest.rb" +require "package.rb" + +class ProjectManager + attr_accessor :projects + + # initialize + def initialize( server ) + @server = server + @projects = [] + @project_root = "#{@server.path}/projects" + end + + + # load existing project from server configuration + def load() + # check project root + if not File.exist? @project_root then + FileUtils.mkdir_p @project_root + end + + # scan all projects + Dir.new(@project_root).entries.each do |name| + # skip . or .. + if name.eql? "." or name.eql? ".." then next end + + # create project + @server.log.info "Loading project : #{name}" + prj = load_project( name ) + if not prj.nil? then + @projects.push prj + end + end + + end + + + # get_project of the name + def get_project ( name ) + @projects.each do |prj| + if prj.name.eql? name then return prj end + end + + return nil + end + + + def add_git_project(name , repos, branch, passwd, os_list) + prj = get_project( name) + if not prj.nil? 
then return false end + + new_prj = GitBuildProject.new(name, repos, branch, @server, os_list) + if not passwd.nil? and not passwd.empty? then + new_prj.passwd = passwd + end + @projects.push new_prj + + # check project directory + if not File.exist? "#{@project_root}/#{name}" then + FileUtils.mkdir_p "#{@project_root}/#{name}" + end + + # write configuration + write_configuration(name, repos, branch, passwd, os_list) + + return true + end + + + def add_binary_project(name, pkg_name, passwd, os_list) + prj = get_project( name) + if not prj.nil? then return false end + + new_prj = BinaryUploadProject.new(name, pkg_name, @server, os_list) + if not passwd.nil? and not passwd.empty? then + new_prj.passwd = passwd + end + @projects.push new_prj + + # check project directory + if not File.exist? "#{@project_root}/#{name}" then + FileUtils.mkdir_p "#{@project_root}/#{name}" + end + + # write configuration + write_configuration_for_binary_project(name, pkg_name, passwd, os_list) + + return true + end + + + def add_remote_project( name, server_id) + end + + + # create new job for project + # if cannot create, return nil + def create_new_job( name, os ) + prj = get_project( name ) + if prj.nil? then return nil end + + return prj.create_new_job( os ) + end + + + # create new multi build job + def create_new_multi_build_job( sub_job_list ) + result = MultiBuildJob.new( @server ) + + sub_job_list.each do |job| + result.add_sub_job( job ) + end + + return result + end + + + # create new full job + def create_new_full_build_job( ) + # create multi job + result = MultiBuildJob.new( @server ) + + # create sub jobs + @projects.each do |prj| + if prj.type != "GIT" then next end + + prj.os_list.each do |os| + if not @server.supported_os_list.include? os then next end + + new_job = create_new_job( prj.name, os ) + if new_job.nil? then next end + + # This make project to build + # even though there is a package of same version on pkg-server + new_job.set_force_rebuild(true) + + # add to multi job + result.add_sub_job( new_job ) + end + end + + return result + end + + + # get project that includes specified pkg name and os + # will return [project,os,ver] list + def get_projects_from_pkgs(pkgs) + result = [] + @projects.each do |prj| + pkgs.each do |pkg| + name = pkg.package_name + ver = pkg.version + os = pkg.os + + # check project provide target package + if prj.include_package?(name, ver, os) then + result.push [prj, os, ver] + break + end + end + end + + return result + end + + + def get_project_from_package_name(pkg_name) + @projects.each do |prj| + # check project provide target package + if prj.include_package?(pkg_name) then + return prj + end + end + + return nil + end + + + # get project from git repository + def get_git_project( repos ) + @projects.each { |prj| + if prj.type == "GIT" and prj.repository == repos then + return prj + end + } + + return nil + end + + + def create_unnamed_git_project(repos) + name = "UNNAMED_PRJ_#{@projects.count}" + branch = "master" + passwd = nil + os_list = Utils.get_all_OSs() + # add + add_git_project(name , repos, branch, passwd, os_list) + # get + return get_project(name) + end + + protected + + # load and create project + def load_project(name) + + # check config file + config_file = "#{@project_root}/#{name}/build" + if not File.exist? 
config_file then return nil end + + # read configuration + type="GIT" + passwd="" + repos="none" + branch="master" + os_list = @server.supported_os_list + rserver_id=nil + pkg_name=nil + File.open( config_file, "r" ) do |f| + f.each_line do |l| + idx = l.index("=") + 1 + length = l.length - idx + + if l.start_with?("TYPE=") + type = l[idx,length].strip + elsif l.start_with?("PASSWD=") + passwd = l[idx,length].strip + elsif l.start_with?("GIT_REPOSITORY=") + repos = l[idx,length].strip + elsif l.start_with?("GIT_BRANCH=") + branch = l[idx,length].strip + elsif l.start_with?("OS_LIST=") + os_list = l[idx,length].strip.split(",") + elsif l.start_with?("REMOTE_SERVER_ID=") + rserver_id = l[idx,length].strip + elsif l.start_with?("PACKAGE_NAME=") + pkg_name = l[idx,length].strip + else + next + end + end + end + + # write back & create project + if type == "GIT" then + write_configuration(name, repos, branch, passwd, os_list) + new_project = GitBuildProject.new(name, repos, branch, @server, os_list) + + # read source info + sources_file = "#{@project_root}/#{name}/sources" + if File.exist? sources_file then + File.open(sources_file, "r") do |f| + f.each_line do |l| + version = l.split(",")[0].strip + info = l.split(",")[1].strip + + new_project.add_source_info( version, info ) + end + end + end + + # read pkginfo + pkginfo_dir = "#{@project_root}/#{name}/pkginfos" + if not File.exist? pkginfo_dir then FileUtils.mkdir_p pkginfo_dir end + Dir.new(pkginfo_dir).entries.each do |file| + if file.eql? "." or file.eql? ".." then next end + + vlen = file.length - ".manifest".length + version = file[0,vlen] + new_project.add_package_info( version, "#{pkginfo_dir}/#{file}" ) + end + + elsif type == "BINARY" then + write_configuration_for_binary_project(name, pkg_name, passwd, os_list) + new_project = BinaryUploadProject.new(name, pkg_name, @server, os_list) + end + + + # set passwd if exist + if not passwd.empty? then + new_project.passwd = passwd + end + + + return new_project + end + + + # write configuration + def write_configuration(name, repos, branch, passwd, os_list ) + config_file = "#{@project_root}/#{name}/build" + File.open( config_file, "w" ) do |f| + f.puts "TYPE=GIT" + if not passwd.nil? and not passwd.empty? then + f.puts "PASSWD=#{passwd}" + end + f.puts "GIT_REPOSITORY=#{repos}" + f.puts "GIT_BRANCH=#{branch}" + f.puts "OS_LIST=#{os_list.join(",")}" + end + end + + + # write configuration + def write_configuration_for_binary_project(name, pkg_name, passwd, os_list ) + config_file = "#{@project_root}/#{name}/build" + File.open( config_file, "w" ) do |f| + f.puts "TYPE=BINARY" + if not passwd.nil? and not passwd.empty? then + f.puts "PASSWD=#{passwd}" + end + f.puts "PACKAGE_NAME=#{pkg_name}" + f.puts "OS_LIST=#{os_list.join(",")}" + end + end + + +end diff --git a/src/build_server/RegisterPackageJob.rb b/src/build_server/RegisterPackageJob.rb new file mode 100644 index 0000000..0655ef0 --- /dev/null +++ b/src/build_server/RegisterPackageJob.rb @@ -0,0 +1,544 @@ +=begin + + RegisterBinaryJob.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require "fileutils" +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/builder" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server" +require "client.rb" +require "PackageManifest.rb" +require "Version.rb" +require "BuildServer.rb" +require "JobLog.rb" +require "mail.rb" +require "utils.rb" +require "ReverseBuildChecker.rb" + +class RegisterPackageJob + + attr_accessor :id, :server, :pre_jobs, :os, :type + attr_accessor :status, :log, :source_path + attr_accessor :pkgsvr_client, :thread, :pkg_type + attr_accessor :pkg_name, :pkginfo, :cancel_state + + + # initialize + def initialize( local_path, project, server, ftpurl=nil ) + @server = server + @id = server.jobmgr.get_new_job_id() + @log = nil + @type = "REGISTER" + + @status = "JUST_CREATED" + @host_os = Utils::HOST_OS + @pkgserver_url = @server.pkgserver_url + @job_root = "#{@server.path}/jobs/#{@id}" + @source_path = @job_root+"/temp" + @job_working_dir=@job_root+"/works" + @buildroot_dir = "#{@job_root}/buildroot" + @cancel_state = "NONE" + @pre_jobs = [] + + @local_path=local_path + @file_path = nil + @filename = File.basename(local_path) + if @filename =~ /.*_.*_.*\.zip/ then + @pkg_type = "BINARY" + new_name = @filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3') + @pkg_name = new_name.split(",")[0] + @pkg_version = new_name.split(",")[1] + @os = new_name.split(",")[2] + else + @pkg_type = "ARCHIVE" + @pkg_name = @filename + end + @pkginfo = nil #This info is valid only for BINARY package + @project = project + end + + + def is_sub_job? + return false + end + + + def get_project() + return @project + end + + + def get_buildroot() + return @buildroot_dir + end + + def get_parent_job() + return nil + end + + + def is_rev_build_check_job() + return false + end + + # execute + def execute(sync=false) + @log.info( "Invoking a thread for REGISTER Job #{@id}", Log::LV_USER) + if @status == "ERROR" then return end + @thread = Thread.new { + begin + thread_main() + terminate() + rescue => e + @log.error e.message + @log.error e.backtrace.inspect + end + } + + if sync then + @thread.join + end + + return true + end + + + # + def init + # mkdir + if not File.exist? @job_root then + FileUtils.mkdir_p @job_root + end + + if @cancel_state != "NONE" then return false end + + # create logger + if @log.nil? then + @log = JobLog.new(self, nil ) + end + + if @cancel_state != "NONE" then return false end + + @log.info( "Initializing job...", Log::LV_USER) + + # create dummy source path + if not File.exist? @source_path then + FileUtils.mkdir_p @source_path + end + + # copy package file to source path + @file_path = "#{@source_path}/#{File.basename(@local_path)}" + if not File.exist? @local_path then + @log.error( "File not found!", Log::LV_USER) + @status = "ERROR" + return false + else + if not @project.nil? 
then + # if remote upload remove file and its directory + FileUtils.mv(@local_path, @file_path) + FileUtils.rm_rf("#{File.dirname(@local_path)}") + else + FileUtils.cp(@local_path, @file_path) + end + end + + if @cancel_state != "NONE" then return false end + + # set up pkgsvr_client + @pkgsvr_client = Client.new(@pkgserver_url, @job_working_dir, @log) + + if @cancel_state != "NONE" then return false end + + # check if the os is supported by build server + if @pkg_type == "BINARY" and + not @server.supported_os_list.include? @os then + @log.error( "Unsupported OS \"#{@os}\" is used!", Log::LV_USER) + @status = "ERROR" + return false + end + + if @cancel_state != "NONE" then return false end + + # checking version if not reverse-build job + if @pkg_type == "BINARY" then + # extrac pkg file + cmd = "cd \"#{@source_path}\";unzip #{@file_path}" + if not Utils.execute_shell(cmd) then + @log.error( "Extracting package file failed!", Log::LV_USER) + @status = "ERROR" + return false + end + + if @cancel_state != "NONE" then return false end + + # set up pkg info + begin + @pkginfo = PackageManifest.new("#{@source_path}/pkginfo.manifest") + rescue => e + @log.error( e.message, Log::LV_USER) + @status = "ERROR" + return false + end + + if @cancel_state != "NONE" then return false end + + if not check_package_version() then + @status = "ERROR" + return false + end + end + + if @cancel_state != "NONE" then return false end + + return true + end + + + #terminate + def terminate() + # report error + if @status == "ERROR" then + @log.error( "Job is stopped by ERROR" , Log::LV_USER) + @server.cleaner.clean_afterwards(@id) + else + # clean up + @server.cleaner.clean(@id) + if not @project.nil? then + @project.set_log_cnt( @log.cnt ) + @project.write_ext_info + end + end + + # close logger + @log.close + end + + + #cancel + def cancel() + if not @log.nil? then + @log.info( "JOB is canceled by cancel operation !!", Log::LV_USER) + end + end + + + # check building is possible + def can_be_built_on?(host_os) + return true + end + + + def get_packages() + if @pkg_type == "BINARY" then + return @pkginfo.packages + else + return [] + end + end + + + def get_build_dependencies(target_os) + return [] + end + + + def get_source_dependencies(target_os,host_os) + return [] + end + + + def is_compatible_with?(o) + return false + end + + + def has_build_dependency?(other_job) + if has_same_packages?(other_job) or + does_depended_by?(other_job) then + + return true + else + return false + end + end + + + def has_same_packages?( wjob ) + if @type != wjob.type then return false end + + case @pkg_type + when "BINARY" + if @pkg_name == wjob.pkg_name and + @os == wjob.os then + return true + end + when "ARCHIVE" + if @pkg_name == wjob.pkg_name then return true end + end + + return false + end + + + # binary/archive package should not have build-dependencies + def does_depend_on?( wjob ) + return false + end + + + def does_depended_by?( wjob ) + if @pkg_type == "BINARY" then + wjob.get_build_dependencies(wjob.os).each do |dep| + # dep package of working job must have same name and target os + # with packages in my job + if dep.package_name == @pkg_name and + dep.target_os_list.include? @os then + #puts "Checking... A <- B" + return true + end + end + else + wjob.get_source_dependencies(wjob.os,@host_os).each do |dep| + if dep.package_name == @pkg_name then + return true + end + end + end + + return false + end + + + def is_connected? + return true + end + + + # return the job is asyncronous job + def is_asynchronous_job? 
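+    # register jobs always report themselves as synchronous; execute(sync) controls whether the caller joins the worker thread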
+ return false + end + + # set logger + def set_logger( logger ) + @log = logger + end + + + def progress + if not @log.nil? then + if @project.nil? or @project.get_latest_log_cnt.nil? then + return "--% (#{log.cnt.to_s} lines) " + else + return ( ( @log.cnt * 100 ) / @project.get_latest_log_cnt ).to_s + "%" + end + end + # if log is nil then can't figure progress out + return "" + end + + + def get_log_url() + # only when server support log url + if @server.job_log_url.empty? then + return "","" + end + + return "#{@server.job_log_url}/#{@id}/log","" + end + + # + # PROTECTED METHODS + # + protected + + + # main module + def thread_main + @log.info( "New Job #{@id} is started", Log::LV_USER) + + # clean build + if not ReverseBuildChecker.check( self, true ).empty? then + @status = "ERROR" + @log.error( "Reverse-build-check failed!" ) + return + end + + # if this package has compatible OS, check + if @pkg_type == "BINARY" and + @pkginfo.packages[0].os_list.count > 1 then + + pkg = @pkginfo.packages[0] + pkg.os_list.each do |os| + if @os == os then next end + + # skip when the os does not exist in project's supported os list + if not @project.nil? and not @project.os_list.include? os then next end + + # skip when there is higher version of the package + ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version") + if not ver_svr.nil? and + Version.new(@pkg_version) <= Version.new(ver_svr) then next end + + # make new package file for compatible OS + newfile = "#{@pkg_name}_#{@pkg_version}_#{os}.zip" + @log.info( "Copying #{@filename} to #{newfile}" ) + FileUtils.cp(@file_path,"#{@source_path}/#{newfile}") + + # reverse check + if not ReverseBuildChecker.check( self, true, os ) then + @status = "ERROR" + @log.error( "Reverse-build-check failed!" ) + return + end + end + end + + # upload + if not upload() then + @status = "ERROR" + return + end + + # INFO. don't change this string + @log.info( "Job is completed!", Log::LV_USER) + @status = "FINISHED" + end + + + # build projects that dependes on me + # can ignore some projects + def check_reverse_build( target_os ) + @log.info( "Checking reverse build dependency ...", Log::LV_USER) + + # get reverse-dependent projects + rev_pkgs = [] + if @pkg_type == "BINARY" then + rev_pkgs += @pkgsvr_client.get_reverse_build_dependent_packages(@pkg_name, target_os) + else + rev_pkgs += @pkgsvr_client.get_reverse_source_dependent_packages(@pkg_name) + end + + rev_projects = @server.prjmgr.get_projects_from_pkgs(rev_pkgs) + + # create reverse build job + rev_build_jobs = [] + rev_projects.each do |p| + prj = p[0] + os = p[1] + version = p[2] + + if prj.type != "GIT" then next end + + # create sub jobs for checking + new_job = prj.create_new_job_from_version(os, version) + new_job.set_rev_build_check_job(self) + + rev_build_jobs.push new_job + end + + # reverse build + if rev_build_jobs.count > 0 then + rev_prjs_txt = rev_build_jobs.map {|j| "#{j.get_project().name}(#{j.os})"}.join(", ") + @log.info( " * Will check reverse-build for next projects: #{rev_prjs_txt}", Log::LV_USER) + end + rev_build_jobs.each do |new_job| + @log.info( " * Checking reverse-build ... 
#{new_job.get_project().name}(#{new_job.id})", Log::LV_USER) + # job init + result = new_job.init() + # if init is succeeded!, try to execute + if result then + # check available server + rserver = @server.get_available_server( new_job ) + if rserver != nil and rserver != @server then + new_job.set_remote_job( rserver ) + end + # execute + new_job.execute(true) + if new_job.status == "ERROR" then result = false end + end + + # check result + if not result then + return false + end + end + + return true + end + + + def upload() + @log.info( "Uploading ...", Log::LV_USER) + + # get package path list + if @pkg_type == "ARCHIVE" then + binpkg_path_list = Dir.glob("#{@source_path}/#{@pkg_name}") + else + binpkg_path_list = Dir.glob("#{@source_path}/*_*_*.zip") + end + + # upload + u_client = Client.new( @server.pkgserver_url, nil, @log ) + snapshot = u_client.upload( @server.pkgserver_addr, @server.pkgserver_port, @server.ftp_addr, @server.ftp_port, @server.ftp_username, @server.ftp_passwd, binpkg_path_list) + + if snapshot.nil? then + @log.info( "Upload failed...", Log::LV_USER) + + return false + end + + # update local + @log.info( "Upload succeeded. Sync local pkg-server again...", Log::LV_USER) + @pkgsvr_client.update + @log.info("Snapshot: #{snapshot}", Log::LV_USER) + + return true + end + + + # check if local package version is greater than server + def check_package_version() + @log.info( "Checking package version ...", Log::LV_USER) + + # package update + @pkgsvr_client.update + + @pkginfo.packages.each do |pkg| + ver_local = pkg.version + #ver_svr = @pkgsvr_client.get_package_version( pkg.package_name, @os ) + ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version") + if not ver_svr.nil? and Version.new(ver_local) <= Version.new(ver_svr) then + @log.error( "Version must be increased : #{ver_local} <= #{ver_svr}", Log::LV_USER) + return false + end + end + + return true + end +end diff --git a/src/build_server/RemoteBuildJob.rb b/src/build_server/RemoteBuildJob.rb index 8e9355b..845a0c8 100644 --- a/src/build_server/RemoteBuildJob.rb +++ b/src/build_server/RemoteBuildJob.rb @@ -1,5 +1,5 @@ =begin - + RemoteBuildJob.rb Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. @@ -35,10 +35,9 @@ class RemoteBuildJob < BuildJob attr_accessor :id # initialize - def initialize (id) - super() + def initialize (id,server) + super(id,nil,nil,server) @id = id @type = nil - @outstream = nil end end diff --git a/src/build_server/RemoteBuildServer.rb b/src/build_server/RemoteBuildServer.rb index 07f4fa5..8c71460 100644 --- a/src/build_server/RemoteBuildServer.rb +++ b/src/build_server/RemoteBuildServer.rb @@ -30,13 +30,15 @@ require 'fileutils' $LOAD_PATH.unshift File.dirname(__FILE__) require "RemoteBuildJob.rb" require "BuildComm.rb" +require 'thread' class RemoteBuildServer attr_accessor :ip, :port, :status, :host_os attr_accessor :max_working_jobs, :working_jobs, :waiting_jobs + attr_accessor :pkgserver_url, :path # initialize - def initialize(ip, port) + def initialize(ip, port, parent) @ip = ip @port = port @status = "DISCONNECTED" @@ -44,6 +46,10 @@ class RemoteBuildServer @max_working_jobs = 2 @working_jobs = [] @waiting_jobs = [] + @pkgserver_url = parent.pkgserver_url + @path = "" + @file_transfer_cnt_mutex = Mutex.new + @file_transfer_cnt = 0 end @@ -63,10 +69,10 @@ class RemoteBuildServer def update_state # send - @status = "DISCONNECTED" + #@status = "DISCONNECTED" client = BuildCommClient.create( @ip, @port ) if client.nil? 
then return end - if client.send("QUERY,SYSTEM") then + if client.send("QUERY|SYSTEM") then result = client.read_lines do |l| tok = l.split(",").map { |x| x.strip } @host_os = tok[0] @@ -83,15 +89,15 @@ class RemoteBuildServer @waiting_jobs = [] client = BuildCommClient.create( @ip, @port ) if client.nil? then return end - if client.send("QUERY,JOB") then + if client.send("QUERY|JOB") then result = client.read_lines do |l| tok = l.split(",").map { |x| x.strip } job_status = tok[0] job_id = tok[1] - new_job = RemoteBuildJob.new(job_id) + new_job = RemoteBuildJob.new(job_id,self) case job_status - when "WAITING" + when "WAITING", "JUST_CREATED", "INITIALIZING" @waiting_jobs.push new_job when "WORKING" @working_jobs.push new_job @@ -123,5 +129,22 @@ class RemoteBuildServer def has_waiting_jobs return (@waiting_jobs.count > 0) end + + + def add_file_transfer() + @file_transfer_cnt_mutex.synchronize { + @file_transfer_cnt += 1 + } + end + + def remove_file_transfer() + @file_transfer_cnt_mutex.synchronize { + @file_transfer_cnt -= 1 + } + end + + def get_file_transfer_cnt() + return @file_transfer_cnt + end end diff --git a/src/build_server/RemoteBuilder.rb b/src/build_server/RemoteBuilder.rb new file mode 100644 index 0000000..0a1ea23 --- /dev/null +++ b/src/build_server/RemoteBuilder.rb @@ -0,0 +1,210 @@ +=begin + + RemoteBuilder.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server" +require "utils" +require "PackageManifest" +require "log" + +class RemoteBuilder + attr_accessor :id, :log + + # initialize + def initialize( id, server,ftp_addr, ftp_port, ftp_username, ftp_passwd) + @id = id + @server = server + @addr = server.ip + @port = server.port + @ftp_addr = ftp_addr + @ftp_port = ftp_port + @ftp_username = ftp_username + @ftp_passwd = ftp_passwd + @log = Log.new(nil) + end + + + # build + def build( git_repos, source_path, os, is_rev_build, srcinfo, no_reverse, local_pkgs ) + @log.info( "Start to build on remote server...", Log::LV_USER ) + + # create unique dock number + dock = Utils.create_uniq_name() + + # send local packages + begin + @server.add_file_transfer() + local_pkgs.each do |pkg_path| + @log.info( "Sending file... 
: #{pkg_path}", Log::LV_USER ) + result = send_file_to_remote( pkg_path, dock ) + if not result then + @log.error( "File transfering failed!", Log::LV_USER ) + @server.remove_file_transfer() + return false + end + end + ensure + @server.remove_file_transfer() + end + + # send build request + @log.info( "Sending build request to remote server...", Log::LV_USER ) + result, result_files = send_build_request(git_repos, os, + is_rev_build, srcinfo, no_reverse, local_pkgs, dock) + + @log.info( "Receiving log file from remote server...", Log::LV_USER ) + if not receive_file_from_remote( "#{source_path}/../remote_log", dock ) then + @log.warn( "File transfering failed! : remote_log", Log::LV_USER ) + end + + if not result then + @log.error( "Building job on remote server failed!", Log::LV_USER ) + return false + end + + # receive binary package + result_files.each do |file_name| + @log.info( "Receiving file from remote server : #{file_name}", Log::LV_USER ) + result = receive_file_from_remote( "#{source_path}/#{file_name}", dock ) + if not result then + @log.error( "File transfering failed! : #{file_name}", Log::LV_USER ) + return false + end + end + + return true + end + + + # upload binary packages that is need to be overwrite + # before remote package + protected + def send_file_to_remote(file_path, dock = "0") + # create client + client = BuildCommClient.create( @addr, @port, @log ) + if client.nil? then + @log.error( "Creating communication client failed!", Log::LV_USER) + return false + end + + # upload file + result = true + file_name = file_path.split("/")[-1] + msg = "UPLOAD|#{dock}" + if client.send( msg ) then + result=client.send_file( @ftp_addr, @ftp_port, @ftp_username, @ftp_passwd, file_path ) + if not result then + @log.error( "File uploading failed...#{file_name}", Log::LV_USER) + end + end + + #close connections + client.terminate + + return result + end + + + # send build request + protected + def send_build_request(git_repos, os, is_rev_build, commit, no_reverse, local_pkgs, dock = "0") + result_files = [] + + client = BuildCommClient.create( @addr, @port, @log ) + if client.nil? then + @log.error( "Creating communication client failed!", Log::LV_USER) + return false, result_files + end + + # get local package names + local_pkg_names = local_pkgs.map { |path| File.basename(path) } + + # send + # format: BUILD|GIT|repository|passwd|os|async|no_reverse|internal|rev-build|commit|pkgs|dock_num + # value : BUILD|GIT|repository| |os|NO |no_reverse|YES |rev-build|commit|pkgs|dock_num + result = true + commit = commit.nil? ? "":commit + pkg_list = local_pkg_names.join(",") + rev = is_rev_build ? "YES":"NO" + msg = "BUILD|GIT|#{git_repos}||#{os}|NO|#{no_reverse}|YES|#{rev}|#{commit}|#{pkg_list}|#{dock}" + if client.send( msg ) then + result = client.read_lines do |l| + # write log first + @log.output( l.strip, Log::LV_USER) + + # check build result + if l.include? "Job is stopped by ERROR" or + l.include? "Error:" then + result = false + break + end + + # gather result files if not reverse build + if not is_rev_build and l =~ /Creating package file \.\.\. (.*)/ then + file_name = $1 + result_files.push file_name + end + + end + end + + # close socket + client.terminate + + return result, result_files + end + + + # receive binary package of remote server + protected + def receive_file_from_remote(file_path, dock = "0") + # create client + client = BuildCommClient.create( @addr, @port, @log ) + if client.nil? 
then + @log.error( "Creating communication client failed!", Log::LV_USER) + return false + end + + # download file + result = true + file_name = file_path.split("/")[-1] + msg = "DOWNLOAD|#{dock}|#{file_name}" + if client.send( msg ) then + result=client.receive_file( @ftp_addr, @ftp_port, @ftp_username, @ftp_passwd, file_path ) + if not result then + @log.error( "File downloading failed...#{file_name}", Log::LV_USER) + end + end + + #close connections + client.terminate + + return result + end +end diff --git a/src/build_server/ReverseBuildChecker.rb b/src/build_server/ReverseBuildChecker.rb new file mode 100644 index 0000000..7ae943e --- /dev/null +++ b/src/build_server/ReverseBuildChecker.rb @@ -0,0 +1,218 @@ +=begin + + ReverseBuildChecker.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require "log" +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/pkg_server" +require "utils.rb" +require "client.rb" +require "BuildServer.rb" +require "JobLog.rb" +require "PackageManifest.rb" +require "BuildJob.rb" +require "RegisterPackageJob.rb" + +class ReverseBuildChecker + + # check + def ReverseBuildChecker.check( job, exit_on_error, override_os = nil ) + log = job.log + job_os = (override_os.nil?) ? job.os : override_os + + # start + log.info( "Checking reverse build dependency ...", Log::LV_USER) + + # get target packages that be checked + bin_pkg_name_list = [] + src_pkg_name_list = [] + case job.type + when "BUILD" + job.pkginfo.get_target_packages(job_os).each do |pkg| + bin_pkg_name_list.push pkg.package_name + end + when "REGISTER" + if job.pkg_type == "BINARY" then + bin_pkg_name_list.push job.pkg_name + else + src_pkg_name_list.push job.pkg_name + end + end + + # get reverse projects from build dependency + rev_pkgs = [] + bin_pkg_name_list.each do |pkg_name| + rev_pkgs += job.pkgsvr_client.get_reverse_build_dependent_packages(pkg_name, job_os) + end + src_pkg_name_list.each do |pkg_name| + rev_pkgs += job.pkgsvr_client.get_reverse_source_dependent_packages(pkg_name) + end + rev_pkgs.uniq! + rev_projects = job.server.prjmgr.get_projects_from_pkgs(rev_pkgs) + + # create reverse build job + rev_build_jobs = [] + rev_projects.each do |p| + rev_prj = p[0] + rev_os = p[1] + rev_ver = p[2] + + # if not "GIT" project, ignore it + if rev_prj.type != "GIT" then next end + + # if job on resolve process, its unresolved project + #of pending ancestor must be excluded. + if job.type == "BUILD" and not job.pending_ancestor.nil? 
then + found = false + job.pending_ancestor.rev_fail_projects.each { |fp| + f_prj = fp[0] + f_os = fp[1] + + if rev_prj == f_prj and rev_os == f_os then + found = true + break + end + } + if found then next end + end + + # if this is sub job, all other sibling job must be excluded + if job.is_sub_job? then + job.get_parent_job().get_sub_jobs.each do |sub_job| + sub_prj = sub_job.get_project() + sub_os = sub_job.os + if rev_prj == sub_prj and rev_os == sub_os then + found = true + break + end + end + if found then next end + end + + # create job + new_job = rev_prj.create_new_job_from_version( rev_os, rev_ver ) + new_job.set_rev_build_check_job( job ) + + rev_build_jobs.push new_job + end + + # reverse build + if rev_build_jobs.count > 0 then + rev_prjs_msg = rev_build_jobs.map {|j| "#{j.get_project().name}(#{j.os})"}.join(", ") + log.info( " * Will check reverse-build for projects: #{rev_prjs_msg}", Log::LV_USER) + end + + # for all reverse job + rev_build_jobs.each do |rev_job| + # add to job manager + job.server.jobmgr.add_reverse_build_job(rev_job) + log.info( " * Added new job for reverse-build ... \ + #{rev_job.get_project().name}(#{rev_job.os}) (#{rev_job.id})", Log::LV_USER) + end + + # wait for job finish + rev_build_finished = false + success_list = [] + failure_list = [] + cancel_other_jobs = false + while not rev_build_finished + rev_build_finished = true + rev_build_jobs.each do |rev_job| + rev_prj = rev_job.get_project() + rev_os = rev_job.os + + case rev_job.status + when "ERROR", "CANCELED" + # add fail list + if not is_project_included?(failure_list, rev_prj, rev_os) then + log.info( " * Reverse-build FAIL ... #{rev_prj.name}(#{rev_os}) (#{rev_job.id})", Log::LV_USER) + failure_list.push [ rev_prj, rev_os ] + write_log_url(log, rev_job) + end + + # if "exist on error" cancel all other jobs + if exit_on_error then + cancel_other_jobs = true + rev_build_jobs.each do |j| + if j.status != "ERROR" and j.status != "FINISHED" and + j.status != "CANCELED" and j.cancel_state == "NONE" then + + j.cancel_state = "INIT" + end + end + break + end + when "FINISHED" + # add success list + if not success_list.include? rev_job then + log.info( " * Reverse-build OK ... #{rev_prj.name}(#{rev_os}) (#{rev_job.id})", Log::LV_USER) + success_list.push rev_job + end + else + rev_build_finished = false + end + end + + sleep 1 + end + + # clean up all reverse build jobs + rev_build_jobs.each do |rev_job| + if rev_job.status == "ERROR" or rev_job.status == "CANCELED" then + rev_job.server.cleaner.clean_afterwards(rev_job.id) + else + rev_job.server.cleaner.clean(rev_job.id) + end + end + + return failure_list + end + + + private + def self.is_project_included?( prj_list, prj, os ) + prj_list.each do |p| + if p[0] == prj and p[1] == os then return true end + end + + return false + end + + + # write web url for log + private + def self.write_log_url(log, job) + url,remote_url = job.get_log_url() + if not url.empty? then + log.info( " ** Log1: #{url}", Log::LV_USER) + end + if not remote_url.empty? 
then + log.info( " ** Log2: #{remote_url}", Log::LV_USER) + end + end +end diff --git a/src/build_server/SocketJobRequestListener.rb b/src/build_server/SocketJobRequestListener.rb index f7656a5..ae438a9 100644 --- a/src/build_server/SocketJobRequestListener.rb +++ b/src/build_server/SocketJobRequestListener.rb @@ -27,8 +27,6 @@ Contributors: =end $LOAD_PATH.unshift File.dirname(__FILE__) -require "GitBuildJob.rb" -require "LocalBuildJob.rb" require "JobLog.rb" require "BuildComm.rb" @@ -40,13 +38,22 @@ class SocketJobRequestListener @parent_server = parent @thread = nil @finish_loop = false + @comm_server = nil @log = @parent_server.log end # start listening def start() @thread = Thread.new { - main() + # make loop recover when unhandled exception occurred + while not @finish_loop + begin + main() + rescue => e + @log.error e.message + @log.error e.backtrace.inspect + end + end } end @@ -63,21 +70,26 @@ class SocketJobRequestListener def main() # server open begin - server = BuildCommServer.new(@parent_server.port, @log) + ftp_url = Utils.generate_ftp_url(@parent_server.ftp_addr, @parent_server.ftp_port, + @parent_server.ftp_username, @parent_server.ftp_passwd) + cache_dir = "#{@parent_server.transport_path}/.cache" + @comm_server = BuildCommServer.create(@parent_server.port, @log, ftp_url, cache_dir) rescue @log.info "Server creation failed" + puts "Server creation failed" + @parent_server.stop return end # loop @log.info "Entering Control Listening Loop ... " @finish_loop = false - server.wait_for_connection(@finish_loop) do |req| + @comm_server.wait_for_connection(@finish_loop) do |req| handle_job_request( req ) end # quit - server.terminate + @comm_server.terminate end @@ -99,8 +111,8 @@ class SocketJobRequestListener # parse request cmd = "" - if req_line.split(",").count > 0 then - cmd = req_line.split(",")[0].strip + if req_line.split("|").count > 0 then + cmd = req_line.split("|")[0].strip end case cmd @@ -110,8 +122,36 @@ class SocketJobRequestListener handle_cmd_resolve( req_line, req ) when "QUERY" handle_cmd_query( req_line, req ) + when "CANCEL" + handle_cmd_cancel( req_line, req ) when "STOP" handle_cmd_stop( req_line, req ) + when "UPGRADE" + handle_cmd_upgrade( req_line, req ) + when "FULLBUILD" + handle_cmd_fullbuild( req_line, req ) + when "REGISTER" + handle_cmd_register( req_line, req ) + when "DOWNLOAD" + Thread.new { + begin + handle_cmd_download( req_line, req ) + rescue => e + @log.error "Transfering file failed!" + @log.error e.message + @log.error e.backtrace.inspect + end + } + when "UPLOAD" + Thread.new { + begin + handle_cmd_upload( req_line, req ) + rescue => e + @log.error "Transfering file failed!" + @log.error e.message + @log.error e.backtrace.inspect + end + } else @log.info "Received Unknown REQ: #{req_line}" raise "Unknown request: #{req_line}" @@ -122,87 +162,205 @@ class SocketJobRequestListener # "BUILD" def handle_cmd_build( line, req ) - tok = line.split(",").map { |x| x.strip } - if tok.count < 4 then + tok = line.split("|").map { |x| x.strip } + if tok.count < 3 then @log.info "Received Wrong REQ: #{line}" raise "Invalid request format is used: #{line}" end - case tok[1] - # BUILD,GIT,repos,commit,os,url,async - when "GIT" - @log.info "Received BUILD GIT => #{tok[2]}" + # check type + if tok[1] != "GIT" then + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end - # check asynchronous job - async = (not tok[6].nil? and tok[6]=="YES" ? 
true:false) - if async then - new_job = GitBuildJob.new( tok[2], tok[3], tok[4], tok[5], [], @parent_server, nil, nil, false) - else - new_job = GitBuildJob.new( tok[2], tok[3], tok[4], tok[5], [], @parent_server, nil, req, false) + # Case1. BUILD|GIT|project_name|passwd|os_list|async|no_reverse + # Case2. BUILD|GIT|git_repos||os|async|no_reverse|internal|rev_build|commit|pkgs|dock_num + + # parse + project_name_list = tok[2].split(",") + passwd_list = tok[3].split(",") + passwd = passwd_list[0] + os_list = tok[4].split(",") + async = tok[5].eql? "YES" + no_reverse = tok[6].eql? "YES" + is_internal = tok[7].eql? "YES" + rev_job = tok[8].eql? "YES" + git_commit = (not tok[9].nil? and not tok[9].empty?) ? tok[9] : nil + pkg_files = (not tok[10].nil? and not tok[10].empty?) ? tok[10].split(",") : [] + dock_num = (not tok[11].nil? and not tok[11].empty?) ? tok[11].strip : "0" + + # check supported os if not internal job + if not is_internal then + os_list = check_supported_os( os_list , req ) + if os_list.nil? or os_list.empty? then + raise "Unsupported OS name is used!" end - BuildCommServer.send_begin(req) + end - # start job. If log url is supported, show it - if not @parent_server.job_log_url.empty? then - new_job.log.info( "Added new job \"#{new_job.id}\"! Check following URL", Log::LV_USER) - new_job.log.info( " * Log URL : #{@parent_server.job_log_url}/#{new_job.id}/log", Log::LV_USER) + # multi build job + if project_name_list.count > 1 or os_list.count > 1 then + new_job_list = [] + i = 0 + project_name_list.each { |pname| + if not passwd_list[i].nil? then passwd = passwd_list[i] + else passwd = passwd_list[0] end + check_build_project(pname,passwd,req) + os_list.each { |os| + new_job = create_new_job( pname, os ) + if new_job.nil? then + @log.warn "\"#{pname}\" does not support #{os}" + next + end + new_job_list.push new_job + @log.info "Received a request for building this project : #{pname}, #{os}" + } + i = i + 1 + } + + if new_job_list.count > 1 then + new_job = @parent_server.prjmgr.create_new_multi_build_job( new_job_list ) + elsif new_job_list.count == 1 then + new_job = new_job_list[0] else - new_job.log.info( "Added new job \"#{new_job.id}\"!", Log::LV_USER) + raise "Multi-Build Job creation failed!" end - # if asynchronouse, quit connection - if async then - if not @parent_server.job_log_url.empty? then - req.puts( "Info: Added new job \"#{new_job.id}\"! 
Check following URL") - req.puts( "Info: * Log URL : #{@parent_server.job_log_url}/#{new_job.id}/log") - else - req.puts( "Info: Added new job \"#{new_job.id}\"!") - end + # transfered job + elsif is_internal then + git_repos = project_name_list[0] + os = os_list[0] - BuildCommServer.send_end(req) - BuildCommServer.disconnect(req) - end + new_job = create_new_internal_job(git_repos, os, git_commit, pkg_files, dock_num ) + if rev_job then new_job.set_rev_build_check_job(nil) end - # add - @parent_server.jobmgr.add_job( new_job ) + # single job + elsif project_name_list.count == 1 and os_list.count == 1 then + pname = project_name_list[0] + os = os_list[0] - # BUILD,LOCAL,path,os,url - when "LOCAL" - @log.info "Received BUILD LOCAL => #{tok[2]}" - - BuildCommServer.send_begin(req) - @parent_server.jobmgr.add_job( - LocalBuildJob.new( tok[2], tok[3], tok[4], [], @parent_server, nil, req, false)) + check_build_project(pname,passwd,req) + new_job = create_new_job( pname, os ) else - @log.info "Received Wrong REQ: #{line}" - raise "Invalid request format is used: #{line}" + BuildCommServer.send_begin(req) + req.puts "Error: There is no valid job to build!" + BuildCommServer.send_end(req) + raise "No valid jobs!" + end + + if no_reverse then new_job.set_no_reverse end + + # create logger and set + logger = JobLog.new( new_job, req ) + if not async then new_job.set_logger(logger) end + logger.init + + # notify that job has been received + logger.info( "Added new job \"#{new_job.id}\" for #{new_job.os}!", Log::LV_USER) + if not @parent_server.job_log_url.empty? then + logger.info( " * Log URL : #{@parent_server.job_log_url}/#{new_job.id}/log", Log::LV_USER) + end + + # if asynchronouse, quit connection + if async then + logger.info( "Above job(s) will be processed asynchronously!", Log::LV_USER) + logger.close end + + # add to job queue + if new_job.is_rev_build_check_job() then + @parent_server.jobmgr.add_reverse_build_job( new_job ) + else + @parent_server.jobmgr.add_job( new_job ) + end + end + + + def check_build_project(prj_name, passwd, req) + # check project + prj = check_project_exist(prj_name, req) + if prj.nil? then + raise "Requested project does not exist!" + end + + # check passwd + if not check_project_password(prj, passwd, req) then + raise "Project's password is not matched!!" + end + + # check project type + if prj.type == "BINARY" then + BuildCommServer.send_begin(req) + req.puts "Can't build about Binary type package." + BuildCommServer.send_end(req) + raise "Can't build about Binary type package." + end end # "RESOLVE" def handle_cmd_resolve( line ,req) - tok = line.split(",").map { |x| x.strip } - if tok.count < 4 then + tok = line.split("|").map { |x| x.strip } + if tok.count < 3 then @log.info "Received Wrong REQ: #{line}" raise "Invalid request format is used: #{line}" end case tok[1] - # RESOLVE,GIT,repos,commit,os,url + # RESOLVE|GIT|repos|commit|os|async when "GIT" - @log.info "Received RESOLVE GIT => #{tok[2]}" + # parse + project_name=tok[2] + passwd=tok[3] + os=tok[4] + async = tok[5].eql? "YES" + + # check project + prj = check_project_exist(project_name, req) + if prj.nil? then + raise "Requested project does not exist!" + end + + # check passwd + if not check_project_password(prj, passwd, req) then + raise "Project's password is not matched!!" 
+ end - BuildCommServer.send_begin(req) - @parent_server.jobmgr.add_job( - GitBuildJob.new( tok[2], tok[3], tok[4], tok[5], [], @parent_server, nil, req, true)) - # RESOLVE,LOCAL,path,os,url - when "LOCAL" - @log.info "Received RESOLVE LOCAL => #{tok[2]}" + # check os + os_list = check_supported_os( os , req ) + if os_list.nil? or os_list.empty? then + raise "Unsupported OS name is used!" + end + os = os_list[0] + + # create new job + new_job = create_new_job( project_name, os ) + if new_job.nil? then + raise "Creating build job failed : #{project_name}, #{os}" + end + @log.info "Received a request for resolving this project : #{project_name}, #{os}" - BuildCommServer.send_begin(req) - @parent_server.jobmgr.add_job( - LocalBuildJob.new( tok[2], tok[3], tok[4], [], @parent_server, nil, req, true)) + # resolve + new_job.set_resolve_flag() + + # create logger and set + logger = JobLog.new( new_job, req ) + if not async then new_job.set_logger(logger) end + logger.init + + # notify that job has been received + logger.info( "Added new job \"#{new_job.id}\" for #{new_job.os}!", Log::LV_USER) + if not @parent_server.job_log_url.empty? then + logger.info( " * Log URL : #{@parent_server.job_log_url}/#{new_job.id}/log", Log::LV_USER) + end + + # if asynchronouse, quit connection + if async then + logger.info( "Above job(s) will be processed asynchronously!", Log::LV_USER) + logger.close + end + + @parent_server.jobmgr.add_job( new_job ) else @log.info "Received Wrong REQ: #{line}" raise "Invalid request format is used: #{line}" @@ -212,27 +370,58 @@ class SocketJobRequestListener # "QUERY" def handle_cmd_query( line, req ) - tok = line.split(",").map { |x| x.strip } + tok = line.split("|").map { |x| x.strip } if tok.count < 2 then @log.info "Received Wrong REQ: #{line}" raise "Invalid request format is used: #{line}" end case tok[1] + + # QUERY, FTP + when "FTP" + BuildCommServer.send_begin(req) + BuildCommServer.send(req,"#{@parent_server.ftp_addr},#{@parent_server.ftp_username},#{@parent_server.ftp_passwd}") + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + # QUERY,JOB when "JOB" #puts "Received QUERY JOB" + # gather all jobs to show + job_list = @parent_server.jobmgr.jobs + @parent_server.jobmgr.internal_jobs + @parent_server.jobmgr.reverse_build_jobs + + # send the status BuildCommServer.send_begin(req) - for job in @parent_server.jobmgr.get_working_jobs - BuildCommServer.send(req,"WORKING,#{job.id},#{job.pkginfo.packages[0].source}") - end - for job in @parent_server.jobmgr.get_waiting_jobs - BuildCommServer.send(req,"WAITING,#{job.id},#{job.pkginfo.packages[0].source}") - end - for job in @parent_server.jobmgr.get_remote_jobs - BuildCommServer.send(req,"REMOTE ,#{job.id},#{job.pkginfo.packages[0].source}") + job_list.each do |job| + status = job.status + if status == "REMOTE_WORKING" then status = "REMOTE" end + if job.cancel_state != "NONE" then status = "CANCEL" end + + case job.type + when "BUILD" + if status == "PENDING" then + if job.pending_ancestor.nil? then + ids = "/" + else + ids = job.pending_ancestor.id + end + BuildCommServer.send(req,"#{status}:#{ids},#{job.id},#{job.get_project().name},#{job.os} #{job.progress}") + else + BuildCommServer.send(req,"#{status},#{job.id},#{job.get_project().name},#{job.os} #{job.progress}") + end + when "REGISTER" + if job.pkg_type == "BINARY" and not job.get_project().nil? 
then + BuildCommServer.send(req,"#{status},#{job.id},#{job.get_project().name},#{job.os} #{job.progress}") + else + BuildCommServer.send(req,"#{status},#{job.id},#{job.pkg_name}") + end + when "MULTIBUILD" + BuildCommServer.send(req,"#{status},#{job.id},MULTI-BUILD : #{job.sub_jobs.map{|x| x.id}.join(" ")}") + end end + BuildCommServer.send_end(req) BuildCommServer.disconnect(req) @@ -244,6 +433,45 @@ class SocketJobRequestListener BuildCommServer.send(req,"#{@parent_server.host_os},#{@parent_server.jobmgr.max_working_jobs}") BuildCommServer.send_end(req) BuildCommServer.disconnect(req) + when "PROJECT" + BuildCommServer.send_begin(req) + # print GIT projects + sorted_list = @parent_server.prjmgr.projects.sort { |x,y| x.name <=> y.name } + sorted_list.each do |prj| + if prj.type != "GIT" then next end + BuildCommServer.send(req,"G,#{prj.name},#{prj.repository},#{prj.branch}") + end + # print BINARY projects + sorted_list.each do |prj| + if prj.type != "BINARY" then next end + BuildCommServer.send(req,"B,#{prj.name},#{prj.pkg_name}") + end + # print REMOTE project + sorted_list.each do |prj| + if prj.type != "REMOTE" then next end + BuildCommServer.send(req,"R,#{prj.name}") + end + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + + when "OS" + BuildCommServer.send_begin(req) + # print GIT projects + @parent_server.supported_os_list.each do |os_name| + BuildCommServer.send(req,"#{os_name}") + end + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + + when "FRIEND" + BuildCommServer.send_begin(req) + # print GIT projects + @parent_server.friend_servers.each do |server| + BuildCommServer.send(req,"#{server.status},#{server.host_os},#{server.waiting_jobs.length},#{server.working_jobs.length},#{server.max_working_jobs},#{server.get_file_transfer_cnt}") + end + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + else @log.info "Received Wrong REQ: #{line}" raise "Invalid request format is used: #{line}" @@ -251,9 +479,83 @@ class SocketJobRequestListener end + # "CANCEL" + def handle_cmd_cancel( line, req ) + tok = line.split("|").map { |x| x.strip } + if tok.count < 2 then + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end + cancel_job = nil + + #CANCEL, JOB + @parent_server.jobmgr.jobs.each do |j| + if "#{j.id}" == "#{tok[1]}" then + cancel_job = j + break + end + end + + BuildCommServer.send_begin(req) + if cancel_job.nil? then + BuildCommServer.send(req, "There is no job \"#{tok[1]}\"") + raise "There is no job \"#{tok[1]}\"" + else + if cancel_job.cancel_state == "NONE" then + # check passwd + if cancel_job.type == "MULTIBUILD" then + cancel_job.sub_jobs.select{|x| x.cancel_state == "NONE" }.each do |sub| + if not check_project_password( sub.get_project, tok[2], req) then + BuildCommServer.send(req, "Project's password is not matched!!") + raise "Project's password is not matched!!" + end + end + + BuildCommServer.send(req, "\"#{cancel_job.id}, #{cancel_job.sub_jobs.map{|x| x.id}.join(", ")}\" will be canceled") + cancel_job.cancel_state = "INIT" + else + if not check_project_password( cancel_job.get_project, tok[2], req) then + BuildCommServer.send(req, "Project's password is not matched!!") + raise "Project's password is not matched!!" 
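+                        # password mismatch: cancel_state is left untouched and the CANCEL request is aborted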
+ else + BuildCommServer.send(req, "\"#{cancel_job.id}\" will be canceled") + cancel_job.cancel_state = "INIT" + end + end + else + BuildCommServer.send(req, "\"#{cancel_job.id}\" is already canceled") + end + end + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + end + + # "STOP" def handle_cmd_stop( line, req ) - tok = line.split(",").map { |x| x.strip } + tok = line.split("|").map { |x| x.strip } + if tok.count < 2 then + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end + + BuildCommServer.send_begin(req) + if tok[1] != @parent_server.password then + BuildCommServer.send(req,"Password mismatched!") + else + BuildCommServer.send(req,"Server will be down!") + end + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + if tok[1] == @parent_server.password then + @parent_server.finish = true + end + end + + + # "UPGRADE" + def handle_cmd_upgrade( line, req ) + tok = line.split("|").map { |x| x.strip } if tok.count < 2 then @log.info "Received Wrong REQ: #{line}" raise "Invalid request format is used: #{line}" @@ -269,6 +571,306 @@ class SocketJobRequestListener BuildCommServer.disconnect(req) if tok[1] == @parent_server.password then @parent_server.finish = true + @parent_server.upgrade = true + end + end + + + # "FULLBUILD" + def handle_cmd_fullbuild( line, req ) + tok = line.split("|").map { |x| x.strip } + if tok.count < 2 then + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end + + server_passwd = tok[1] + + # check server password + if server_passwd != @parent_server.password then + BuildCommServer.send_begin(req) + BuildCommServer.send(req,"Password mismatched!") + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + else + # create full build job + new_job = @parent_server.prjmgr.create_new_full_build_job() + + # set logger + logger = JobLog.new( new_job, req ) + new_job.set_logger(logger) + logger.init + + # add to job + @parent_server.jobmgr.add_job( new_job ) + end + end + + + # "REGISTER" + def handle_cmd_register( line, req ) + tok = line.split("|").map { |x| x.strip } + if tok.count < 4 then + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end + + type = tok[1] + + case type + # REGISTER|BINARY-LOCAL|local_path + # REGISTER|SOURCE-LOCAL|local_path + when "BINARY-LOCAL", "SOURCE-LOCAL" + file_path = tok[2] + new_job = @parent_server.jobmgr.create_new_register_job( file_path ) + logger = JobLog.new( new_job, req ) + new_job.set_logger(logger) + logger.init + + # add + @parent_server.jobmgr.add_job( new_job ) + + # REGISTER|BINARY|filename|passwd + when "BINARY" + # parse + filename = tok[2] + passwd = tok[3] + dock = (tok[4].nil? or tok[4].empty?) ? "0" : tok[4].strip + + # check project + prj = check_project_for_package_file_name(filename, req) + if prj.nil? then + raise "No project is defined for this binary : #{filename}!" + end + + # check passwd + if not check_project_password(prj, passwd, req) then + raise "Project's password is not matched!!" + end + + # create new job + @log.info "Received a request for uploading binaries : #{filename}" + new_job = create_new_upload_job( prj.name, filename, dock, req ) + if new_job.nil? 
then + raise "Creating build job failed : #{prj.name}, #{filename}" + end + + # create logger and set + logger = JobLog.new( new_job, req ) + new_job.set_logger(logger) + logger.init + + # notify that job has been received + logger.info( "Added new job \"#{new_job.id}\" for #{new_job.os}!", Log::LV_USER) + if not @parent_server.job_log_url.empty? then + logger.info( " * Log URL : #{@parent_server.job_log_url}/#{new_job.id}/log", Log::LV_USER) + end + + # add + @parent_server.jobmgr.add_job( new_job ) + else + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end + + end + + + # "UPLOAD" + def handle_cmd_upload( line, req ) + @log.info "Received File transfer REQ : #{line}" + + tok = line.split("|").map { |x| x.strip } + if tok.count < 2 then + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end + + dock_num = tok[1].strip + + BuildCommServer.send_begin(req) + incoming_dir = "#{@parent_server.transport_path}/#{dock_num}" + if not File.exist? incoming_dir then FileUtils.mkdir_p incoming_dir end + @comm_server.receive_file( req, incoming_dir ) + BuildCommServer.send_end(req) + end + + + # "DOWNLOAD" + # format = DOWNLOAD|dock_num|file_name + def handle_cmd_download( line, req ) + @log.info "Received File transfer REQ : #{line}" + tok = line.split("|").map { |x| x.strip } + if tok.count < 3 then + @log.info "Received Wrong REQ: #{line}" + raise "Invalid request format is used: #{line}" + end + + dock_num = tok[1].strip + file_name = tok[2] + + @log.info "Received a request for download file : #{file_name}" + outgoing_dir = "#{@parent_server.transport_path}/#{dock_num}" + BuildCommServer.send_begin(req) + @log.info "Sending requested file...: #{file_name}" + @comm_server.send_file(req, "#{outgoing_dir}/#{file_name}") + # remove file if "dock" defined + if dock_num != "0" then + @log.info "Removing requested file...: #{file_name}" + FileUtils.rm_rf "#{outgoing_dir}/#{file_name}" + if Utils.directory_emtpy?(outgoing_dir) then + FileUtils.rm_rf "#{outgoing_dir}" + end + end + + BuildCommServer.send_end(req) + end + + + + private + def check_project_exist(project_name, req) + prj = @parent_server.prjmgr.get_project(project_name) + if prj.nil? then + BuildCommServer.send_begin(req) + req.puts "Error: Requested project does not exist!" + req.puts "Info: Check project name using \"query\" command option !" + BuildCommServer.send_end(req) + return nil + end + + return prj + end + + private + def check_project_for_package_file_name(filename, req) + # get package name + new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3') + pkg_name = new_name.split(",")[0] + + prj = @parent_server.prjmgr.get_project_from_package_name(pkg_name) + if prj.nil? then + BuildCommServer.send_begin(req) + req.puts "Error: Requested project does not exist!" + req.puts "Info: Check project name using \"query\" command option !" + BuildCommServer.send_end(req) + return nil + end + + return prj + end + + + private + def check_project_password(prj, passwd, req) + + if prj.is_passwd_set? and not prj.passwd_match?(passwd) then + BuildCommServer.send_begin(req) + req.puts "Error: Project's password is not matched!" + req.puts "Error: Use -w option to input your project password" + BuildCommServer.send_end(req) + return false + end + + return true + end + + + private + def check_supported_os(os_list, req) + + # check if supported os list contain at least one OS + if @parent_server.supported_os_list.empty? 
then + BuildCommServer.send_begin(req) + req.puts "Error: There is no OS supported by the build server." + BuildCommServer.send_end(req) + return nil + end + + result = [] + os_list.each do |os| + if os == "all" or os == "*" then + result = result + @parent_server.supported_os_list + + elsif os == "default" then + os = @parent_server.supported_os_list[0] + result.push os + @log.info "The default OS \"#{os}\" is used as target OS" + + elsif os.include? "*" then + reg_os = os.gsub("*","[a-zA-Z0-9.]*") + @parent_server.supported_os_list.each do |svr_os| + matches = svr_os.match("#{reg_os}") + if not matches.nil? and matches.size == 1 and + matches[0] == svr_os then + result.push svr_os + end + end + else + if not @parent_server.supported_os_list.include?(os) then + BuildCommServer.send_begin(req) + req.puts "Error: Unsupported OS name \"#{os}\" is used!" + req.puts "Error: Check the following supported OS list. " + @parent_server.supported_os_list.each do |os_name| + req.puts " * #{os_name}" + end + BuildCommServer.send_end(req) + return nil + else + result.push os + end + end + end + + if result.empty? then + BuildCommServer.send_begin(req) + req.puts "Error: There is no OS supported by the build server." + BuildCommServer.send_end(req) + return nil + end + + result.uniq! + + return result + end + + + private + def create_new_job( project_name, os ) + return @parent_server.prjmgr.create_new_job(project_name, os) + end + + + private + def create_new_upload_job( project_name, filename, dock, req) + + new_job = @parent_server.prjmgr.get_project(project_name).create_new_job(filename, dock) + + if new_job.nil? then + BuildCommServer.send_begin(req) + req.puts "Error: Creating job failed: #{project_name} #{filename}" + BuildCommServer.send_end(req) + return nil end + + return new_job + end + + + private + def create_new_internal_job( git_repos, os, git_commit, pkg_files, dock_num ) + prj = @parent_server.prjmgr.get_git_project( git_repos ) + if prj.nil? then + prj = @parent_server.prjmgr.create_unnamed_git_project( git_repos ) + end + new_job = prj.create_new_job(os) + new_job.set_internal_job( dock_num ) + new_job.set_git_commit(git_commit) + incoming_dir = "#{@parent_server.transport_path}/#{dock_num}" + pkg_files.each { |file| + new_job.add_external_package( file ) + } + + return new_job end end diff --git a/src/builder/Builder.rb b/src/builder/Builder.rb index a9e672c..f0087fe 100644 --- a/src/builder/Builder.rb +++ b/src/builder/Builder.rb @@ -36,22 +36,24 @@ require "log" class Builder private_class_method :new - attr_accessor :id, :pkgserver_url, :log + attr_accessor :id, :pkgserver_url, :log, :buildroot_dir, :cache_dir CONFIG_ROOT = Utils::HOME + "/.build_tools/builder" @@instance_map = {} # initialize - def initialize (id, pkgserver_url, log_path) + def initialize (id, pkgserver_url, log_path, buildroot_dir, cache_dir) @id = id @pkgserver_url = pkgserver_url @host_os = Utils::HOST_OS + @buildroot_dir = buildroot_dir + @cache_dir = cache_dir @log = Log.new(log_path) end # create - def self.create (id, pkgserver_url, log_path) + def self.create (id, pkgserver_url, log_path, buildroot_dir, cache_dir) # check builder config root check_builder_config_root @@ -61,8 +63,24 @@ class Builder FileUtils.rm_rf "#{CONFIG_ROOT}/#{id}" end + # create buildroot if not set + if buildroot_dir.nil? then + buildroot_dir = "#{CONFIG_ROOT}/#{id}/buildroot" + if not File.exist? buildroot_dir then + FileUtils.mkdir_p buildroot_dir + end + end + + # create cachedir if not set + if cache_dir.nil? 
then + cache_dir = "#{CONFIG_ROOT}/#{id}/build_cache" + if not File.exist? cache_dir then + FileUtils.mkdir_p cache_dir + end + end + # create new instance and return it - @@instance_map[id] = new( id, pkgserver_url, log_path ) + @@instance_map[id] = new( id, pkgserver_url, log_path, buildroot_dir, cache_dir ) # write config write_builder_config( @@instance_map[id] ) @@ -83,6 +101,18 @@ class Builder end + def self.exist?( id ) + # check builder config root + check_builder_config_root + + # check id + if File.exist? "#{CONFIG_ROOT}/#{id}" then + return true + else + return false + end + end + # get def self.get( id ) @@ -106,55 +136,59 @@ class Builder # clean def clean( src_path ) - build_root_dir = "#{CONFIG_ROOT}/#{@id}/buildroot" - - # create pkginfo - pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest") - - # make clean - for pkg in pkginfo.packages - FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.linux" - FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.windows" - FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.darwin" - end - env_def = - "SRCDIR=\"#{src_path}\" " - build_command = "cd \"#{src_path}\";" + env_def + "./package/build.#{@host_os} clean" - if not Utils.execute_shell_with_log( build_command, @log ) - @log.error( "Failed on clean script", Log::LV_USER ) - return false - end - - return true + return clean_project_directory( src_path, nil ) end # build - def build( src_path, os, clean, reverse_dep_check, pending_pkg_dir_list, ignore_rev_dep_build_list ) + def build( src_path, os, clean, local_pkgs, is_local_build ) # create pkginfo - pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest") - - # check there are packages which can be built - if not pkginfo.package_exist?(os, Utils::HOST_OS ) then - @log.error( "There are no packages which can be built on this host OS: #{Utils::HOST_OS}") - @log.error( " * Check \"Build-host-os\" in pkginfo.manifest" ) + if not File.exist? "#{src_path}/package/pkginfo.manifest" then + @log.error( "The \"package/pkginfo.manifest\" file does not exist!", Log::LV_USER) return false end - # set build root - if clean then - build_root_dir = "#{CONFIG_ROOT}/#{@id}/temp_root" - else - build_root_dir = "#{CONFIG_ROOT}/#{@id}/buildroot" + # read pkginfo + begin + pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest") + rescue => e + @log.error( e.message, Log::LV_USER) + return false end - FileUtils.mkdir_p build_root_dir + # set default build os + build_host_os = @host_os - local_pkg_list = [] - pending_pkg_dir_list.each do |dir| - local_pkg_list += Dir.entries(dir).select{|e| e =~ /\.zip$/}.map{|p| dir + "/" + p} - end + # check there are packages which can be built + if not pkginfo.package_exist?(os, build_host_os ) then + if is_local_build and File.exist? 
"#{src_path}/package/pkginfo.manifest.local" then + begin + pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest.local") + rescue => e + @log.error( e.message, Log::LV_USER) + return false + end + if not pkginfo.package_exist?(os, build_host_os ) then + + @log.error( "This project does not support a build on this host OS: #{build_host_os}") + @log.error( " * Check \"Build-host-os\" in pkginfo.manifest or pkginfo.manifest.local" ) + + return false + end + else + @log.error( "This project does not support a build on this host OS: #{build_host_os}") + @log.error( " * Check \"Build-host-os\" in pkginfo.manifest" ) + + return false + end + end + + # set build root + build_root_dir = @buildroot_dir + if not File.exist? build_root_dir then + FileUtils.mkdir_p build_root_dir + end # create client @log.info( "Downloding client is initializing...", Log::LV_USER) @@ -162,12 +196,17 @@ class Builder if clean then cl.clean(true) end - cl.update + + # get local repository path list + repos_paths = [] + local_pkgs.each { |path| + repos_paths.push File.dirname(path) + } + repos_paths.uniq! # install build dependencies - package_overwrite_list = [] @log.info( "Installing dependent packages...", Log::LV_USER) - pkginfo.get_build_dependencies( os, @host_os ).each do |dep| + pkginfo.get_build_dependencies( os ).each do |dep| if dep.target_os_list.count != 0 then dep_target_os = dep.target_os_list[0] else @@ -177,73 +216,52 @@ class Builder # get local dependent package pkgexp = Regexp.new("\/#{dep.package_name}_.*_#{dep_target_os}\.zip$") - package_overwrite_list += local_pkg_list.select{|l| l =~ pkgexp} - - if not cl.install(dep.package_name, dep_target_os, true, false) then - @log.error( "Installing \"#{dep.package_name}\" failed!", Log::LV_USER) - return false + local_dep_pkgs = local_pkgs.select{|l| l =~ pkgexp} + + # install package from remote package server + if local_dep_pkgs.empty? then + if not cl.install(dep.package_name, dep_target_os, true, false) then + @log.error( "Installing \"#{dep.package_name}\" failed!", Log::LV_USER) + return false + end + else + local_dep_pkgs.each do |l| + @log.info( "Installing local pacakge...#{l}", Log::LV_USER) + if not File.exist? l then + @log.error( "File not found!: #{l}", Log::LV_USER ) + end + cl.install_local_pkg(l,true,false, repos_paths) + end end end - # overwrite local dependent packages - package_overwrite_list.each do |l| - cl.install_local_pkg(l,false) - end - @log.info( "Downloading dependent source packages...", Log::LV_USER) - pkginfo.get_source_dependencies(os,@host_os).each do |dep| - @log.info( " * #{dep.package_name}", Log::LV_USER) - - if cl.download_dep_source(dep.package_name).nil? then - @log.error( "Downloading \"#{dep.package_name}\" failed!", Log::LV_USER) + src_archive_list = [] + pkginfo.get_source_dependencies(os,build_host_os).each do |dep| + src_archive_list.push dep.package_name + end + src_archive_list.uniq! + src_archive_list.each do |archive_name| + @log.info( " * #{archive_name}", Log::LV_USER) + if cl.download_dep_source(archive_name).nil? 
then + @log.error( "Downloading \"#{archive_name}\" failed!", Log::LV_USER) return false end - end + end # make clean @log.info( "Make clean...", Log::LV_USER) - for pkg in pkginfo.packages - FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.linux" - FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.windows" - FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.darwin" - end - - # convert path if windows - if Utils::HOST_OS == "windows" then - build_root_dir = Utils.get_unix_path( build_root_dir ) - end - env_def = - "BUILD_TARGET_OS=#{os} \ - SRCDIR=\"#{src_path}\" \ - ROOTDIR=\"#{build_root_dir}\" " - build_command = "cd \"#{src_path}\";" + env_def + "./package/build.#{@host_os} clean" - - if not Utils.execute_shell_with_log( build_command, @log ) - @log.error( "Failed on clean script", Log::LV_USER) + if not clean_project_directory( src_path, os ) then return false end - # make source package - @log.info( "Make source package...", Log::LV_USER) - build_command = "cd \"#{src_path}\";tar czf #{pkginfo.packages[0].source}_#{pkginfo.packages[0].version}.tar.gz --exclude=.git *" - if not Utils.execute_shell_with_log( build_command, @log ) - @log.error( "Failed on tar script", Log::LV_USER) - return false - end - - # execute build script @log.info( "Make build...", Log::LV_USER) - build_command = "cd \"#{src_path}\";" + env_def + "./package/build.#{@host_os} build" - if not Utils.execute_shell_with_log( build_command, @log ) - @log.error( "Failed on build script", Log::LV_USER) + if not execute_build_command("build", src_path, build_root_dir, os, pkginfo.get_version) then return false end - # execute install script @log.info( "Make install...", Log::LV_USER) - build_command = "cd \"#{src_path}\";" + env_def + "./package/build.#{@host_os} install" - if not Utils.execute_shell_with_log( build_command, @log ) - @log.error( "Failed on build script", Log::LV_USER) + if not execute_build_command("install", src_path, build_root_dir, os, pkginfo.get_version) then return false end @@ -256,37 +274,12 @@ class Builder # zip @log.info( "Zipping...", Log::LV_USER) - make_zip(pkginfo,os,src_path) - - # check reverse dependecy if needed - if reverse_dep_check then - if not check_reverse_build_dependency_fail_list(src_path, os, cl, true, ignore_rev_dep_build_list).empty? then - return false - end + if not make_zip(pkginfo,os,src_path) then + @log.error( "Creating packages failed!", Log::LV_USER) + return false end - return true - end - def build_resolve(src_path, os, pending_pkg_dir_list, ignore_rev_dep_build_list) - # clean build - if not build(src_path, os, true, false, pending_pkg_dir_list, ignore_rev_dep_build_list) then - return nil - end - # init client - build_root_dir = "#{CONFIG_ROOT}/#{@id}/temp_root" - cl = Client.new(@pkgserver_url, build_root_dir, @log) - # rev build - return check_reverse_build_dependency_fail_list(src_path, os, cl, false, ignore_rev_dep_build_list ) - end - - - # reset - def reset() - build_root_dir = "#{CONFIG_ROOT}/#{@id}/buildroot" - temp_dir = cl.location - cl.location = build_root_dir - cl.clean(true) - cl.location = temp_dir + return true end @@ -307,15 +300,17 @@ class Builder def self.write_builder_config( obj ) # create config folder builder_dir = "#{CONFIG_ROOT}/#{obj.id}" - FileUtils.mkdir_p( "#{builder_dir}" ) + if not File.exist? 
builder_dir then + FileUtils.mkdir_p( "#{builder_dir}" ) + end # write configuration File.open( "#{builder_dir}/builder.cfg", "w" ) do |f| f.puts "ID=#{obj.id}" f.puts "PSERVER_URL=#{obj.pkgserver_url}" f.puts "LOG-PATH=#{obj.log.path}" + f.puts "CACHE-DIR=#{obj.cache_dir}" end - puts "#{builder_dir}/builder.cfg" end @@ -326,6 +321,8 @@ class Builder # read configuration builder_dir = "#{CONFIG_ROOT}/#{id}" log_path = nil + cache_dir = "#{CONFIG_ROOT}/#{id}/build_cache" + buildroot_dir = "#{CONFIG_ROOT}/#{id}/buildroot" File.open( "#{builder_dir}/builder.cfg", "r" ) do |f| f.each_line do |l| if l.start_with?("PSERVER_URL=") @@ -333,6 +330,10 @@ class Builder elsif l.start_with?("LOG-PATH=") log_path = l.split("=")[1].strip log_path = nil if log_path == "STDOUT" + elsif l.start_with?("CACHE-DIR=") + cache_dir = l.split("=")[1].strip + elsif l.start_with?("BUILDROOT-DIR=") + buildroot_dir = l.split("=")[1].strip else next end @@ -340,151 +341,137 @@ class Builder end if log_path.empty? then log_path = nil end + # create object & return it - return new( id, pkgserver_url, log_path ) + return new( id, pkgserver_url, log_path, buildroot_dir, cache_dir ) end - # check reverse build dependency - def check_reverse_build_dependency_fail_list( parent_path, os, pkg_cl, immediately, ignore_rev_dep_build_list ) - @log.info( "Checking reverse build dependency ...", Log::LV_USER) - - reverse_fail_list = [] - - # install newly packages - for path in Dir.glob("*_*_#{os}.zip") - # install - pkg_cl.install_local_pkg( path, false ) - end - - # get reverse-dependent source-codes - pkginfo = PackageManifest.new("#{parent_path}/package/pkginfo.manifest") - pkg_list = [] - for pkg in pkginfo.packages - pkg_list = pkg_list + pkg_cl.get_reverse_build_dependent_packages(pkg.package_name, os) - @log.info( "Extract reverse build dependency #{pkg.package_name} ...", Log::LV_USER) - end - pkg_list -= ignore_rev_dep_build_list - pkg_list.uniq! - - # download sources - src_path_hash = {} - pkg_list.each do |pkg| - # download - src_path = pkg_cl.download_source(pkg, os) - @log.info( "Downloaded #{pkg} source package to #{src_path}", Log::LV_USER) - - if src_path_hash[src_path].nil? then - src_path_hash[src_path] = [pkg] - else - src_path_hash[src_path] += [pkg] - end - end - src_path_list = src_path_hash.keys - - # add jobs for building reverse-dependent - src_path_list.each do |path| - # extract source package to test path - @log.info( " * Extracting source ... #{path}", Log::LV_USER) - test_src_path = "#{parent_path}/tmp_build" - FileUtils.mkdir_p test_src_path - Utils.execute_shell("cd \"#{test_src_path}\";tar xf #{path}") - - # build - @log.info( " * Building source ... 
", Log::LV_USER) - result = build_test_with_pkg_client( pkg_cl, test_src_path, os, parent_path ) - FileUtils.rm_rf test_src_path - if not result then - reverse_fail_list += src_path_hash[path] - @log.error( "Build \"#{src_path_hash[path].join(", ")}\" test failed", Log::LV_USER) - if immediately then - return reverse_fail_list - end - end - end + # execute build command + def execute_build_command( target, src_path, build_root_dir, os, version ) - return reverse_fail_list.uniq - end + # get category + os_category = Utils.get_os_category( os ) + # convert directory format when windows + if Utils.is_windows_like_os( @host_os ) then + src_path2 = Utils.get_unix_path( src_path ) + else + src_path2 = src_path + end - # build test - def build_test_with_pkg_client( pkg_cl, src_path, os, parent_path) + env_def = + "BUILD_TARGET_OS=#{os} \ + TARGET_OS=#{os} \ + TARGET_OS_CATEGORY=#{os_category} \ + SRCDIR=\"#{src_path2}\" \ + ROOTDIR=\"#{build_root_dir}\" \ + VERSION=\"#{version}\" " + + # check script file + script_file = "#{src_path}/package/build.#{@host_os}" + if not File.exist? script_file then + if Utils.is_linux_like_os( @host_os ) then + script_file = "#{src_path}/package/build.linux" + elsif Utils.is_windows_like_os( @host_os ) then + script_file = "#{src_path}/package/build.windows" + elsif Utils.is_macos_like_os( @host_os ) then + script_file = "#{src_path}/package/build.macos" + end + # check old script file + if not File.exist? script_file then + @log.error( "The script file not found!: \"package/build.#{@host_os}\"", Log::LV_USER) + return false + end + end - local_pkg_list = [] - local_pkg_list += Dir.entries(parent_path).select{|e| e =~ /\.zip$/}.map{|p| parent_path + "/" + p} + # read build script + # this will ignore last lines without block + contents = [] + File.open( script_file, "r" ) do |f| + lines = [] + f.each_line do |l| + lines.push l + if l.start_with? 
"}" then + contents = contents + lines + lines = [] + end + end + end - # create pkginfo - pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest") + # generate shell script + File.open( "#{src_path}/.build.sh", "w" ) do |f| + f.puts "#!/bin/sh -xe" + contents.each do |l| + f.puts l + end - # install build dependencies - package_overwrite_list = [] - pkginfo.get_build_dependencies(os,@host_os).each do |dep| - if dep.target_os_list.count != 0 then - dep_target_os = dep.target_os_list[0] + case target + when "clean" + f.puts " " + when "build" + f.puts " " + when "build_cache" + f.puts "CACHEDIR=${PKG_CACHE_DIR}/$(cache_key)" + when "save_cache" + f.puts "rm -rf ${PKG_CACHE_DIR}/*" + f.puts "CACHEDIR=${PKG_CACHE_DIR}/$(cache_key)" + f.puts "mkdir -p ${CACHEDIR}" + when "install" + f.puts " " else - dep_target_os = os + @log.warn( "Wrong build-target is used: \"#{target}\"", Log::LV_USER) + return false end - - # get local dependent package - pkgexp = Regexp.new("\/#{dep.package_name}_.*_#{dep_target_os}\.zip$") - package_overwrite_list += local_pkg_list.select{|l| l =~ pkgexp} - - pkg_cl.install(dep.package_name, dep_target_os, true, false) - end - - # overwrite local dependent packages - package_overwrite_list.each do |l| - @log.info( "Package overwrite ..#{l}", Log::LV_USER) - pkg_cl.install_local_pkg(l,false) - end - - # source download - pkginfo.get_source_dependencies(os,@host_os).each do |dep| - pkg_cl.download_dep_source(dep.package_name) + f.puts "#{target}" + f.puts "echo \"success\"" end + Utils.execute_shell_with_log( "chmod +x #{src_path}/.build.sh", @log ) + build_command = "cd \"#{src_path}\";" + env_def + "./.build.sh" - # execute build script - build_root_dir = pkg_cl.location - env_def = - "BUILD_TARGET_OS=#{os} \ - SRCDIR=\"#{src_path}\" \ - ROOTDIR=\"#{build_root_dir}\" " - build_command = "cd \"#{src_path}\";" + env_def + "./package/build.#{@host_os} build" + # execute script if not Utils.execute_shell_with_log( build_command, @log ) then - @log.error( "Failed on build script", Log::LV_USER) + @log.error( "Failed on build script: \"#{target}\"", Log::LV_USER) return false else + Utils.execute_shell_with_log( "rm -rf #{src_path}/.build.sh", @log ) return true end - end - # write pkginfo.manifest and install/remove script + # write pkginfo.manifest def write_pkginfo_files(pkginfo,os,src_path) - for pkg in pkginfo.packages + # get category + os_category = Utils.get_os_category( os ) + + pkginfo.packages.each do |pkg| # skip if not support the target os - if not pkg.os.include? os + if not pkg.os_list.include? os next end - # install script files - copy_post_install_script(pkg,os,src_path); - copy_post_remove_script(pkg,os,src_path); + # install/remove script files + if not copy_post_install_script(pkg,os,src_path) then return false end + if not copy_post_remove_script(pkg,os,src_path) then return false end # write manifest file install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}" - + # if there is no intall directory, error if not File.exist? install_dir then - @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER) - @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER) - return false + install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" + + if not File.exist? 
install_dir then + @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER) + @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER) + return false + end end - - # write pkginfo.manifest + + # write pkginfo.manifest File.open("#{install_dir}/pkginfo.manifest", "w") do |f| - pkg.print_to_file_with_os( f, os ) + pkg.print_to_file( f ) end end @@ -494,74 +481,207 @@ class Builder # copy post-install script def copy_post_install_script(pkg,os,src_path) - + tar = nil + src = nil - if File.exist? "#{src_path}/package/#{pkg.package_name}.install.#{os}" - src = "#{src_path}/package/#{pkg.package_name}.install.#{os}" - else - src = nil + # get category + os_category_list = [] + pkg.os_list.each do |cos| + os_category_list.push Utils.get_os_category(cos) + end + + # check compatable os + (pkg.os_list + os_category_list).uniq.each do |cos| + if File.exist? "#{src_path}/package/#{pkg.package_name}.install.#{cos}" then + if src.nil? then + src = "#{src_path}/package/#{pkg.package_name}.install.#{cos}" + else + @log.error( "compatable package can have only one install script\n but you have another availabe install scripts", Log::LV_USER) + @log.error( " * package/#{File.basename src}", Log::LV_USER) + @log.error( " * package/#{pkg.package_name}.install.#{cos}", Log::LV_USER) + return false + end + end + end + + install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}" + + # if there is no intall directory, error + if not File.exist? install_dir then + os_category = Utils.get_os_category( os ) + install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" + + if not File.exist? install_dir then + @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER) + @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER) + return false + end end if not src.nil? then - if os == "linux" or os == "darwin" then - tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/install.sh" - elsif os == "windows" then - tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/install.BAT" + if Utils.is_unix_like_os( os ) then + tar = "#{install_dir}/install.sh" + elsif Utils.is_windows_like_os( os) then + tar = "#{install_dir}/install.BAT" else puts "Unknown OS: #{os} " - return + return true end FileUtils.cp(src, tar) - end - - return + end + + return true end # copy post-remove script def copy_post_remove_script(pkg,os,src_path) - + tar = nil + src = nil - if File.exist? "#{src_path}/package/#{pkg.package_name}.remove.#{os}" - src = "#{src_path}/package/#{pkg.package_name}.remove.#{os}" - else - src = nil + # get category + os_category_list = [] + pkg.os_list.each do |cos| + os_category_list.push Utils.get_os_category(cos) + end + + # check compatable os + (pkg.os_list + os_category_list).uniq.each do |cos| + if File.exist? "#{src_path}/package/#{pkg.package_name}.remove.#{cos}" then + if src.nil? then + src = "#{src_path}/package/#{pkg.package_name}.remove.#{cos}" + else + @log.error( "compatable package can have only one remove script but you have another availabe remove scripts", Log::LV_USER) + @log.error( " * package/#{File.basename src}", Log::LV_USER) + @log.error( " * package/#{pkg.package_name}.remove.#{cos}", Log::LV_USER) + return false + end + end + end + + install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}" + + # if there is no intall directory, error + if not File.exist? 
install_dir then + os_category = Utils.get_os_category( os ) + install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" + + if not File.exist? install_dir then + @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER) + @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER) + return false + end end if not src.nil? - puts "------> #{src}" - if os == "linux" or os == "darwin" then - tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/remove.sh" - puts "-0--\==> #{tar}" - elsif os == "windows" then - tar = "#{src_path}/package/#{pkg.package_name}.package.#{os}/remove.BAT" + if Utils.is_unix_like_os( os ) then + tar = "#{install_dir}/remove.sh" + elsif Utils.is_windows_like_os( os) then + tar = "#{install_dir}/remove.BAT" else puts "Unknown OS: #{os} " - return + return true end FileUtils.cp(src, tar) - end + end + return true end # create package file def make_zip(pkginfo,os,src_path) - for pkg in pkginfo.packages + # get category + os_category = Utils.get_os_category( os ) + + pkginfo.packages.each do |pkg| # skip if not support the target os - if not pkg.os.include? os + if not pkg.os_list.include? os next end # cd install dir install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os}" + if not File.exist? install_dir then + install_dir = "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" + + if not File.exist? install_dir then + @log.error( "Following directory must be created before writing pkginfo.manifest", Log::LV_USER) + @log.error( " * package/#{pkg.package_name}.package.#{os}", Log::LV_USER) + return false + end + end # zip @log.info( "Creating package file ... #{pkg.package_name}_#{pkg.version}_#{os}.zip", Log::LV_USER) - Utils.execute_shell("cd \"#{install_dir}\"; zip -r -y #{src_path}/#{pkg.package_name}_#{pkg.version}_#{os}.zip *") + @log.info("cd \"#{install_dir}\"; zip -r -y #{src_path}/#{pkg.package_name}_#{pkg.version}_#{os}.zip *") + Utils.execute_shell_with_log("cd \"#{install_dir}\"; zip -r -y #{src_path}/#{pkg.package_name}_#{pkg.version}_#{os}.zip *", @log) + if not File.exist? "#{src_path}/#{pkg.package_name}_#{pkg.version}_#{os}.zip" then + return false + end + end + return true + end + + + # clean the temporary directory for packaged + def clean_project_directory(src_path, target_os = nil) + + # if os is not set, use host os instead + if target_os.nil? then target_os = @host_os end + + # convert path if windows + if Utils.is_windows_like_os(@host_os) then + build_root_dir = Utils.get_unix_path( @buildroot_dir ) + else + build_root_dir = @buildroot_dir + end + + # create pkginfo + begin + pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest") + rescue => e + @log.error( e.message, Log::LV_USER) + return false end + + # get category + # make clean + pkginfo.packages.each do |pkg| + os = pkg.os + os_category = Utils.get_os_category( os ) + + if File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}" then + FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}" + elsif File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" then + FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" + end + end + + # clean local-only package's directory + if File.exist? 
"#{src_path}/package/pkginfo.manifest.local" then + begin + pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest.local") + rescue => e + @log.error( e.message, Log::LV_USER) + return false + end + pkginfo.packages.each do |pkg| + os = pkg.os + os_category = Utils.get_os_category( os ) + + if File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}" then + FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{pkg.os}" + elsif File.exist? "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" then + FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{os_category}" + end + end + end + + # execute + return execute_build_command("clean", src_path, build_root_dir, target_os, pkginfo.get_version) end end diff --git a/src/builder/CleanOptionParser.rb b/src/builder/CleanOptionParser.rb index 31611fc..b2f5ad8 100644 --- a/src/builder/CleanOptionParser.rb +++ b/src/builder/CleanOptionParser.rb @@ -28,17 +28,27 @@ Contributors: require 'optparse' require 'logger' +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +require "utils" def parse() #option parsing option = {} - optparse = OptionParser.new do |opts| - opts.banner = "Usage: pkg-clean" - opts.on('-h','--help', 'display this information') do + optparse = OptionParser.new(nil, 32, ' '*8) do |opts| + opts.banner = "Clean the package service command-line tool." + "\n" \ + + "\n" + "Usage: pkg-clean [-h] [-v]" + "\n" \ + + "\n" + "Options:" + "\n" + + opts.on('-h','--help', 'display help') do puts opts exit end + + opts.on('-v','--version', 'display version') do + puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version() + exit + end end optparse.parse! diff --git a/src/builder/optionparser.rb b/src/builder/optionparser.rb index 05888f0..3f23fc3 100644 --- a/src/builder/optionparser.rb +++ b/src/builder/optionparser.rb @@ -28,35 +28,53 @@ Contributors: require 'optparse' require 'logger' +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +require "utils" def parse() #option parsing option = {} optparse = OptionParser.new do |opts| - opts.banner = "Usage: pkg-build -u -o -c -h" - opts.on('-u','--url ', 'remote package server url') do |url| + opts.banner = "Build and packaging service command-line tool." + "\n" \ + + "\n" + "Usage: pkg-build -u [-o ] [-c] [-h] [-v]" + "\n" \ + + "\n" + "Options:" + "\n" + + opts.on('-u','--url ', 'remote package server url: http://127.0.0.1/dibs/unstable') do |url| option[:url] = url end + option[:os] = nil opts.on('-o','--os ', 'operating system ') do |os| option[:os] = os - end + end + option[:clean] = false opts.on('-c','--clean', 'clean build') do option[:clean] = true - end + end + option[:rev] = false - opts.on('-r','--rev', 'reverse build dependency check') do - option[:rev] = true - end - opts.on('-h','--help', 'display this information') do + #opts.on('-r','--rev', 'reverse build dependency check') do + # option[:rev] = true + #end + + opts.on('-h','--help', 'display help') do puts opts exit + end + + opts.on('-v','--version', 'display version') do + puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version() + exit end - end + end optparse.parse! + + if option[:url].nil? or option[:url].empty? 
then + raise ArgumentError, "Usage: pkg-build -u [-o ] [-c] [-h]" + end - return option + return option end diff --git a/src/common/Action.rb b/src/common/Action.rb new file mode 100644 index 0000000..7b64559 --- /dev/null +++ b/src/common/Action.rb @@ -0,0 +1,47 @@ +=begin + + Action.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +class Action + attr_accessor :time, :period + + def initialize( time, period ) + @time = time + @period = period + end + + + # initialize action + def init() + end + + + # execute action + def execute() + end + +end diff --git a/src/common/PackageManifest.rb b/src/common/PackageManifest.rb index 4a9c2d3..044b7b6 100644 --- a/src/common/PackageManifest.rb +++ b/src/common/PackageManifest.rb @@ -33,30 +33,23 @@ class PackageManifest attr_accessor :packages def initialize( file_path ) - @pkg_map = Parser.read_pkginfo_list( file_path ) - @packages = @pkg_map.values + @packages = Parser.read_multy_pkginfo_from file_path end # scan all build dependencies - def get_build_dependencies( target_os, host_os ) + def get_build_dependencies( target_os ) # for all list = [] - for pkg in @packages + @packages.each do |pkg| # package that has the target os - if not pkg.os.include?(target_os) + if not pkg.os_list.include?(target_os) next end - # package that has the host os - if not pkg.build_host_os.include?(host_os) - next - end # package that has the target os - for dep in pkg.build_dep_list - # if dep.target_os_list.include? target_os - list.push dep - # end + pkg.build_dep_list.each do |dep| + list.push dep end end list.uniq! @@ -65,13 +58,13 @@ class PackageManifest end - # scan all build dependencies + # scan all source dependencies def get_source_dependencies( target_os, host_os ) # for all list = [] - for pkg in @packages + @packages.each do |pkg| # only package that used in target os - if not pkg.os.include?(target_os) + if not pkg.os_list.include?(target_os) next end @@ -81,7 +74,7 @@ class PackageManifest end # package that has the target os - for dep in pkg.source_dep_list + pkg.source_dep_list.each do |dep| # if dep.target_os_list.include? target_os list.push dep # end @@ -93,10 +86,32 @@ class PackageManifest end + # scan all install dependencies + def get_install_dependencies( target_os, pkg_name=nil ) + # for all + list = [] + @packages.each do |pkg| + if not pkg_name.nil? and pkg.package_name != pkg_name then next end + # only package that used in target os + if not pkg.os_list.include?(target_os) + next + end + + # package that has the target os + pkg.install_dep_list.each do |dep| + list.push dep + end + end + list.uniq! 
+ + return list + end + + def package_exist?(target_os, host_os) - for pkg in @packages + @packages.each do |pkg| # only package that used in target os - if pkg.os.include?(target_os) and + if pkg.os_list.include?(target_os) and pkg.build_host_os.include?(host_os) return true end @@ -104,4 +119,34 @@ class PackageManifest return false end + + + def get_version() + return @packages[0].version + end + + + def get_target_packages(target_os) + pkgs = [] + @packages.each do |pkg| + if pkg.os_list.include?(target_os) then + pkgs.push pkg + end + end + + return pkgs + end + + + def pkg_exist?(name,ver,os) + @packages.each do |pkg| + if pkg.package_name != name then next end + if not ver.nil? and pkg.version != ver then next end + if not os.nil? and not pkg.os_list.include?(os) then next end + + return true + end + + return false + end end diff --git a/src/common/ScheduledActionHandler.rb b/src/common/ScheduledActionHandler.rb new file mode 100644 index 0000000..a62c5f0 --- /dev/null +++ b/src/common/ScheduledActionHandler.rb @@ -0,0 +1,99 @@ +=begin + + ScheduledActionHandler.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +$LOAD_PATH.unshift File.dirname(__FILE__) + +class ScheduledActionHandler + attr_accessor :quit + + # init + def initialize( ) + @thread = nil + @quit = false + @actions = [] + end + + + # register a action + def register( action ) + # init action + action.init + # add to list + @actions.push action + end + + + # start thread + def start() + @thread = Thread.new { + # main + thread_main() + + # close + terminate() + } + end + + + protected + + def thread_main + + while not @quit + + current_time = Time.new + + # get list + action_list = Array.new(@actions) + action_list.each do |action| + # if its time is reached, execute action + if not action.time.nil? 
and current_time > action.time then + action.execute + + # if periodic action, renew the time + # else remove it from list + if action.period != 0 then + while current_time > action.time + action.time = action.time + action.period + end + else + @actions.delete(action) + end + end + end + + # sleep 10 sec + sleep 10 + end + end + + + def terminate + end + +end diff --git a/src/common/Version.rb b/src/common/Version.rb index 3bdba12..633d378 100644 --- a/src/common/Version.rb +++ b/src/common/Version.rb @@ -45,5 +45,10 @@ class Version < Array end def == x (self <=> x) == 0 + end + def compare x + if self < x then return -1 + elsif self == x then return 0 + else return 1 end end end diff --git a/src/common/fileTransfer.rb b/src/common/fileTransfer.rb new file mode 100644 index 0000000..cda6a4b --- /dev/null +++ b/src/common/fileTransfer.rb @@ -0,0 +1,118 @@ + +require 'socket' + +class FileTransfer + + def FileTransfer.putfile(ip, port, username, passwd, bpath, logger) + filename = File.basename(bpath) + uniqdir = Utils.create_uniq_name + ftp_filepath = File.join(uniqdir, filename) + + begin + ftp = Net::FTP.new + if port.nil? or port == "" then + ftp.connect(ip) + else + ftp.connect(ip, port) + end + logger.info "[FTP log] Connected FTP server (#{ip}:#{port})" + ftp.login(username, passwd) + ftp.binary = true + ftp.passive = true + ftp.mkdir(uniqdir) + ftp.chdir(uniqdir) + ftp.put(bpath) + logger.info "[FTP log] Put a file" + logger.info "[FTP log] from \"#{bpath}\" to \"#{ftp_filepath}\"" + files = ftp.list(filename) + if files.empty? then + logger.error "[FTP log] Failed to upload file (#{filename} does not exist)" + return nil + end + ftp.quit + logger.info "[FTP log] Disconnected FTP server" + rescue => e + logger.error "[FTP log] Exception" + logger.error e.message + logger.error e.backtrace.inspect + return nil + end + return ftp_filepath + end + + def FileTransfer.getfile(ip, port, username, passwd, bpath, target, logger) + dirname = File.dirname(bpath) + filename = File.basename(bpath) + + # target can be directory or file + if File.directory? target then + dst_file = File.join(target,filename) + else + dst_file = target + end + + begin + ftp = Net::FTP.new + if port.nil? or port == "" then + ftp.connect(ip) + else + ftp.connect(ip, port) + end + logger.info "[FTP log] Connected FTP server (#{ip}:#{port})" + ftp.login(username, passwd) + ftp.binary = true + ftp.passive = true + ftp.chdir(dirname) + ftp.get(filename, dst_file) + logger.info "[FTP log] Get a file" + logger.info "[FTP log] from \"#{bpath}\" to \"#{dst_file}\"" + ftp.quit + logger.info "[FTP log] Disconnected FTP server" + rescue => e + logger.error "[FTP log] Exception" + logger.error e.message + logger.error e.backtrace.inspect + return nil + end + if not File.exist? dst_file then + logger.error "[FTP log] Failed to download file (#{dst_file} does not exist)" + return nil + end + return bpath + end + + def FileTransfer.cleandir(ip, port, username, passwd, path, logger) + dirname = File.dirname(path) + + begin + ftp = Net::FTP.new + if port.nil? or port == "" then + ftp.connect(ip) + else + ftp.connect(ip, port) + end + logger.info "[FTP log] Connected FTP server (#{ip}:#{port})" + ftp.login(username, passwd) + old_dir = ftp.pwd + ftp.chdir(dirname) + list = ftp.ls + # TODO: if list is directory? 
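+				# each listing entry is treated as a plain file: the last whitespace-separated
+				# field of the "ls" output line is taken as the file name and deleted, and the
+				# (then empty) directory is removed below; subdirectories are not handled yet
+				# (see the TODO above). Note that this class uses Net::FTP although only
+				# 'socket' is required at the top of the file, so 'net/ftp' is assumed to be
+				# loaded by the caller.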
+ list.each do |l| + file = l.split(" ")[-1].strip + ftp.delete(file) + end + ftp.chdir(old_dir) + ftp.rmdir(dirname) + logger.info "[FTP log] Clean dir (#{dirname})" + ftp.quit + logger.info "[FTP log] Disconnected FTP server" + rescue => e + logger.error "[FTP log] Exception" + logger.error e.message + logger.error e.backtrace.inspect + return nil + end + + return true + end +end diff --git a/src/common/log.rb b/src/common/log.rb index afa8fd7..e7eb190 100644 --- a/src/common/log.rb +++ b/src/common/log.rb @@ -30,7 +30,7 @@ require "logger" class Log - attr_accessor :path + attr_accessor :path, :cnt # Log LEVEL LV_NORMAL = 1 @@ -39,6 +39,7 @@ class Log # init def initialize(path, lv=LV_USER) + @cnt = 0 @path = path if @path.nil? then @logger = Logger.new(STDOUT) @@ -52,43 +53,47 @@ class Log # diable logger format @default_formatter = @logger.formatter @no_prefix_formatter = proc do |severity, datetime, progname, msg| - " >#{msg}" + " >#{msg}" end end def info(msg, lv=LV_NORMAL) - if @path.nil? then puts "Info: #{msg}" + if @path.nil? and not @second_out.nil? then puts "Info: #{msg}" else @logger.info msg end - if not @second_out.nil? and lv >= @second_out_level then - output_extra "Info: " + msg + if not @second_out.nil? and lv >= @second_out_level then + output_extra "Info: " + msg end - end + @cnt = @cnt + 1 + end - def warn(msg, lv=LV_NORMAL) - if @path.nil? then puts "Warn: #{msg}" + def warn(msg, lv=LV_NORMAL) + if @path.nil? and not @second_out.nil? then puts "Warn: #{msg}" else @logger.warn msg end if not @second_out.nil? and lv >= @second_out_level then - output_extra "Warn: " + msg + output_extra "Warn: " + msg end - end + @cnt = @cnt + 1 + end - def error(msg, lv=LV_NORMAL) - if @path.nil? then puts "Error: #{msg}" + def error(msg, lv=LV_NORMAL) + if @path.nil? and not @second_out.nil? then puts "Error: #{msg}" else @logger.error msg end if not @second_out.nil? and lv >= @second_out_level then - output_extra "Error: " + msg + output_extra "Error: " + msg end + @cnt = @cnt + 1 end - def output(msg, lv=LV_NORMAL) - if @path.nil? then puts msg + def output(msg, lv=LV_NORMAL) + if @path.nil? and not @second_out.nil? then puts msg else @logger.info msg end if not @second_out.nil? and lv >= @second_out_level then - output_extra msg + output_extra msg end + @cnt = @cnt + 1 end @@ -99,7 +104,6 @@ class Log protected def output_extra(msg) - #do nothing - end - + #do nothing + end end diff --git a/src/common/package.rb b/src/common/package.rb index 122f977..067342a 100644 --- a/src/common/package.rb +++ b/src/common/package.rb @@ -1,6 +1,6 @@ =begin - - package.rb + + package.rb Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. 
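
The hunk below widens the Package class: the single "os" string is complemented by an "os_list", and "label" and "custom" fields are added. A minimal usage sketch of the new fields, assuming src/common is on the load path; the package name, label and OS values are illustrative only:

    $LOAD_PATH.unshift "src/common"
    require "package"

    pkg = Package.new("example-toolchain")       # hypothetical package name
    pkg.version = "0.1.0"
    pkg.os_list = ["ubuntu-32", "windows-32"]    # new: list of target OS names
    pkg.os      = pkg.os_list[0]                 # kept in sync with os_list[0], as the parser does
    pkg.label   = "Example Toolchain"            # new: human-readable label
    puts pkg.to_s
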
@@ -27,11 +27,13 @@ Contributors: =end class Package - attr_accessor :package_name, :version, :os, :build_host_os, :maintainer, :attribute, :install_dep_list, :build_dep_list, :source_dep_list, :conflicts, :source, :src_path, :path, :origin, :checksum, :size, :description + attr_accessor :package_name, :label, :version, :os, :build_host_os, :maintainer, :attribute, :install_dep_list, :build_dep_list, :source_dep_list, :conflicts, :source, :src_path, :path, :origin, :checksum, :size, :description, :os_list, :custom def initialize (package_name) @package_name = package_name + @label = "" @version = "" @os = "" + @os_list = [] @build_host_os = [] @maintainer = "" @attribute = [] @@ -46,116 +48,45 @@ class Package @checksum = "" @size = "" @description = "" - end + @custom = "" + end + def print - puts "Package : " + @package_name - if not @version.empty? then puts "Version : " + @version end - if not @os.empty? then puts "OS : " + @os end - if not @build_host_os.empty? then puts "Build-host-os : " + @build_host_os.join("|") end - if not @maintainer.empty? then puts "Maintainer : " + @maintainer end - if not @attribute.empty? then puts "Attribute : " + @attribute.join("|") end - if not @install_dep_list.empty? then - puts "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ") - end - if not @build_dep_list.empty? then - puts "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ") - end - if not @source_dep_list.empty? then - puts "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ") - end - if not @conflicts.empty? then - puts "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ") - end - if not @source.empty? then puts "Source : " + @source end - if not @src_path.empty? then puts "Src-path : " + @src_path end - if not @path.empty? then puts "Path : " + @path end - if not @origin.empty? then puts "Origin : " + @origin end - if not @checksum.empty? then puts "SHA256 : " + @checksum end - if not @size.empty? then puts "Size : " + @size end - if not @description.empty? then puts "Description : " + @description end - end + puts self.to_s + end def to_s string = "Package : " + @package_name - if not @version.empty? then string = string + "\n" + "Version : " + @version end - if not @os.empty? then string = string + "\n" + "OS : " + @os end - if not @build_host_os.empty? then string = string + "\n" + "Build-host-os : " + @build_host_os.join("|") end - if not @maintainer.empty? then string = string + "\n" + "Maintainer : " + @maintainer end - if not @attribute.empty? then string = string + "\n" + "Attribute : " + @attribute.join("|") end - if not @install_dep_list.empty? then + if not @label.empty? then string = string + "\n" + "Label : " + @label end + if not @version.empty? then string = string + "\n" + "Version : " + @version end + if not @os_list.empty? then string = string + "\n" + "OS : " + @os_list.join(", ") end + if not @build_host_os.empty? then string = string + "\n" + "Build-host-os : " + @build_host_os.join(", ") end + if not @maintainer.empty? then string = string + "\n" + "Maintainer : " + @maintainer end + if not @attribute.empty? then string = string + "\n" + "Attribute : " + @attribute.join("|") end + if not @install_dep_list.empty? then string = string + "\n" + "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ") - end - if not @build_dep_list.empty? then + end + if not @build_dep_list.empty? 
then string = string + "\n" + "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ") - end - if not @source_dep_list.empty? then + end + if not @source_dep_list.empty? then string = string + "\n" + "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ") - end - if not @conflicts.empty? then + end + if not @conflicts.empty? then string = string + "\n" + "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ") - end - if not @source.empty? then string = string + "\n" + "Source : " + @source end - if not @src_path.empty? then string = string + "\n" + "Src-path : " + @src_path end - if not @path.empty? then string = string + "\n" + "Path : " + @path end - if not @origin.empty? then string = string + "\n" + "Origin : " + @origin end - if not @checksum.empty? then string = string + "\n" + "SHA256 : " + @checksum end - if not @size.empty? then string = string + "\n" + "Size : " + @size end - if not @description.empty? then string = string + "\n" + "Description : " + @description end - return string - end - def print_to_file(file) - file.puts "Package : " + @package_name - if not @version.empty? then file.puts "Version : " + @version end - if not @os.empty? then file.puts "OS : " + @os end - if not @build_host_os.empty? then file.puts "Build-host-os : " + @build_host_os.join("|") end - if not @maintainer.empty? then file.puts "Maintainer : " + @maintainer end - if not @attribute.empty? then file.puts "Attribute : " + @attribute.join("|") end - if not @install_dep_list.empty? then - file.puts "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ") - end - if not @build_dep_list.empty? then - file.puts "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ") - end - if not @source_dep_list.empty? then - file.puts "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ") - end - if not @conflicts.empty? then - file.puts "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ") - end - if not @source.empty? then file.puts "Source : " + @source end - if not @src_path.empty? then file.puts "Src-path : " + @src_path end - if not @path.empty? then file.puts "Path : " + @path end - if not @origin.empty? then file.puts "Origin : " + @origin end - if not @checksum.empty? then file.puts "SHA256 : " + @checksum end - if not @size.empty? then file.puts "Size : " + @size end - if not @description.empty? then file.puts "Description : " + @description end + end + if not @source.empty? then string = string + "\n" + "Source : " + @source end + if not @src_path.empty? then string = string + "\n" + "Src-path : " + @src_path end + if not @path.empty? then string = string + "\n" + "Path : " + @path end + if not @origin.empty? then string = string + "\n" + "Origin : " + @origin end + if not @checksum.empty? then string = string + "\n" + "SHA256 : " + @checksum end + if not @size.empty? then string = string + "\n" + "Size : " + @size end + if not @custom.empty? then string = string + "\n" + @custom end + if not @description.empty? then string = string + "\n" + "Description : " + @description end + return string end - def print_to_file_with_os(file,target_os) - file.puts "Package : " + @package_name - if not @version.empty? then file.puts "Version : " + @version end - file.puts "OS : " + target_os - if not @build_host_os.empty? then file.puts "Build-host-os : " + @build_host_os.join("|") end - if not @maintainer.empty? then file.puts "Maintainer : " + @maintainer end - if not @attribute.empty? 
then file.puts "Attribute : " + @attribute.join("|") end - if not @install_dep_list.empty? then - file.puts "Install-dependency : " + @install_dep_list.map {|x| x.to_s}.join(", ") - end - if not @build_dep_list.empty? then - file.puts "Build-dependency : " + @build_dep_list.map {|x| x.to_s}.join(", ") - end - if not @source_dep_list.empty? then - file.puts "Source-dependency : " + @source_dep_list.map {|x| x.to_s}.join(", ") - end - if not @conflicts.empty? then - file.puts "Conflicts : " + @conflicts.map {|x| x.to_s}.join(", ") - end - if not @source.empty? then file.puts "Source : " + @source end - if not @src_path.empty? then file.puts "Src-path : " + @src_path end - if not @path.empty? then file.puts "Path : " + @path end - if not @origin.empty? then file.puts "Origin : " + @origin end - if not @checksum.empty? then file.puts "SHA256 : " + @checksum end - if not @size.empty? then file.puts "Size : " + @size end - if not @description.empty? then file.puts "Description : " + @description end + def print_to_file(file) + file.puts self.to_s end -end +end diff --git a/src/common/parser.rb b/src/common/parser.rb index 01f7acf..8bb863d 100644 --- a/src/common/parser.rb +++ b/src/common/parser.rb @@ -1,6 +1,6 @@ =begin - - parser.rb + + parser.rb Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. @@ -31,172 +31,304 @@ require "package" require "dependency" class Parser - def Parser.read_pkginfo_list (file) - pkglist = {} + def Parser.read_multy_pkginfo_from (file, only_common = false) + pkglist = [] + package = nil + + #file check + File.open file,"r" do |f| #variable initialize - package_name = "" - version = "" - os = "" - build_host_os = [] - maintainer = "" - attribute = [] - install_dep_list = [] - build_dep_list = [] - source_dep_list = [] - conflicts = [] - source = "" - src_path = "" - path = "" - origin = "" - checksum = "" - size = "" - description = "" - + state = "INIT" + common_source = "" + common_version = "" + common_maintainer = "" f.each_line do |l| # separator - if l.strip.empty? then - #make package and initialize - if not package_name.empty? and not os.empty? then - package = Package.new(package_name) - if not version.empty? then package.version = version end - if not os.empty? then package.os = os end - if not build_host_os.empty? then package.build_host_os = build_host_os end - if not maintainer.empty? then package.maintainer = maintainer end - if not attribute.empty? then package.attribute = attribute end - if not install_dep_list.empty? then package.install_dep_list = install_dep_list end - if not build_dep_list.empty? then package.build_dep_list = build_dep_list end - if not source_dep_list.empty? then package.source_dep_list = source_dep_list end - if not conflicts.empty? then package.conflicts = conflicts end - if not source.empty? then package.source = source end - if not src_path.empty? then package.src_path = src_path end - if not path.empty? then package.path = path end - if not origin.empty? then package.origin = origin end - if not checksum.empty? then package.checksum = checksum end - if not size.empty? then package.size = size end - if not description.empty? 
then package.description = description end - pkglist[[package_name,os]] = package - package_name = "" - version = "" - os = "" - bulid_host_os = [] - maintainer = "" - attribute = [] - install_dep_list = [] - build_dep_list = [] - source_dep_list = [] - conflicts = [] - source = "" - src_path = "" - path = "" - origin = "" - checksum = "" - size = "" - description = "" - end - next - end - # commant - if l.strip.start_with? "#" then next end - #contents - dsic_on = false - case l.strip.split(':')[0].strip - when /^Package/i then - package_name = l.sub(/^[ \t]*Package[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Version/i then - version = l.sub(/^[ \t]*Version[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^OS/i then - os = l.sub(/^[ \t]*OS[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Build-host-os/i then - build_host_os = l.sub(/^[ \t]*Build-host-os[ \t]*:[ \t]*/i,"").tr(" \t\n\r", "").split("|") - disc_on=false - when /^Maintainer/i then - maintainer = l.sub(/^[ \t]*Maintainer[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Attribute/i then - attribute = l.sub(/^[ \t]*Attribute[ \t]*:[ \t]*/i,"").tr(" \t\n\r","").split("|") - disc_on=false - when /^Install-dependency/i then - install_dep_list = dep_parser l.sub(/^[ \t]*Install-dependency[ \t]*:[ \t]*/i,"").split(',') - disc_on=false - when /^Build-dependency/i then - build_dep_list = dep_parser l.sub(/^[ \t]*Build-dependency[ \t]*:[ \t]*/i,"").split(',') - disc_on=false - when /^Source-dependency/i then - source_dep_list = dep_parser l.sub(/^[ \t]*Source-dependency[ \t]*:[ \t]*/i,"").split(',') - disc_on=false - when /^Conflicts/i then - conflicts = dep_parser l.sub(/^[ \t]*Conflicts[ \t]*:[ \t]*/i,"").split(',') - disc_on=false - when /^Source/i then - source = l.sub(/^[ \t]*Source[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Src-path/i then - src_path = l.sub(/^[ \t]*Src-path[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Path/i then - path = l.sub(/^[ \t]*Path[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Origin/i then - origin = l.sub(/^[ \t]*Origin[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^SHA256/i then - checksum = l.sub(/^[ \t]*SHA256[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Size/i then - size = l.sub(/^[ \t]*Size[ \t]*:[ \t]*/i,"").strip - disc_on=false - when /^Description/i then - description = l.sub(/^[ \t]*Description[ \t]*:[ \t]*/i,"") - disc_on=true - else - if disc_on then - description = description + l - else - puts "unknown section : #{l}" - end - end + if l.strip.empty? then + #make package and initialize + if state == "PACKAGE" then + if not package.package_name.empty? then + pkglist.push package + else + raise RuntimeError, "#{file} format is not valid" + end + end + state = "INIT" + package = nil + next + end + # commant + if l.strip.start_with? "#" then next end + #contents + dsic_on = false + case l.strip.split(':')[0].strip + when /^Package/i then + if only_common then return [common_source, common_version, common_maintainer] end + # state control + case state + when "INIT" then state = "PACKAGE" + when "COMMON" then state = "PACKAGE" + when "PACKAGE" then + if not package.package_name.empty? then + pkglist.push package + else + raise RuntimeError, "Package name is not set in \"#{file}\" file" + end + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end - end - #i essent - - # check last package - if not package_name.empty? and not os.empty? then - package = Package.new(package_name) - if not version.empty? then package.version = version end - if not os.empty? 
then package.os = os end - if not build_host_os.empty? then package.build_host_os = build_host_os end - if not maintainer.empty? then package.maintainer = maintainer end - if not attribute.empty? then package.attribute = attribute end - if not install_dep_list.empty? then package.install_dep_list = install_dep_list end - if not build_dep_list.empty? then package.build_dep_list = build_dep_list end - if not source_dep_list.empty? then package.source_dep_list = source_dep_list end - if not conflicts.empty? then package.conflicts = conflicts end - if not source.empty? then package.source = source end - if not src_path.empty? then package.src_path = src_path end - if not path.empty? then package.path = path end - if not origin.empty? then package.origin = origin end - if not checksum.empty? then package.checksum = checksum end - if not size.empty? then package.size = size end - if not description.empty? then package.description = description end - pkglist[[package_name,os]] = package - end - end - return pkglist - end + package_name = l.sub(/^[ \t]*Package[ \t]*:[ \t]*/i,"").strip + if not package_name.empty? then + package = Package.new(package_name) + package.source = common_source + package.version = common_version + package.maintainer = common_maintainer + else + raise RuntimeError, "Package name is not set in \"#{file}\" file" + end + disc_on=false + when /^Label/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Label field in Common section in \"#{file}\" file" + when "PACKAGE" then package.label = l.sub(/^[ \t]*Label[ \t]*:[ \t]*/i,"").strip + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Version/i then + case state + when "INIT" , "COMMON" then + if common_version.empty? then + common_version = l.sub(/^[ \t]*Version[ \t]*:[ \t]*/i,"").strip + else + raise RuntimeError, "Version information is conflict in \"#{file}\" file\nIf use Version field in Common section then Package section can't contain Version field" + end + when "PACKAGE" then + if common_version.empty? 
then + package.version = l.sub(/^[ \t]*Version[ \t]*:[ \t]*/i,"").strip + else + raise RuntimeError, "Version information is conflict in \"#{file}\" file\nIf use Version field in Common section then Package section can't contain Version field" + end + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^OS/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support OS field in Common section in \"#{file}\" file" + when "PACKAGE" then + package.os_list = l.sub(/^[ \t]*OS[ \t]*:[ \t]*/i,"").tr(" \t\n\r", "").split(",") + package.os = package.os_list[0] + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Build-host-os/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Build-host-os field in Common section in \"#{file}\" file" + when "PACKAGE" then package.build_host_os = l.sub(/^[ \t]*Build-host-os[ \t]*:[ \t]*/i,"").tr(" \t\n\r", "").split(",") + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Maintainer/i then + case state + when "INIT" , "COMMON" then + if common_maintainer.empty? then + common_maintainer = l.sub(/^[ \t]*Maintainer[ \t]*:[ \t]*/i,"").strip + else + raise RuntimeError, "Maintainer information is conflict in \"#{file}\" file\nIf use Maintainer field in Common section then Package section can't contain Maintainer field" + end + when "PACKAGE" then + if common_maintainer.empty? then + package.maintainer = l.sub(/^[ \t]*Maintainer[ \t]*:[ \t]*/i,"").strip + else + raise RuntimeError, "Maintainer information is conflict in \"#{file}\" file\nIf use Maintainer field in Common section then Package section can't contain Maintainer field" + end + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Attribute/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Attribute field in Common section in \"#{file}\" file" + when "PACKAGE" then package.attribute = l.sub(/^[ \t]*Attribute[ \t]*:[ \t]*/i,"").tr(" \t\n\r","").split("|") + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Install-dependency/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Install-dependency field in Common section in \"#{file}\" file" + when "PACKAGE" then package.install_dep_list = dep_parser l.sub(/^[ \t]*Install-dependency[ \t]*:[ \t]*/i,"").split(',') + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Build-dependency/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Build-dependency field in Common section in \"#{file}\" file" + when "PACKAGE" then package.build_dep_list = dep_parser l.sub(/^[ \t]*Build-dependency[ \t]*:[ \t]*/i,"").split(',') + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Source-dependency/i then + case state + when "INIT" then raise 
RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Source-dependency field in Common section in \"#{file}\" file" + when "PACKAGE" then package.source_dep_list = dep_parser l.sub(/^[ \t]*Source-dependency[ \t]*:[ \t]*/i,"").split(',') + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Conflicts/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Conflicts field in Common section in \"#{file}\" file" + when "PACKAGE" then package.conflicts = dep_parser l.sub(/^[ \t]*Conflicts[ \t]*:[ \t]*/i,"").split(',') + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Source/i then + case state + when "INIT" , "COMMON" then + state = "COMMON" + if common_source.empty? then + common_source = l.sub(/^[ \t]*Source[ \t]*:[ \t]*/i,"").strip + else + raise RuntimeError, "Source information is conflict in \"#{file}\" file\nIf use Source field in Common section then Package section can't contain Source field" + end + when "PACKAGE" then + if common_source.empty? then + package.source = l.sub(/^[ \t]*Source[ \t]*:[ \t]*/i,"").strip + else + raise RuntimeError, "Source information is conflict in \"#{file}\" file\nIf use Source field in Common section then Package section can't contain Source field" + end + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Src-path/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Src-path field in Common section in \"#{file}\" file" + when "PACKAGE" then + package.src_path = l.sub(/^[ \t]*Src-path[ \t]*:[ \t]*/i,"").strip + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^ORIGIN/ then + #for compatable + next + when /^Include/i then + case state + when "INIT", "COMMON" then + pfile = File.dirname(file) + "/" + l.sub(/^[ \t]*Include[ \t]*:[ \t]*/i,"").strip + if File.exist? 
pfile then + pkglist = Parser.read_multy_pkginfo_from pfile + list = Parser.read_multy_pkginfo_from(pfile, true) + common_source = list[0] + common_version = list[1] + common_maintainer = list[2] + else + raise RuntimeError, "Not exist \"#{pfile}\"" + end + when "PACKAGE" then raise RuntimeError, "Not support Include field in Common section in \"#{file}\" file" + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Path/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Path field in Common section in \"#{file}\" file" + when "PACKAGE" then package.path = l.sub(/^[ \t]*Path[ \t]*:[ \t]*/i,"").strip + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Origin/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Origin field in Common section in \"#{file}\" file" + when "PACKAGE" then package.origin = l.sub(/^[ \t]*Origin[ \t]*:[ \t]*/i,"").strip + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^SHA256/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support SHA256 field in Common section in \"#{file}\" file" + when "PACKAGE" then package.checksum = l.sub(/^[ \t]*SHA256[ \t]*:[ \t]*/i,"").strip + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Size/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Size field in Common section in \"#{file}\" file" + when "PACKAGE" then package.size = l.sub(/^[ \t]*Size[ \t]*:[ \t]*/i,"").strip + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + when /^Description/i then + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Description field in Common section in \"#{file}\" file" + when "PACKAGE" then package.description = l.sub(/^[ \t]*Description[ \t]*:[ \t]*/i,"") + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=true + when /^C-/ then + #custom field + case state + when "INIT" then raise RuntimeError, "\"Package :\" string must be infront of Package section in \"#{file}\" file" + when "COMMON" then raise RuntimeError, "Not support Description field in Common section in \"#{file}\" file" + when "PACKAGE" then + if package.custom.empty? then + package.custom = l.strip + else + package.custom = package.custom + "\n" + l.strip + end + else raise RuntimeError, "UNKNOWN parser state : #{state}" + end + disc_on=false + else + if disc_on and state == "PACKAGE" then + package.description = package.description + l + else + raise RuntimeError, "Can't parse below line in \"#{file}\" file \n\t#{l}" + end + end + end - def Parser.read_pkginfo (file) - return read_pkg_list(file).values[0] - end + if only_common then return [common_source, common_version, common_maintainer] end + + # check last package + if state == "PACKAGE" then + if not package.package_name.empty? 
then + pkglist.push package + else + raise RuntimeError, "Package name is not set in \"#{file}\" file" + end + end + end + return pkglist + end + + def Parser.read_single_pkginfo_from (file) + return read_multy_pkginfo_from(file)[0] + end - def Parser.read_pkg_list (file) + def Parser.read_repo_pkg_list_from (file) result = {} - read_pkginfo_list(file).values.each { |x| result[x.package_name]=x } + read_multy_pkginfo_from(file).each { |x| result[x.package_name]=x } return result - end + end + + #for test + def Parser.print (array) + array.each do |package| + puts package.to_s + puts "" + end + end private def Parser.dep_parser (string_list) @@ -208,7 +340,7 @@ class Parser base_version = nil target_os_list = [] #string trim - dependency = dep.tr " \t\n", "" + dependency = dep.tr " \t\r\n", "" #version extract vs = dependency.index('(') ve = dependency.index(')') diff --git a/src/common/utils.rb b/src/common/utils.rb index 24b12ff..ea2bf95 100644 --- a/src/common/utils.rb +++ b/src/common/utils.rb @@ -27,32 +27,61 @@ Contributors: =end class Utils + STARTUP_INFO_SIZE = 68 + PROCESS_INFO_SIZE = 16 + NORMAL_PRIORITY_CLASS = 0x00000020 + + def Utils.identify_current_OS() + os = "UnsupportedOS" - if defined?(HOST_OS).nil? then case `uname -s`.strip when "Linux" - HOST_OS = "linux" - when /MINGW32.*/ - HOST_OS = "windows" + if File.exist? "/etc/debian_version" then + arch = (`uname -i`.strip == "x86_64") ? "64" : "32" + os = "ubuntu-#{arch}" + elsif File.exist? "/etc/redhat-release" then + os = "redhat-unknown" + elsif File.exist? "/etc/SuSE-release" then + arch = (`uname -i`.strip == "x86_64") ? "64" : "32" + os = "opensuse-#{arch}" + elsif File.exist? "/etc/mandrake-release" then + os = "mandrake-unknown" + end + when "MINGW32_NT-5.1" + progfile_path = Utils.execute_shell_return("echo $PROGRAMFILES","windows")[0].strip + if progfile_path.include?("(x86)") then arch = "64" else arch = "32" end + os = "windows-#{arch}" + when "MINGW32_NT-6.1" + progfile_path = Utils.execute_shell_return("echo $PROGRAMFILES","windows")[0].strip + if progfile_path.include?("(x86)") then arch = "64" else arch = "32" end + os = "windows-#{arch}" when "Darwin" - HOST_OS = "darwin" - else - end + os = "macos-64" + end + + return os end - # set static variable in WORKING_DIR, HOME - if defined?(WORKING_DIR).nil? then WORKING_DIR = Dir.pwd end - if defined?(HOME).nil? then - # get home directory, using Dir.chdir - Dir.chdir - HOME = Dir.pwd - Dir.chdir WORKING_DIR + + def Utils.check_host_OS() + if Utils.get_all_OSs().include? HOST_OS then + return true + else + return false + end + end + + + def Utils.get_all_OSs() + return ["ubuntu-32","ubuntu-64","windows-32","windows-64","macos-64","opensuse-32", "opensuse-64"] end + def Utils.create_uniq_name time = Time.new + # uniq snapshot_name name is year_month_day_hour_min_sec_microsec - return time.strftime("%m%d%H%M%S") + time.usec.to_s() + return time.strftime("%m%d%H%M%S") + time.usec.to_s.rjust(6, '0') end def Utils.is_url_remote(url) @@ -109,26 +138,44 @@ class Utils return 0 end + def Utils.execute_shell_generate(cmd, os_category = nil) + result_lines = [] + + if os_category.nil? then os_category = get_os_category( HOST_OS ) end + + if os_category == "windows" then + mingw_path = "sh.exe -c " + cmd = cmd.gsub("\"", "\\\"") + cmd = mingw_path + "\"#{cmd}\"" + end - def Utils.execute_shell(cmd) + return cmd + end + + + def Utils.execute_shell(cmd, os_category = nil) ret = false - if HOST_OS.eql? "windows" then + if os_category.nil? 
then os_category = get_os_category( HOST_OS ) end + + if os_category == "windows" then mingw_path = "sh.exe -c " cmd = cmd.gsub("\"", "\\\"") cmd = mingw_path + "\"#{cmd}\"" end - system "#{cmd}" + `#{cmd}` if $?.to_i == 0 then ret = true else ret = false end return ret end - def Utils.execute_shell_return(cmd) + def Utils.execute_shell_return(cmd, os_category = nil) result_lines = [] - if HOST_OS.eql? "windows" then + if os_category.nil? then os_category = get_os_category( HOST_OS ) end + + if os_category == "windows" then mingw_path = "sh.exe -c " cmd = cmd.gsub("\"", "\\\"") cmd = mingw_path + "\"#{cmd}\"" @@ -148,8 +195,10 @@ class Utils end end - def Utils.execute_shell_return_ret(cmd) - if HOST_OS.eql? "windows" then + def Utils.execute_shell_return_ret(cmd, os_category = nil) + if os_category.nil? then os_category = get_os_category( HOST_OS ) end + + if os_category == "windows" then mingw_path = "sh.exe -c " cmd = cmd.gsub("\"", "\\\"") cmd = mingw_path + "\"#{cmd}\"" @@ -158,9 +207,11 @@ class Utils return `#{cmd}` end - def Utils.execute_shell_with_log(cmd, log) + def Utils.execute_shell_with_log(cmd, log, os_category = nil) - if HOST_OS.eql? "windows" then + if os_category.nil? then os_category = get_os_category( HOST_OS ) end + + if os_category == "windows" then mingw_path = "sh.exe -c " cmd = cmd.gsub("\"", "\\\"") cmd = mingw_path + "\"#{cmd}\"" @@ -180,16 +231,79 @@ class Utils end end + def Utils.spawn(cmd, os_category = nil) + + if os_category.nil? then os_category = get_os_category( HOST_OS ) end + + if os_category == "windows" then + create_process(cmd) + else + fork do + exec(cmd) + end + end + + end + + def Utils.create_process(command,redirStdout="", redirStderr="") + + if redirStdout.length > 0 + tmpfile = File.new(redirStdout,"w") + save_stdout = $stdout.clone + $stdout.reopen(tmpfile) + end + + if redirStderr.length > 0 + tmpfile = File.new(redirStderr,"w") + save_stderr = $stderr.clone + $stderr.reopen(tmpfile) + end + + params = [ + 'L', # IN LPCSTR lpApplicationName + 'P', # IN LPSTR lpCommandLine + 'L', # IN LPSECURITY_ATTRIBUTES lpProcessAttributes + 'L', # IN LPSECURITY_ATTRIBUTES lpThreadAttributes + 'L', # IN BOOL bInheritHandles + 'L', # IN DWORD dwCreationFlags + 'L', # IN LPVOID lpEnvironment + 'L', # IN LPCSTR lpCurrentDirectory + 'P', # IN LPSTARTUPINFOA lpStartupInfo + 'P' # OUT LPPROCESS_INFORMATION lpProcessInformation + ] + returnValue = 'I' # BOOL + + startupInfo = [STARTUP_INFO_SIZE].pack('I') + ([0].pack('I') * (STARTUP_INFO_SIZE - 4)) + processInfo = [0].pack('I') * PROCESS_INFO_SIZE + + createProcess = Win32API.new("kernel32", "CreateProcess", params, returnValue) + + createProcess.call(0, command, 0, 0, 0, NORMAL_PRIORITY_CLASS, 0, 0, startupInfo, processInfo) + + if redirStdout.length > 0 + $stdout.reopen(save_stdout) + end + + save_stdout.close if save_stdout + + if redirStderr.length > 0 + $stderr.reopen(save_stderr) + end + + save_stderr.close if save_stderr + + ($0 == __FILE__ ) ? processInfo : processInfo.unpack("LLLL")[2] + end def Utils.is_absolute_path(path) - if HOST_OS.eql? "linux" or HOST_OS.eql? "darwin" then + if is_unix_like_os( HOST_OS ) then # if path start "/" then absoulte path if path.start_with?("/") then return true else return false end - elsif HOST_OS.eql? "windows" then + elsif is_windows_like_os( HOST_OS ) then # if path start "c:/" or "D:/" or ... 
then absoulte path if path =~ /^[a-zA-Z]:[\/]/ then return true @@ -204,9 +318,9 @@ class Utils # this will be used on MinGW/MSYS def Utils.get_unix_path(path) - if HOST_OS.eql? "linux" or HOST_OS.eql? "darwin" then + if is_unix_like_os( HOST_OS ) then return path - elsif HOST_OS.eql? "windows" then + elsif is_windows_like_os( HOST_OS ) then new_path = path if is_absolute_path( new_path ) then new_path = "/" + new_path[0,1] + new_path[2..-1] @@ -217,4 +331,234 @@ class Utils return path end end + + def Utils.file_lock(lock_file_name) + lock_file = File.new(lock_file_name, File::RDWR|File::CREAT, 0644) + lock_file.flock(File::LOCK_EX) + lock_file.rewind + lock_file.flush + lock_file.truncate(lock_file.pos) + + return lock_file + end + + def Utils.file_unlock(lock_file) + lock_file.close + end + + def Utils.parse_server_addr(saddr) + addr = saddr.split(":") + return nil unless addr.length == 2 + return addr + end + + def Utils.parse_ftpserver_url(surl) + return nil unless surl.start_with? "ftp://" + + surl = surl[6..-1] + parse1 = surl.split("@") + return nil unless parse1.length == 2 + + idpw = parse1[0] + url = parse1[1] + parse1 = idpw.split(":") + return nil unless parse1.length == 2 + + id = parse1[0] + passwd = parse1[1] + if url.end_with? "/" then url = url.chop end + + parse1 = url.split(":") + if parse1.length == 2 then + ip = parse1[0] + port = parse1[1] + elsif parse1.length == 1 then + ip = parse1[0] + port = 21 + else + return nil + end + + return [ip, port, id, passwd] + end + + + def Utils.generate_ftp_url(addr, port, username, passwd) + return "ftp://#{username}:#{passwd}@#{addr}:#{port}" + end + + + def Utils.extract_a_file(file_path, target_file, path) + dirname = File.dirname(file_path) + filename = File.basename(file_path) + ext = File.extname(filename) + + # path should be unix path if it is used in tar command + _package_file_path = Utils.get_unix_path(file_path) + _path = Utils.get_unix_path(path) + + case ext + when ".zip" then + if not path.nil? then + extract_file_command = "unzip -xo #{_package_file_path} #{target_file} -d #{_path}" + else + extract_file_command = "unzip -xo #{_package_file_path} #{target_file}" + end + when ".tar" then + if not path.nil? then + extract_file_command = "tar xf #{_package_file_path} -C #{_path} #{target_file}" + else + extract_file_command = "tar xf #{_package_file_path} #{target_file}" + end + end + + # check exit code + ret = execute_shell "#{extract_file_command}" + if not ret then return false end + + # check result file + if not path.nil? then + target_file_path = File.join(path, target_file) + else + target_file_path = target_file + end + + if not File.exist? target_file_path then + return false + else + return true + end + end + + + # check if the os is windows-like + def Utils.is_windows_like_os(os_name) + if os_name.start_with? "windows-" then + return true + else + return false + end + end + + + # check if the os is unix-like + def Utils.is_unix_like_os(os_name) + if os_name.start_with? "ubuntu-" or + os_name.start_with? "opensuse-" or + os_name.start_with?"macos-" then + return true + else + return false + end + end + + + # check if the os is linux-like + def Utils.is_linux_like_os(os_name) + if os_name.start_with? "ubuntu-" or + os_name.start_with? 
"opensuse-" then + return true + else + return false + end + end + + + # check if the os is macos-like + def Utils.is_macos_like_os(os_name) + if os_name.start_with?"macos-" then + return true + else + return false + end + end + + + def Utils.get_os_category(os_name) + if os_name.start_with? "ubuntu-" or os_name.start_with? "opensuse-" then + return "linux" + elsif os_name.start_with?"macos-" then + return "macos" + elsif os_name.start_with? "windows-" then + return "windows" + else + return os_name + end + end + + + def Utils.get_package_name_from_package_file( local_path ) + filename = File.basename(local_path) + if filename =~ /.*_.*_.*\.zip/ then + new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3') + return new_name.split(",")[0] + end + return nil + end + + + def Utils.get_version_from_package_file( local_path ) + filename = File.basename(local_path) + if filename =~ /.*_.*_.*\.zip/ then + new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3') + return new_name.split(",")[1] + end + return nil + end + + + def Utils.get_os_from_package_file( local_path ) + filename = File.basename(local_path) + if filename =~ /.*_.*_.*\.zip/ then + new_name = filename.sub(/(.*)_(.*)_(.*)\.zip/,'\1,\2,\3') + return new_name.split(",")[2] + end + return nil + end + + def Utils.multi_argument_test( arg, seperator ) + return ( not arg.end_with? seperator and not arg.split( seperator ).select{|x| x.empty?}.length > 0 ) + end + + def Utils.directory_emtpy?(dir_path) + return (Dir.entries(dir_path).join == "...") + end + + + def Utils.checksum(file_path) + if File.exist? file_path then + return `sha256sum #{file_path}`.split(" ")[0] + else + return nil + end + end + + def Utils.get_version() + version_file = "#{File.dirname(__FILE__)}/../../VERSION" + + if not File.exist? version_file then + return nil + end + + f = File.open( version_file, "r" ) + version = f.readline + f.close + + return version + end + + if defined?(HOST_OS).nil? then + HOST_OS = Utils.identify_current_OS() + end + + # set static variable in WORKING_DIR, HOME + if defined?(WORKING_DIR).nil? then WORKING_DIR = Dir.pwd end + if defined?(HOME).nil? then + # get home directory, using Dir.chdir + Dir.chdir + HOME = Dir.pwd + Dir.chdir WORKING_DIR + end + + end diff --git a/src/pkg_server/DistSync.rb b/src/pkg_server/DistSync.rb new file mode 100644 index 0000000..5fb6df9 --- /dev/null +++ b/src/pkg_server/DistSync.rb @@ -0,0 +1,98 @@ +=begin + + DistSync.rb + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +Contributors: +- S-Core Co., Ltd +=end + +require "fileutils" +require "thread" +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +require "packageServer.rb" +require "Action.rb" +require "ScheduledActionHandler.rb" + +class DistSyncAction < Action + + def initialize(time, pkgserver, dist_name ) + super(time, pkgserver.sync_interval) + + @pkgserver = pkgserver + @dist_name = dist_name + end + + + def init + end + + + def execute + # Start to sync job + @pkgserver.log.info "Executing sync action for the #{@dist_name}" + begin + execute_internal() + rescue => e + @pkgserver.log.error e.message + @pkgserver.log.error e.backtrace.inspect + end + end + + + private + def execute_internal() + # update pkg info + @pkgserver.reload_dist_package + + # sync + @pkgserver.sync( @dist_name, false ) + end +end + + +class DistSync + attr_accessor :quit + + # init + def initialize( server ) + @server = server + @handler = ScheduledActionHandler.new + end + + # start thread + def start() + # scan all sync distribution + @server.distribution_list.each do |dist| + # if dist does not have parent server then skip sync + if dist.server_url.empty? then next end + + time = Time.now + @server.log.info "Registered sync-action for dist : #{dist.name}" + @handler.register(DistSyncAction.new(time, @server, dist.name)) + end + + # start handler + @handler.start + end +end diff --git a/src/pkg_server/SocketRegisterListener.rb b/src/pkg_server/SocketRegisterListener.rb new file mode 100644 index 0000000..2ad78e3 --- /dev/null +++ b/src/pkg_server/SocketRegisterListener.rb @@ -0,0 +1,213 @@ +require 'socket' +require 'thread' +$LOAD_PATH.unshift File.dirname(__FILE__) +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/build_server" +require "packageServerConfig" +require "fileTransfer" +require "BuildComm" +require "net/ftp" + +# mutax for register operation +$register_mutex = Mutex.new + +class SocketRegisterListener + + # initialize + def initialize (parent) + @parent_server = parent + @thread = nil + @finish_loop = false + @log = @parent_server.log + end + + # start listening + def start() + @log.info "SocketRegisterListener start" + @thread = Thread.new { + main() + } + end + + # quit listening + def stop_listening() + @finish_loop = true + end + + private + + # thread main + def main() + @log.info "SocketRegisterListener entering main loop" + # server open + begin + @comm_server = BuildCommServer.create(@parent_server.port, @log) + rescue => e + @log.info "Server creation failed" + @log.error e.message + @log.error e.backtrace.inspect + return + end + + # loop + @log.info "Entering Control Listening Loop ... " + @finish_loop = false + @comm_server.wait_for_connection(@finish_loop) do |req| + begin + handle_job_request( req ) + rescue => e + @log.info "error occured in handle_job_request function" + @log.error e.message + @log.error e.backtrace.inspect + end + end + + # quit + @comm_server.terminate + end + + # wait for job requests + def wait_for_job_requests + req_list = [] + req_list.push @tcp_server.accept + + return req_list + end + + # handle job request + def handle_job_request( req ) + + # read request + req_line = req.gets + if req_line.nil? 
then return end + + # parse request + cmd = "" + if req_line.split("|").count > 0 then + cmd = req_line.split("|")[0].strip + end + + case cmd + when "UPLOAD" + Thread.new { + handle_cmd_upload( req_line, req ) + } + when "REGISTER" + Thread.new { + handle_cmd_register( req_line, req ) + } + when "STOP" + handle_cmd_stop( req_line, req ) + else + @log.error "Received Unknown REQ: #{req_line}" + end + @log.info "REQ processing done" + end + + # "UPLOAD" + def handle_cmd_upload( line, req ) + @log.info "Received File transfer REQ : #{line}" + + BuildCommServer.send_begin(req) + + tok = line.split("|").map { |x| x.strip } + if tok.count > 1 then + dock_name = tok[1].strip + incoming_dir = "#{@parent_server.incoming_path}/#{dock_name}" + FileUtils.mkdir_p(incoming_dir) + else + incoming_dir = "#{@parent_server.incoming_path}" + end + + file_path_list = [] + begin + @comm_server.receive_file(req, incoming_dir) + rescue => e + @log.error "Failed to transfer file" + @log.error e.message + @log.error e.backtrace.inspect + end + BuildCommServer.send_end(req) + end + + # "Register" + def handle_cmd_register( line, req ) + @log.info "Received register REQ : #{line}" + BuildCommServer.send_begin(req) + + tok = line.split("|").map { |x| x.strip } + if tok.count < 3 then + @log.error "Received Wrong REQ : #{line}" + BuildCommServer.send(req, "ERROR|Invalid REQ format") + return + end + dist_name = tok[1].strip + + if tok[2].start_with? "DOCK" then + dock_name = tok[3] + idx = 4 + else + dock_name = "" + idx = 2 + end + + file_path_list = [] + + while idx < tok.length do + if dock_name.empty? then + file_path_list.push "#{@parent_server.incoming_path}/#{tok[idx]}" + else + file_path_list.push "#{@parent_server.incoming_path}/#{dock_name}/#{tok[idx]}" + end + idx = idx + 1 + end + # register mutex + $register_mutex.synchronize { + begin + @parent_server.reload_dist_package() + snapshot_name = @parent_server.register( file_path_list, dist_name, true, false, true) + BuildCommServer.send(req,"SUCC|#{snapshot_name}") + rescue => e + @log.error "register failed" + @log.error e.message + @log.error e.backtrace.inspect + BuildCommServer.send(req, "ERROR|#{e.message}") + @parent_server.release_lock_file + return + end + } + + if not dock_name.empty? then + FileUtils.rm_rf "#{@parent_server.incoming_path}/#{dock_name}" + end + + BuildCommServer.send_end(req) + end + + # "STOP" + def handle_cmd_stop( line, req ) + @log.info "Received STOP REQ" + + BuildCommServer.send_begin(req) + + tok = line.split("|").map { |x| x.strip } + if tok.count < 2 then + @log.error "Received Wrong REQ : #{line}" + BuildCommServer.send(req, "ERROR|Invalid REQ format") + return + end + passwd = tok[1].strip + + if @parent_server.passwd.eql? passwd then + @parent_server.finish = true + @log.info "Package server stop flag set" + BuildCommServer.send(req,"SUCC") + else + @log.info "Received stop command, but passwd mismatched : #{passwd}" + BuildCommServer.send(req,"ERROR|Password mismatched!") + end + BuildCommServer.send_end(req) + BuildCommServer.disconnect(req) + end +end + diff --git a/src/pkg_server/client.rb b/src/pkg_server/client.rb index 5cf8423..a7e2034 100644 --- a/src/pkg_server/client.rb +++ b/src/pkg_server/client.rb @@ -1,6 +1,5 @@ - =begin - + client.rb Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. 
@@ -28,29 +27,36 @@ Contributors: =end require "fileutils" +require "thread" $LOAD_PATH.unshift File.dirname(__FILE__) $LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/build_server" require "downloader" require "installer" +require "fileTransfer" require "packageServerConfig" require "package" require "parser" require "utils" require "log" require "Version" - +require "net/ftp" +$update_mutex = Mutex.new +$get_snapshot_mutex = Mutex.new +$filemove_mutex = Mutex.new class Client # constant - SUPPORTED_OS = ["linux", "windows", "darwin"] PKG_LIST_FILE_PREFIX = "pkg_list_" INSTALLED_PKG_LIST_FILE = "installedpackage.list" CONFIG_PATH = "#{PackageServerConfig::CONFIG_ROOT}/client" PACKAGE_INFO_DIR = ".info" DEFAULT_INSTALL_DIR = "#{Utils::HOME}/build_root" DEFAULT_SERVER_ADDR = "http://172.21.17.55/dibs/unstable" + OS_INFO_FILE = "os_info" + ARCHIVE_PKG_LIST_FILE = "archive_pkg_list" - attr_accessor :server_addr, :location, :pkg_hash_os, :is_server_remote, :installed_pkg_hash_loc, :archive_pkg_list, :all_dep_list, :log + attr_accessor :server_addr, :location, :pkg_hash_os, :is_server_remote, :installed_pkg_hash_loc, :archive_pkg_list, :all_dep_list, :log, :support_os_list, :config_dist_path, :download_path, :tmp_path, :snapshot_path, :snapshots_path, :snapshot_url public # initialize @@ -64,348 +70,354 @@ class Client # set default server address, location if server_addr.nil? then server_addr = get_default_server_addr() end - if location.nil? then location = get_default_inst_dir() end + if location.nil? then location = get_default_inst_dir() end # chop server address, if end with "/" - if server_addr.strip.end_with? "/" then server_addr = server_addr.chop end + if server_addr.strip.end_with? "/" then server_addr = server_addr.chop end + + @snapshot_path = nil + @snapshot_url = false + + if is_snapshot_url(server_addr) then + @snapshot_url = true + @server_addr, @snapshot_path = split_addr_and_snapshot(server_addr) + else + @server_addr = server_addr + end - @server_addr = server_addr @location = location @pkg_hash_os = {} @installed_pkg_hash_loc = {} @archive_pkg_list = [] @all_dep_list = [] @is_server_remote = Utils.is_url_remote(server_addr) + @support_os_list = [] + @config_dist_path = CONFIG_PATH + "/" + get_flat_serveraddr + @download_path = @config_dist_path + "/downloads" + @tmp_path = @config_dist_path + "/tmp" + @snapshots_path = @config_dist_path + "/snapshots" + + # create directory + if not File.exist? @config_dist_path then FileUtils.mkdir_p "#{@config_dist_path}" end + if not File.exist? @download_path then FileUtils.mkdir_p "#{@download_path}" end + if not File.exist? @snapshots_path then FileUtils.mkdir_p "#{@snapshots_path}" end + if not File.exist? @tmp_path then FileUtils.mkdir_p "#{@tmp_path}" end # set log if logger.nil? or logger.class.to_s.eql? "String" then @log = Log.new(logger) else @log = logger - end - - FileInstaller.set_logger(@log) - FileDownLoader.set_logger(@log) + end # read installed pkg list, and create hash - FileUtils.mkdir_p "#{@location}" - create_installed_pkg_hash() - - # readk remote pkg list, and hash list - create_remote_pkg_hash(false) - @log.info "Initialize - #{server_addr}, #{location}" - end + if not File.exist? @location then FileUtils.mkdir_p "#{@location}" end + @log.info "Update local package list.. [#{@location}]" + read_installed_pkg_list() + + # read remote pkg list, and hash list + @log.info "Update remote package list and supported os list.." 
+ update() + @log.info "Initialize - #{server_addr}, #{location}" + end public # update package list from server def update() - if not create_remote_pkg_hash(true) then - @log.error "\"#{@server_addr}\" does not have package list file properly." - return false - end - create_default_config(@server_addr) - @log.info "Update package list from \"#{@server_addr}\".. OK" + if not @snapshot_url then + $get_snapshot_mutex.synchronize { + @snapshot_path = get_lastest_snapshot(@is_server_remote) + } + end + @log.info "The lastest snapshot : #{@snapshot_path}" + if @snapshot_path.nil? then + @log.warn "Failed to get the lastest package list" + @snapshot_path = "" + end + + exists_snapshot = false + if is_snapshot_exist(@snapshot_path) then + @log.info "Snapshot information is already cached [#{get_pkglist_path()}]" + exists_snapshot = true + else + @log.info "Snapshot information is not cached" + end + + list_path = get_pkglist_path() + if list_path.nil? then + @log.error "Failed to get package list path" + return false + end + + clean_list() + + if exists_snapshot then + read_supported_os_list(list_path) + read_remote_pkg_list(list_path) + read_archive_pkg_list(list_path) + else + $update_mutex.synchronize { + uniq_name = Utils.create_uniq_name + tmp_dir = File.join(@config_dist_path, uniq_name) + FileUtils.mkdir_p tmp_dir + if not download_os_list(@is_server_remote, tmp_dir) then + @log.error "\"#{@server_addr}\" does not have supported os list file properly." + Utils.execute_shell("rm -rf #{tmp_dir}") + return false + else read_supported_os_list(tmp_dir) end + + if not download_pkg_list(@is_server_remote, tmp_dir) then + @log.error "\"#{@server_addr}\" does not have package list file properly." + Utils.execute_shell("rm -rf #{tmp_dir}") + return false + else read_remote_pkg_list(tmp_dir) end + + if not download_archive_pkg_list(@is_server_remote, tmp_dir) then + @log.error "\"#{@server_addr}\" does not have archive package list file properly. This error can be ignored" + else read_archive_pkg_list(tmp_dir) end + Utils.execute_shell("mv #{tmp_dir} #{list_path}") + @log.info "Moved \"#{tmp_dir}\" to" + @log.info " \"#{list_path}\"" + # tmp_dir should be removed whether mv command is failed + Utils.execute_shell("rm -rf #{tmp_dir}") + remove_snapshots() + } + end + + $update_mutex.synchronize { + create_default_config(@server_addr) + @log.info "Update package list from \"#{@server_addr}\".. OK" + } + return true - end + end + + private + def clean_list() + @pkg_hash_os.clear + @archive_pkg_list.clear + @support_os_list.clear + @log.info "Cleard package list, supported os list.. OK" + end public # download package - def download(pkg_name, os, trace) + def download(pkg_name, os, trace, loc = nil) + + if loc.nil? then loc = @location end dependent_pkg_list = [] # get dependent list if trace then - dependent_pkg_list = get_install_dependent_packages(pkg_name, os, true, false) + dependent_pkg_list = get_install_dependent_packages(pkg_name, os, true, true) if dependent_pkg_list.nil? then @log.error "Failed to get dependency for \"#{pkg_name}\" package" return nil end else dependent_pkg_list = [pkg_name] end - surl = nil - addr_arr = @server_addr.split('/') - if addr_arr[-2].eql? "snapshots" then - surl = @server_addr + "/../.." 
-      else
-         surl = @server_addr
-      end
-
+      surl = @server_addr
      # download files
      file_local_path = []
      dependent_pkg_list.each do |p|
+         pkg_name = get_attr_from_pkg(p, os, "name")
         pkg_path = get_attr_from_pkg(p, os, "path")
         pkg_ver = get_attr_from_pkg(p, os, "version")
+         pkg_checksum = get_attr_from_pkg(p, os, "checksum")
+         pkg_size = get_attr_from_pkg(p, os, "size")
         if pkg_path.nil? or pkg_ver.nil? then
            @log.error "\"#{p}\" package does not exist in package server. If it exist in package server, then try \"pkg-cli update\""
            return nil
         end
         url = surl + pkg_path
         filename = pkg_path.split('/')[-1]
-         if not FileDownLoader.download(url, @location) then
-            @log.error "Failed download #{pkg_name} [#{pkg_ver}]"
-            return nil
+
+         if not FileDownLoader.download(url, loc, @log) then
         end
-         file_path = File.join(@location, filename)
+         file_path = File.join(loc, filename)
         file_local_path.push(file_path)
-         @log.info "Downloaded \"#{p} [#{pkg_ver}]\" package file.. OK"
-         #@log.info " [path : #{file_path}]"
      end
      if trace then
         @log.info "Downloaded \"#{pkg_name}\" package with all dependent packages.. OK"
-      else
-         @log.info "Downloaded only \"#{pkg_name}\" package.. OK"
      end
-      @log.info " [path : #{file_local_path.join(", ")}]"
+      @log.info " [path: #{file_local_path.join(", ")}]"
      return file_local_path
-   end
-
-   public
-   # download source package
-   def download_source(pkg_name, os)
-
-      # get source file path
-      src_path = get_attr_from_pkg(pkg_name, os, "src_path")
-      if src_path.nil? or src_path.empty? then
-         @log.error "#{pkg_name} package does not have source"
-         return nil
-      end
-      file_url = nil
-
-      addr_arr = @server_addr.split('/')
-      if addr_arr[-2].eql? "snapshots" then
-         surl = @server_addr + "/../.." + src_path
-      else
-         surl = @server_addr + src_path
-      end
+   end

-      # download file
-      filename = src_path.split('/')[-1]
-      if not FileDownLoader.download(surl, @location) then
-         @log.error "Failed download #{pkg_name} source"
-         return nil
-      end
-      file_local_path = File.join(@location, filename)
-      @log.info "Downloaded source of #{pkg_name} package.. OK"
-      @log.info " [path : #{file_local_path}]"
+   private
+   def remove_downloaded_pkgs(pkg_name, os)
+      pkg_file_prefix = "#{@download_path}/#{pkg_name}_*_#{os}.zip"
+      pkg_files = Dir[pkg_file_prefix].sort_by { |f| File.mtime(f) }.reverse
+
+      if not pkg_files.nil? and pkg_files.length >= 4 then
+         Utils.execute_shell("rm -rf #{pkg_files[3..-1].join(" ")}")
+         @log.info "Removed old package files.."
+         @log.info " * #{pkg_files[3..-1].join(", ")}"
+      end
+   end
+
+   private
+   def move_downloaded_pkg(filepath, distpath)
+      if filepath.nil? or filepath == "" then return nil end
+      filename = filepath.split('/')[-1]
+      if not File.exist? distpath then FileUtils.mkdir_p "#{distpath}" end
+      distfile = File.join(distpath, filename)
+      @log.info "Moving \"#{filename}\" to download cache directory"
+      @log.info " [path: #{distpath}]"
+      $filemove_mutex.synchronize {
+         if not File.exist? distfile then
+            Utils.execute_shell("mv #{filepath} #{distfile}")
+         else
+            Utils.execute_shell("rm -f #{filepath}")
+            return distfile
+         end
+      }
+
+      if File.exist? distfile then return distfile
+      else
+         @log.info "Failed to move [#{filename}] to "
+         @log.info " [#{distpath}]"
+         return nil
+      end
+   end
+
+   private
+   def remove_snapshots()
+      listing_prefix = "#{@snapshots_path}/*"
+      dirs = Dir[listing_prefix].sort_by { |f| File.mtime(f) }.reverse
+
+      if not dirs.nil? and dirs.length >= 20 then
+         Utils.execute_shell("rm -rf #{dirs[19..-1].join(" ")}")
+         @log.info "Removed old snapshots.."
+ @log.info " * #{dirs[19]} ~ " + end + end + + private + def get_cached_filepath(pkg_filename, pkg_checksum, pkg_size) + + cached_filepath = "#{@download_path}/#{pkg_filename}" + if File.exist? cached_filepath then + checksum = `sha256sum #{cached_filepath}`.split(" ")[0] + size = `du -b #{cached_filepath}`.split[0].strip + if checksum.eql? pkg_checksum and size.eql? pkg_size then + return cached_filepath + end + end + return nil + end - return file_local_path - end - public # download dependent source def download_dep_source(file_name) - file_url = nil - - addr_arr = @server_addr.split('/') - if addr_arr[-2].eql? "snapshots" then - file_url = @server_addr + "/../../source/" + file_name - else - file_url = @server_addr + "/source/#{file_name}" - end - if not FileDownLoader.download(file_url, @location) then + file_url = @server_addr + "/source/#{file_name}" + if not FileDownLoader.download(file_url, @location, @log) then @log.error "Failed download #{file_name}" return nil end file_local_path = File.join(@location, file_name) @log.info "Downloaded \"#{file_name}\" source file.. OK" - @log.info " [path : #{file_local_path}]" + @log.info " [path: #{file_local_path}]" return file_local_path end - public - # check archive file - def check_archive_file(file_name) - - result = false - filename = "archive_pkg_list" - local_file_path = File.join(CONFIG_PATH, filename) - if File.exist? local_file_path then - File.open(local_file_path, "r") do |f| - f.each_line do |l| - if l.strip.eql? file_name.strip then - result = true - break - end - end - end - end - return result - end - public # upload package - def upload(ssh_alias, id, binary_path_list, source_path_list, verify) + def upload(ip, port, ftp_addr, ftp_port, ftp_username, ftp_passwd, binary_path_list) - # check source path list - if source_path_list.nil? or source_path_list.empty? then - @log.error "source package path should be set." - return nil - end - - # verify ssh alias - verify = false - hostfound = false - sshconfig = "#{Utils::HOME}/.ssh/config" - File.open(sshconfig, "r") do |f| - f.each_line do |l| - if l.strip.upcase.start_with? "HOST" then - al = l.strip.split(' ')[1].strip - if al.eql? ssh_alias then hostfound = true - else next end - end - end - end + # check ip and port + if ip.nil? or port.nil? then + @log.error "Ip and port should be set." + return nil + end - if not hostfound then - @log.error "\"#{ssh_alias}\" does not exist in \".ssh/config\" file" + # check binary path list + if binary_path_list.nil? or binary_path_list.empty? then + @log.error "Binary package path should be set." return nil end - # get distribution from server addr - dist = get_distribution() - if dist.nil? then - @log.error "Distribution is nil" - return nil - end - - serveraddr = @server_addr - snapshot = nil - if serveraddr.include? "snapshots" then snapshot = serveraddr.split("/")[-1] end - - # set server homd directory - server_home = `ssh #{ssh_alias} pwd` - server_home = server_home.strip - - # set "pkg-svr" file path - # if pkg-svr exist in path then using pkg-svr - result = `ssh #{ssh_alias} which pkg-svr` - if not( result.nil? or result.empty? or result.strip.empty? ) then - pkg_svr = "pkg-svr" - else - # if pkg-svr not exist in path then try ~/tizen_sdk/dev_tools/pkg-svr - result = `ssh #{ssh_alias} which #{server_home}/tizen_sdk/dev_tools/pkg-svr` - if not( result.nil? or result.empty? or result.strip.empty? 
) then - pkg_svr = "#{server_home}/tizen_sdk/dev_tools/pkg-svr" - else - @log.error "Can't find server's pkg-svr command" - return nil - end - end - pkg_svr = "#{server_home}/tizen_sdk/dev_tools/pkg-svr" + # create unique dock number + dock = Utils.create_uniq_name() - # set incoming directory (~/.build_tools/pkg_server/#{id}/incoming) - incoming_path = "#{server_home}/.build_tools/pkg_server/#{id}/incoming" + # upload file + binary_list = [] + binary_path_list.each do |bpath| + filename = File.basename(bpath) + client = BuildCommClient.create(ip, port, @log) - # set pkg-svr register command - register_command = "#{pkg_svr} register -i #{id} -d #{dist}" + if client.nil? then + @log.error "Failed to create BuildCommClient instance.." + return nil + end + + @log.info "Send ready REQ.. [UPLOAD]" + result = client.send("UPLOAD|#{dock}") + if not result then + @log.error "Failed to send ready REQ.." + return nil + end - # upload source package (scp) - server_src_pkg_list_command = "\"" - source_path_list.each do |spath| - # set source package file path for server filesystem - src_file_name = File.basename(spath) - server_src_pkg_path = "#{incoming_path}/#{src_file_name}" - server_src_pkg_list_command = server_src_pkg_list_command + server_src_pkg_path + "," - # upload source package - if File.exist? spath then - Utils.execute_shell("cd #{File.dirname(spath)};scp #{File.basename(spath)} #{ssh_alias}:#{server_src_pkg_path}") - else - @log.error "#{spath} file does not exist" + begin + result = client.send_file(ftp_addr, ftp_port, ftp_username, ftp_passwd, bpath) + rescue => e + @log.error "FTP failed to put file (exception)" + @log.error "#{e.message}" + @log.error e.backtrace.inspect return nil end - end - server_src_pkg_list_command = server_src_pkg_list_command.strip - if server_src_pkg_list_command.end_with? "," then - server_src_pkg_list_command = server_src_pkg_list_command.chop + "\"" - else - server_src_pkg_list_command = server_src_pkg_list_command + "\"" - end + if not result then + @log.error "FTP failed to put file (result is false)" + return nil + end - # add src package list to register command - register_command = register_command + " -s #{server_src_pkg_list_command} -g" + client.terminate + binary_list.push(filename) + end - # upload binary package (scp) - if not binary_path_list.nil? then - server_bin_pkg_list_command = "\"" - binary_path_list.each do |bpath| - bin_file_name = File.basename(bpath) - bin_pkg_name = bin_file_name.split("_")[0] - if verify then - if not verify_upload(bin_pkg_name, bpath) then - @log.error "Failed to verify \"#{bpath}\" file" + # register file + if not binary_list.empty? then + client = BuildCommClient.create(ip, port, @log) + dist = get_distribution + if dist.empty? then + @log.error "Distribution is empty.." + return nil + end + + @log.info "Send register message.. [REGISTER|#{dist}|DOCK|#{dock}|#{binary_list.join("|")}]" + snapshot = nil + if client.send "REGISTER|#{dist}|DOCK|#{dock}|#{binary_list.join("|")}" then + output = client.read_lines do |l| + line = l.split("|") + if line[0].strip == "ERROR" then + @log.error l.strip return nil + elsif line[0].strip == "SUCC" then + snapshot = line[1].strip end end - - server_bin_pkg_path = "#{incoming_path}/#{bin_file_name}" - server_bin_pkg_list_command = server_bin_pkg_list_command + server_bin_pkg_path + "," - # upload binary package - if File.exist? 
bpath then - Utils.execute_shell("cd #{File.dirname(bpath)};scp #{File.basename(bpath)} #{ssh_alias}:#{server_bin_pkg_path}") - else - @log.error "#{bpath} file does not exist" + if not output then + @log.error "Failed to register" return nil end - end + end - server_bin_pkg_list_command = server_bin_pkg_list_command.strip - if server_bin_pkg_list_command.end_with? "," then - server_bin_pkg_list_command = server_bin_pkg_list_command.chop + "\"" - else - server_bin_pkg_list_command = server_bin_pkg_list_command + "\"" + client.terminate + snapshot = @server_addr + "/snapshots/" + snapshot + @log.info "Registered successfully! [#{binary_path_list.join("|")}]" + if snapshot.empty? then + @log.error "Failed to generate snapshot" end + end - # add bin package list to register command - register_command = register_command + " -p #{server_bin_pkg_list_command}" - end - - @log.info "register_command : #{register_command}" - - # register packages to server - result = `ssh #{ssh_alias} #{register_command}` - if result.strip.include? "Error occured" then - puts result - return nil - end - - # parsing snapshot url to show user - serveraddr = @server_addr - arr = serveraddr.split("/") - if serveraddr.include? "snapshots" then sid = arr[-4] - else sid = arr[-2] end - i = serveraddr.index(sid) - serveraddr = serveraddr[0..i-1] - serveraddr = serveraddr + id + "/" + dist - - addr = [] - result2 = "" - arr_re = result.split("\n") - arr_re.each do |l| - l = l.strip - if l.start_with? "snapshot is generated :" then - addr = l.split(":")[1].split("/") - if addr.include? dist then - i = addr.index(dist) - addr = addr[i+1..-1] - str = "" - addr.each do |l| str = str + "/" + l end - str = serveraddr.strip + str - result2 = result2 + str +"\n" - end - end - end - - @log.info "Upload packages.. OK" - @log.info " [#{binary_path_list.join(", ")}]" - @log.info " [#{source_path_list.join(", ")}]" - return result2 + return snapshot end private @@ -415,19 +427,30 @@ class Client manifest_file = "pkginfo.manifest" uniq_name = Utils.create_uniq_name path = Utils::HOME + "/tmp/#{uniq_name}" - FileUtils.mkdir_p "#{path}" - if not FileInstaller.extract_specified_file(pkg_path, manifest_file, path) then - @log.error "The \"pkginfo.manifest\" file does not exist in \"#{pkg_path}\"" - return false - end - manifest_path = File.join(path, manifest_file) - pkg_hash = Parser.read_pkg_list(manifest_path) - FileUtils.rm_f(manifest_path) - FileUtils.remove_dir(path, true) - - new_pkg_ver = pkg_hash[pkg_name].version - new_pkg_install_dep_list = pkg_hash[pkg_name].install_dep_list - os = pkg_hash[pkg_name].os + if not File.exist? path then FileUtils.mkdir_p "#{path}" end + begin + if not FileInstaller.extract_a_file(pkg_path, manifest_file, path, @log) then + @log.error "The \"pkginfo.manifest\" file does not exist in \"#{pkg_path}\"" + return false + end + manifest_path = File.join(path, manifest_file) + pkg = Parser.read_single_pkginfo_from manifest_path + if File.exists? manifest_path then FileUtils.rm_f(manifest_path) end + FileUtils.remove_dir(path, true) + rescue Interrupt + @log.error "Client: Interrupted.." 
+ FileUtils.remove_dir(path, true) + @log.info "Removed #{path}" + raise Interrupt + rescue RuntimeError => e + @log.error( e.message, Log::LV_USER) + FileUtils.remove_dir(path, true) + @log.info "Removed #{path}" + return false + end + new_pkg_ver = pkg.version + new_pkg_install_dep_list = pkg.install_dep_list + os = pkg.os list = get_all_reverse_install_dependent_packages_remote(pkg_name, os, true) @@ -474,20 +497,36 @@ class Client end dist = "" - server_arr = server.split("/") - if server_arr.include? "snapshots" then - i = server_arr.index("snapshots") - dist = server_arr[i-1] - else dist = File.basename(server) end + dist = File.basename(server) return dist - end + end + + private + def get_flat_serveraddr() + server = @server_addr + if server.nil? or server.empty? then + @log.error "Server addr is nil" + return nil + end + + server = server.delete ".:/@" + return server + end public # install package # install all install dependency packages def install(pkg_name, os, trace, force) + ret = install_internal( pkg_name, os, trace, force ) + return ret + end + + + private + def install_internal(pkg_name, os, trace, force) + if trace.nil? then trace = true end if force.nil? then force = false end @@ -501,43 +540,44 @@ class Client @log.error "#{pkg_name} package does not exist in remote package list" return false end + compare_result = compare_version_with_installed_pkg(pkg_name, pkg_ver) if not force then - case compare_result - when -1 then - @log.warn "\"#{pkg_name}\" package version is bigger then remote package version" - return true - when 0 then - @log.warn "\"#{pkg_name}\" package version is same with remote package version" - return true - when 1, 2 then - end - end - - # if enable trace, create all dependent package list - if trace then - dependent_pkg_list = get_install_dependent_packages(pkg_name, os, true, force) - if dependent_pkg_list.nil? then - @log.error "Failed to get dependency for \"#{pkg_name}\" package" - return false - end - else - dependent_pkg_list = [pkg_name] - end - - # TODO: need to compare dependent package version - # install packages including dependent packages - dependent_pkg_list.each do |pkg| - if not install_pkg(pkg, os, force) then - @log.error "#{pkg} does not exist" - return false - end - add_pkg_info(pkg, os) - end - - # write installed package information to file - write_pkg_hash_to_file(nil) + case compare_result + when -1 then + @log.warn "Checked \"#{pkg_name}\" package version : it is bigger then remote package version" + return true + when 0 then + @log.warn "Checked \"#{pkg_name}\" package version : it is same with remote package version" + return true + when 1, 2 then + end + end + + # if enable trace, create all dependent package list + if trace then + dependent_pkg_list = get_install_dependent_packages(pkg_name, os, true, force) + if dependent_pkg_list.nil? then + @log.error "Failed to get dependency for \"#{pkg_name}\" package" + return false + end + else + dependent_pkg_list = [pkg_name] + end + + # TODO: need to compare dependent package version + # install packages including dependent packages + dependent_pkg_list.each do |pkg| + if not install_pkg(pkg, os, force) then + @log.error "#{pkg} does not exist" + return false + end + add_pkg_info(pkg, os) + end + + # write installed package information to file + write_pkg_hash_to_file(nil) if trace then @log.info "Installed \"#{pkg_name} [#{pkg_ver}]\" package with all dependent packages.. 
OK" @@ -545,12 +585,22 @@ class Client else @log.info "Install only \"#{pkg_name} [#{pkg_ver}]\" package.. OK" end + return true end + public # install local package (ignore dependent packages) - def install_local_pkg(pkg_path, force) + def install_local_pkg(pkg_path, trace, force, repos_paths = nil) + + ret = install_local_pkg_internal(pkg_path, trace, force, repos_paths) + return ret + end + + + private + def install_local_pkg_internal(pkg_path, trace, force, repos_paths) file_name = File.basename(pkg_path) pkg_name = file_name.split('_')[0] @@ -566,50 +616,93 @@ class Client return false end pkg_name = filename.split("_")[0] - type = "binary" manifest_file = "pkginfo.manifest" - pkg_config_path = File.join(@location, PACKAGE_INFO_DIR, pkg_name) uniq_name = Utils.create_uniq_name path = Utils::HOME + "/tmp/#{uniq_name}" - FileUtils.mkdir_p "#{path}" - if not FileInstaller.extract_specified_file(pkg_path, manifest_file, path) then - @log.error "pkginfo.manifest file does not exist in #{pkg_path}" - return false - end - manifest_path = File.join(path, manifest_file) - pkg_hash = Parser.read_pkg_list(manifest_path) - new_pkg_ver = pkg_hash[pkg_name].version - FileUtils.rm_f(manifest_path) - FileUtils.remove_dir(path, true) - - compare_result = compare_version_with_installed_pkg(pkg_name, new_pkg_ver) - if not force then - case compare_result - when -1 then - @log.warn "\"#{pkg_name}\" package version is bigger then remote package version.." - return true - when 0 then - @log.warn "\"#{pkg_name}\" package version is same with remote package version.." - return true - when 1, 2 then + if not File.exist? path then FileUtils.mkdir_p "#{path}" end + begin + if not FileInstaller.extract_a_file(pkg_path, manifest_file, path, @log) then + @log.error "pkginfo.manifest file does not exist in #{pkg_path}" + return false end - end + manifest_path = File.join(path, manifest_file) + pkg = Parser.read_single_pkginfo_from manifest_path + new_pkg_ver = pkg.version + FileUtils.remove_dir(path, true) + rescue Interrupt + @log.error "Client: Interrupted.." + FileUtils.remove_dir(path, true) + @log.info "Removed #{path}" + raise Interrupt + rescue RuntimeError => e + @log.error( e.message, Log::LV_USER) + FileUtils.remove_dir(path, true) + @log.info "Removed #{path}" + return false + end + + compare_result = compare_version_with_installed_pkg(pkg_name, new_pkg_ver) + if not force then + case compare_result + when -1 then + @log.warn "Installed \"#{pkg_name}\" package version is bigger.." + return true + when 0 then + @log.warn "Checked \"#{pkg_name}\" package version : it is same with installed package version" + return true + when 1, 2 then + end + end + + if check_installed_pkg(pkg_name) then + uninstall(pkg_name, false) + end + + if trace then + install_dep_pkgs = pkg.install_dep_list + new_pkg_os = pkg.os + install_dep_pkgs.each do |p| + # check local path first + if not repos_paths.nil? then + # search + binpkgs = [] + repos_paths.each { |repos_path| + binpkgs += Dir.glob("#{repos_path}/#{p.package_name}_*_#{new_pkg_os}.zip") + } + if not binpkgs.empty? 
then + if not install_local_pkg_internal(binpkgs[0], true, false, repos_paths) then + @log.warn "#{p} package is not installed" + end + else + if not install_internal(p.package_name, new_pkg_os, true, false) then + @log.warn "#{p} package is not installed" + end + end + else + if not install_internal(p.package_name, new_pkg_os, true, false) then + @log.warn "#{p} package is not installed" + end + end + end + end - if check_installed_pkg(pkg_name) then - uninstall(pkg_name, false) - end + # install package + ret = FileInstaller.install(pkg_name, pkg_path, "binary", @location, @log) - # install package - ret = FileInstaller.install(pkg_name, pkg_path, "binary", @location) + if not ret then + @log.error "Install failed \"#{pkg_path} [#{new_pkg_ver}]\" file.. " + return false + end - add_local_pkg_info(pkg_name) - write_pkg_hash_to_file(nil) + add_local_pkg_info(pkg_name) + write_pkg_hash_to_file(nil) @log.info "Installed \"#{pkg_path} [#{new_pkg_ver}]\" file.. OK" return true end + public # upgrade package def upgrade(os, trace) @@ -630,7 +723,7 @@ class Client end end - if not install(p, os, trace, false) then + if not install_internal(p, os, trace, false) then @log.error "Failed to install \"#{p}\" package.." return false end @@ -753,7 +846,7 @@ class Client pkg_list.each do |p| if not check_installed_pkg(p) then next end - if not FileInstaller.uninstall(p, type, @location) then + if not FileInstaller.uninstall(p, type, @location, @log) then @log.error "Failed uninstall \"#{pkg_name}\" package" return false end @@ -784,11 +877,11 @@ class Client return end end - FileUtils.rm_rf(@location) + if File.exist? @location then FileUtils.rm_rf(@location) end FileUtils.mkdir_p(@location) - @pkg_hash_os.clear + #@pkg_hash_os.clear @installed_pkg_hash_loc.clear - @archive_pkg_list.clear + #@archive_pkg_list.clear @log.info "Cleaned \"#{@location}\" path.. OK" end @@ -798,13 +891,14 @@ class Client result = [] pkg_hash = @pkg_hash_os[os] + if pkg_hash.nil? then return [] end pkg_list = pkg_hash.values pkg_list.each do |pkg| pkg.build_dep_list.each do |dep| if dep.package_name.eql? pkg_name and not dep.target_os_list.nil? and dep.target_os_list.include? os then - result.push(pkg.package_name) + result.push(pkg) end end end @@ -814,18 +908,20 @@ class Client public # get reverse source dependent packages (just 1 depth) - def get_reverse_source_dependent_packages(pkg_name, os) + def get_reverse_source_dependent_packages(pkg_name) result = [] - pkg_hash = @pkg_hash_os[os] - pkg_list = pkg_hash.values - pkg_list.each do |pkg| - pkg.source_dep_list.each do |p| - if p.package_name.eql? pkg_name then - result.push(pkg.package_name) - end - end - end + @support_os_list.each do |os| + pkg_hash = @pkg_hash_os[os] + pkg_list = pkg_hash.values + pkg_list.each do |pkg| + pkg.source_dep_list.each do |p| + if p.package_name.eql? pkg_name then + result.push(pkg) + end + end + end + end return result end @@ -938,7 +1034,7 @@ class Client i = i + 1 end - @log.info "Get install dependent packages for #{pkg_name} package.. OK" + @log.info "Get install dependent packages for \"#{pkg_name}\" package.. OK" if reverse then return result.reverse.uniq.push(pkg_name) else return result.uniq.insert(0, pkg_name) end end @@ -998,7 +1094,7 @@ class Client if pkg_hash.nil? then return false end pkg = pkg_hash[pkg_name] if pkg.nil? 
then - @log.warn "There is no \"#{pkg_name}\" remote package information in list" + #@log.warn "There is no \"#{pkg_name}\" remote package information in list" return false end @@ -1048,6 +1144,7 @@ class Client if pkg.nil? then return nil end case attr + when "name" then return pkg.package_name when "path" then return pkg.path when "source" then return pkg.source when "version" then return pkg.version @@ -1056,6 +1153,9 @@ class Client when "build_dep_list" then return pkg.build_dep_list when "install_dep_list" then return pkg.install_dep_list when "attribute" then return pkg.attribute + when "checksum" then return pkg.checksum + when "size" then return pkg.size + end end @@ -1209,11 +1309,12 @@ class Client return s end - private + public def get_pkg_from_list(pkg_name, os) pkg_hash = @pkg_hash_os[os] if pkg_hash.nil? then return nil end + pkg = pkg_hash[pkg_name] return pkg @@ -1246,16 +1347,20 @@ class Client # below code should be changed type = path.split('/')[-2] new_pkg_ver = get_attr_from_pkg(pkg_name, os, "version") + pkg_checksum = get_attr_from_pkg(pkg_name, os, "checksum") + pkg_size = get_attr_from_pkg(pkg_name, os, "size") + pkg_path = get_attr_from_pkg(pkg_name, os, "path") + filename = pkg_path.split('/')[-1] # compare version with installed package versiona compare_result = compare_version_with_installed_pkg(pkg_name, new_pkg_ver) if not force then case compare_result when -1 then - @log.warn "\"#{pkg_name}\" package version is bigger then remote package version" + @log.warn "Checked \"#{pkg_name}\" package version : it is bigger then remote package version" return true when 0 then - @log.warn "\"#{pkg_name}\" package version is same with remote package version" + @log.warn "Checked \"#{pkg_name}\" package version : it is same with remote package version" return true end end @@ -1268,32 +1373,34 @@ class Client end end - # download package file - # change download location temporary (back to the origin path after downloading) - loc_back = @location - uniq_name = Utils.create_uniq_name - tmppath = Utils::HOME + "/tmp/#{uniq_name}" - FileUtils.mkdir_p "#{tmppath}" - @location = tmppath - file_local_path = download(pkg_name, os, false)[0] - @location = loc_back - if file_local_path.nil? then - FileUtils.remove_dir(tmppath, true) - return false - end - # install package - ret = FileInstaller.install(pkg_name, file_local_path, type, @location) - FileUtils.rm_f(file_local_path) - FileUtils.remove_dir(tmppath, true) + cached_filepath = nil + if Utils.is_linux_like_os( Utils::HOST_OS ) then + cached_filepath = get_cached_filepath(filename, pkg_checksum, pkg_size) + end + if not cached_filepath.nil? then + @log.info "Cached #{pkg_name} package file.. OK" + ret = FileInstaller.install(pkg_name, cached_filepath, type, @location, @log) + else + filepath = download(pkg_name, os, false, @tmp_path) + if filepath.nil? then + return false + end + filepath = move_downloaded_pkg(filepath[0], @download_path) + if filepath.nil? 
then + return false + end + ret = FileInstaller.install(pkg_name, filepath, type, @location, @log) + remove_downloaded_pkgs(pkg_name, os) + end return ret - end + end private - def compare_version_with_installed_pkg(pkg_name, new_pkg_ver) + def compare_version_with_installed_pkg(pkg_name, new_pkg_ver) if check_installed_pkg_list_file() then - create_installed_pkg_hash() + read_installed_pkg_list() if check_installed_pkg(pkg_name) then installed_pkg_ver = get_attr_from_installed_pkg(pkg_name, "version") compare_result = Utils.compare_version(installed_pkg_ver, new_pkg_ver) @@ -1332,14 +1439,14 @@ class Client else pkg_hash[pkg_name] = get_pkg_from_list(pkg_name, os) end @installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash - @log.info "Added information for \"#{pkg_name}\" package.. OK" + #@log.info "Added information for \"#{pkg_name}\" package.. OK" return pkg_hash end private # add package manifest info def add_local_pkg_info(pkg_name) - + config_path = File.join(@location, PACKAGE_INFO_DIR, "#{pkg_name}") pkg = read_pkginfo_file(pkg_name, config_path) @@ -1356,7 +1463,7 @@ class Client else pkg_hash[pkg_name] = pkg end @installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash - @log.info "Added information for \"#{pkg_name}\" package.. OK" + #@log.info "Added information for \"#{pkg_name}\" package.. OK" return pkg_hash end @@ -1365,75 +1472,203 @@ class Client def read_pkginfo_file(pkg_name, path) file_path = File.join(path, "pkginfo.manifest") - pkg_hash = Parser.read_pkg_list(file_path) + begin + pkg = Parser.read_single_pkginfo_from file_path + rescue => e + @log.error( e.message, Log::LV_USER) + return nil + end - if pkg_hash.nil? then + if pkg.nil? then @log.error "Failed to read manifest file : #{file_path}" return nil end - @log.info "Added information for \"#{pkg_name}\" package.. OK" - return pkg_hash[pkg_name] + @log.info "Read information for \"#{pkg_name}\" package.. OK" + return pkg end - private + # get the lastest snapshot # from_server : if true, update from server - def create_remote_pkg_hash(from_server) - - for os in SUPPORTED_OS - filename = PKG_LIST_FILE_PREFIX + os - file_url = @server_addr + "/" + filename - local_file_path = File.join(CONFIG_PATH, filename) - if from_server then - if not FileDownLoader.download(file_url, CONFIG_PATH) then - return false - end - end - local_file_path = File.join(CONFIG_PATH, filename) - if File.exist? local_file_path then - pkg_hash = Parser.read_pkg_list(local_file_path) - @pkg_hash_os[os] = pkg_hash - end + def get_lastest_snapshot(from_server) + ssinfo_file = "snapshot.info" + file_url = File.join(@server_addr, ssinfo_file) + if from_server then + if not FileDownLoader.download(file_url, @config_dist_path, @log) then + @log.warn "Server does not have \"#{ssinfo_file}\" file. This error can be ignored." + end + else + if File.exist? file_url then FileUtils.cp(file_url, @config_dist_path) + else @log.warn "Server does not have \"#{ssinfo_file}\" file. This error can be ignored." end + end + + file_path = File.join(@config_dist_path, ssinfo_file) + if not File.exist? file_path then return nil end + + contents = File.open(file_path, "r").read + + _list = contents.split("\n\n") + if _list.nil? or _list == "" or _list.empty? then return nil end + list = _list[-1].split("\n") + if list.nil? or list == "" or list.empty? then return nil end + _path = list[-1].split(":") + if _path.nil? 
or _path == "" or _path.length != 2 then return nil end + path = _path[1].strip + if path == nil or path == "" then return nil end + + return path + end + + def get_pkglist_path() + return File.join(@config_dist_path, @snapshot_path) + end + + # if url includes snapshot infomation, retuen true + def is_snapshot_url(addr = nil) + if addr.nil? then addr = @server_addr end + addr_arr = addr.split('/') + if addr_arr[-2].eql? "snapshots" then + return true + else + return false end + end - filename = "archive_pkg_list" - file_url = @server_addr + "/" + filename - if from_server then - if not FileDownLoader.download(file_url, CONFIG_PATH) then - @log.warn "Server does not have \"#{filename}\" file. This error can be ignored." - end + def split_addr_and_snapshot(addr = nil) + if addr.nil? then addr = @server_addr end + addr_arr = addr.split('/') + if addr_arr[-2].eql? "snapshots" then + return addr_arr[0..-3].join("/"), addr_arr[-2..-1].join("/") + else + return nil end - local_file_path = File.join(CONFIG_PATH, filename) + end + + def is_snapshot_exist(ss_path = nil) + if ss_path.nil? then ss_path = @snapshot_path + elsif ss_path == "" then return false end + + local_snapshot_path = File.join(@config_dist_path, ss_path) + if File.directory? local_snapshot_path then return true + else return false end + end + + def read_remote_pkg_list(list_path) + @support_os_list.each do |os| + filename = PKG_LIST_FILE_PREFIX + os + local_file_path = File.join(list_path, filename) + if File.exist? local_file_path then + begin + pkg_hash = Parser.read_repo_pkg_list_from local_file_path + @pkg_hash_os[os] = pkg_hash + @log.info "Get package information for #{os}.. OK" + rescue => e + @log.error( e.message, Log::LV_USER) + @pkg_hash_os[os] = {} + end + else + @log.warn "Failed to read pkg_list_#{os} file" + @pkg_hash_os[os] = {} + end + end + end + + def read_supported_os_list(list_path) + local_file_path = File.join(list_path, OS_INFO_FILE) + if File.exist? local_file_path then + File.open(local_file_path, "r") do |f| + f.each_line do |l| + os = l.strip + if @support_os_list.index(os).nil? then @support_os_list.push(os) end + end + end + @log.info "Get supported os infomation.. OK" + else + @log.warn "Failed to get supported os infomation" + end + end + + def download_os_list(from_server, dist = nil) + if dist.nil? then dist = get_pkglist_path end + file_url = File.join(@server_addr, OS_INFO_FILE) + if from_server then + if not FileDownLoader.download(file_url, dist, @log) then return false end + else + if File.exist? file_url then FileUtils.cp(file_url, dist) + else return false end + end + + return true + end + + def read_archive_pkg_list(list_path) + local_file_path = File.join(list_path, ARCHIVE_PKG_LIST_FILE) if File.exist? local_file_path then File.open(local_file_path, "r") do |f| f.each_line do |l| - @archive_pkg_list.push(l.strip) + pkg = l.strip + if @archive_pkg_list.index(pkg).nil? then @archive_pkg_list.push(pkg) end end - end + end + @log.info "Get archive package infomation.. OK" + else + @log.warn "Failed to get archive package infomation" end + end - return true - end - - private - # create installed package hash - def create_installed_pkg_hash() + def download_archive_pkg_list(from_server, dist = nil) + if dist.nil? then dist = get_pkglist_path end + file_url = File.join(@server_addr, @snapshot_path, ARCHIVE_PKG_LIST_FILE) + if from_server then + if not FileDownLoader.download(file_url, dist, @log) then return false end + else + if File.exist? 
file_url then FileUtils.cp(file_url, dist) + else return false end + end + + return true + end + + def download_pkg_list(from_server, dist = nil) + if dist.nil? then dist = get_pkglist_path end + @support_os_list.each do |os| + filename = PKG_LIST_FILE_PREFIX + os + file_url = File.join(@server_addr, @snapshot_path, filename) + if from_server then + if not FileDownLoader.download(file_url, dist, @log) then return false end + else + if File.exist? file_url then FileUtils.cp(file_url, dist) + else return false end + end + end + + return true + end + + private + # create installed package hash + def read_installed_pkg_list() config_path = File.join(@location, PACKAGE_INFO_DIR) if not File.directory? config_path then return end installed_pkg_hash_key = get_installed_pkg_list_file_path() if @installed_pkg_hash_loc.has_key? installed_pkg_hash_key then return - else + else file_path = installed_pkg_hash_key if not File.exist? file_path then #raise RuntimeError, "#{file_path} file does not exist" return end - pkg_hash = Parser.read_pkg_list(file_path) - @installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash - end - end + begin + pkg_hash = Parser.read_repo_pkg_list_from file_path + rescue => e + @log.error( e.message, Log::LV_USER) + return + end + @installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash + end + end private # check to exist installed package list file @@ -1463,11 +1698,9 @@ class Client end if not pkg_hash.nil? then config_path = File.join(@location, PACKAGE_INFO_DIR) - FileUtils.mkdir_p "#{config_path}" + if not File.exist? config_path then FileUtils.mkdir_p "#{config_path}" end if File.exist? file_path then File.delete(file_path) end File.open(file_path, "a+") do |file| - file.puts "ORIGIN : #{@server_addr}" - file.puts "\n" pkg_list = pkg_hash.values pkg_list.each do |pkg| pkg.print_to_file(file) diff --git a/src/pkg_server/clientOptParser.rb b/src/pkg_server/clientOptParser.rb index ae12c36..695bd43 100644 --- a/src/pkg_server/clientOptParser.rb +++ b/src/pkg_server/clientOptParser.rb @@ -36,8 +36,7 @@ def set_default( options ) if options[:v].nil? then options[:v] = false end end -def option_error_check( options ) - $log.info "option error check" +def option_error_check( options ) case options[:cmd] @@ -51,167 +50,151 @@ def option_error_check( options ) when "download" then if options[:pkg].nil? or options[:pkg].empty? then - raise ArgumentError, "Usage: pkg-cli download -p [-o ] [-l ] [-u ] [-t]" - end - - when "upload" then - if options[:alias].nil? or options[:alias].empty? or\ - options[:id].nil? or options[:id].empty? or \ - options[:srcpkg].nil? or options[:srcpkg].empty? then - raise ArgumentError, "Usage: pkg-cli upload -a -i -s [-b ]" - end - - when "source" then - if options[:pkg].nil? or options[:pkg].empty? then - raise ArgumentError, "Usage: pkg-cli source -p [-o ] [-l ] [-u ]" + raise ArgumentError, "Usage: pkg-cli download -P [-o ] [-l ] [-u ] [--trace]" end when "install" then if options[:pkg].nil? or options[:pkg].empty? then - raise ArgumentError, "Usage: pkg-cli install -p [-o ] [-l ] [-u ] [-t] [-f]" + raise ArgumentError, "Usage: pkg-cli install -P [-o ] [-l ] [-u ] [--trace] [--force]" end when "install-file" then if options[:pkg].nil? or options[:pkg].empty? then - raise ArgumentError, "Usage: pkg-cli install-lpkg -p [-l ] [-f]" + raise ArgumentError, "Usage: pkg-cli install-lpkg -P [-l ] [-u ] [--trace] [--force]" end when "uninstall" then if options[:pkg].nil? or options[:pkg].empty? 
then
-            raise ArgumentError, "Usage: pkg-cli uninstall -p  [-l ] [-t]"
+            raise ArgumentError, "Usage: pkg-cli uninstall -P  [-l ] [--trace]"
        end

    when "show-rpkg" then
        if options[:pkg].nil? or options[:pkg].empty? then
-            raise ArgumentError, "Usage: pkg-cli show-rpkg -p  [-o ] [-u ]"
+            raise ArgumentError, "Usage: pkg-cli show-rpkg -P  [-o ] [-u ]"
        end

    when "list-rpkg" then

    when "show-lpkg" then
        if options[:pkg].nil? or options[:pkg].empty? then
-            raise ArgumentError, "Usage: pkg-cli show-lpkg -p  [-l ]"
+            raise ArgumentError, "Usage: pkg-cli show-lpkg -P  [-l ]"
        end

    when "list-lpkg" then

    when "build-dep" then
        if options[:pkg].nil? or options[:pkg].empty? then
-            raise ArgumentError, "Usage: pkg-cli build-dep -p  [-o ]"
+            raise ArgumentError, "Usage: pkg-cli build-dep -P  [-o ]"
        end

    when "install-dep" then
        if options[:pkg].nil? or options[:pkg].empty? then
-            raise ArgumentError, "Usage: pkg-cli install-dep -p  [-o ]"
+            raise ArgumentError, "Usage: pkg-cli install-dep -P  [-o ]"
        end

    else
-        raise ArgumentError, "input option incorrect : #{options[:cmd]}"
+        raise ArgumentError, "Input is incorrect : #{options[:cmd]}"
    end
end

def option_parse
    options = {}

-    optparse = OptionParser.new do|opts|
-        # Set a banner, displayed at the top
-        # of the help screen.
-        opts.banner = "Usage: pkg-cli {update|clean|download|source|install|uninstall|upgrade|rpkg-show|rpkg-list|lpkg-show|lpkg-list|build-dep|install-dep|help} ..." + "\n" \
-            + "\t" + "pkg-cli update [-u ]" + "\n" \
-            + "\t" + "pkg-cli clean [-l ] [-f]" + "\n" \
-            + "\t" + "pkg-cli download -p  [-o ] [-l ] [-u ] [-t]" + "\n" \
-            + "\t" + "pkg-cli upload -a  -i  -s  [-b  [-o ] [-l ] [-u ]" + "\n" \
-            + "\t" + "pkg-cli install -p  [-o ] [-l ] [-u ] [-t] [-f]" + "\n" \
-            + "\t" + "pkg-cli install-file -p  [-l ] [-f]" + "\n" \
-            + "\t" + "pkg-cli uninstall -p  [-l ] [-t]" + "\n" \
-            + "\t" + "pkg-cli upgrade [-l ] [-o ] [-u ] [-t]" + "\n" \
+    banner = "Command-line tool to request services from the package-server and control packages." + "\n" \
+             + "\n" + "Usage: pkg-cli  [OPTS] or pkg-cli (-h|-v)" + "\n" \
+             + "\n" + "Subcommands:" + "\n" \
+             + "\t" + "update          Update to the latest package in your SDK environment." + "\n" \
+             + "\t" + "clean           Delete the package in your SDK environment." + "\n" \
+             + "\t" + "download        Download the package in your SDK environment." + "\n" \
+             + "\t" + "install         Download the package from package-server and install the package in your SDK environment." + "\n" \
+             + "\t" + "install-file    Install the package in your SDK environment." + "\n" \
+             + "\t" + "uninstall       Uninstall the package in your SDK environment." + "\n" \
+             + "\t" + "upgrade         Upgrade your SDK environment." + "\n" \
+             + "\t" + "check-upgrade   Check packages to upgrade." + "\n" \
+             + "\t" + "show-rpkg       Show the package in the package-server." + "\n" \
+             + "\t" + "list-rpkg       Show all the packages in the package-server." + "\n" \
+             + "\t" + "show-lpkg       Show the package in your SDK environment." + "\n" \
+             + "\t" + "list-lpkg       Show all the packages in your SDK environment." + "\n" \
+             + "\t" + "build-dep       Show build-dependency packages" + "\n" \
+             + "\t" + "install-dep     Show install-dependency packages" + "\n" \
+             + "\n" + "Subcommand usage:" + "\n" \
+             + "\t" + "pkg-cli update [-u ]" + "\n" \
+             + "\t" + "pkg-cli clean [-l ] [--force]" + "\n" \
+             + "\t" + "pkg-cli download -P  [-o ] [-l ] [-u ] [--trace]" + "\n" \
+             + "\t" + "pkg-cli install -P  [-o ] [-l ] [-u ] [--trace] [--force]" + "\n" \
+             + "\t" + "pkg-cli install-file -P  [-l ] [-u ] [--trace] [--force]" + "\n" \
+             + "\t" + "pkg-cli uninstall -P  [-l ] [--trace]" + "\n" \
+             + "\t" + "pkg-cli upgrade [-l ] [-o ] [-u ] [--trace]" + "\n" \
              + "\t" + "pkg-cli check-upgrade [-l ] [-o ] [-u ]" + "\n" \
-            + "\t" + "pkg-cli show-rpkg -p  [-o ] [-u ]" + "\n" \
+            + "\t" + "pkg-cli show-rpkg -P  [-o ] [-u ]" + "\n" \
              + "\t" + "pkg-cli list-rpkg [-o ] [-u ]" + "\n" \
-            + "\t" + "pkg-cli show-lpkg -p  [-l ]" + "\n" \
+            + "\t" + "pkg-cli show-lpkg -P  [-l ]" + "\n" \
              + "\t" + "pkg-cli list-lpkg [-l ]" + "\n" \
-            + "\t" + "pkg-cli build-dep -p  [-o ]" + "\n" \
+            + "\t" + "pkg-cli build-dep -P  [-o ]" + "\n" \
-            + "\t" + "pkg-cli install-dep -p  [-o ]" + "\n" \
+            + "\t" + "pkg-cli install-dep -P  [-o ]" + "\n" \
+             + "\n" + "Options:" + "\n"

+    optparse = OptionParser.new(nil, 32, ' '*8) do|opts|
        # Set a banner, displayed at the top
        # of the help screen.
+        opts.banner = banner

-        opts.on( '-p', '--pkg ', 'package name or package file name' ) do |name|
+        opts.on( '-P', '--pkg ', 'package name or package file name' ) do |name|
            options[:pkg] = name
        end

-        opts.on( '-o', '--os ', 'target operating system' ) do |os|
+        opts.on( '-o', '--os ', 'target operating system: ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64' ) do |os|
            options[:os] = os
        end

-        opts.on( '-u', '--url ', 'package server url' ) do|url|
+        opts.on( '-u', '--url ', 'package server url: http://127.0.0.1/dibs/unstable' ) do |url|
            options[:url] = url
        end

-        opts.on( '-a', '--alias ', 'ssh alias' ) do|al|
-            options[:alias] = al
-        end
-
-        opts.on( '-i', '--id ', 'id' ) do|id|
-            options[:id] = id
-        end
-
-        opts.on( '-l', '--loc ', 'location' ) do |loc|
-            options[:loc] = loc
+        opts.on( '-l', '--loc ', 'install/download location' ) do |loc|
+            options[:loc] = loc
        end

-        opts.on( '-s', '--src ', 'source package path' ) do|src|
-            options[:srcpkg] = []
-            list = src.tr(" \t","").split(",")
-            list.each do |l|
-                if l.start_with? "~" then l = Utils::HOME + l.delete("~") end
-                options[:srcpkg].push l
-            end
-        end
-
-        opts.on( '-t', '--trace', 'enable trace dependent packages' ) do
+        opts.on( '--trace', 'enable trace dependent packages' ) do
            options[:t] = true
        end

-        opts.on( '-b', '--bin ', 'binary package path' ) do|bin|
-            options[:binpkg] = []
-            list = bin.tr(" \t","").split(",")
-            list.each do |l|
-                if l.start_with? "~" then l = Utils::HOME + l.delete("~") end
-                options[:binpkg].push l
-            end
-        end
-
-        opts.on( '-f', '--force', 'enable force' ) do
+        opts.on( '--force', 'enable force' ) do
            options[:f] = true
        end

-        opts.on( '-h', '--help', 'display this information' ) do
+        opts.on( '-h', '--help', 'display help' ) do
            puts opts
+            exit
+        end
+
+        opts.on( '-v', '--version', 'display version' ) do
+            puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version()
            exit
        end
-    end
-
-    $log.info "option parsing start"
-    $log.info "option is : " + ARGV * ","

    cmd = ARGV[0]

-    if cmd.eql? "update" or cmd.eql? "download" or \
-        cmd.eql? "install" or cmd.eql? "show-rpkg" or \
-        cmd.eql? "list-rpkg" or cmd.eql? "source" or \
-        cmd.eql? "uninstall" or cmd.eql? "show-lpkg" or \
-        cmd.eql? "list-lpkg" or cmd.eql? "upload" or \
-        cmd.eql? "install-file" or cmd.eql? "clean" or \
-        cmd.eql? "upgrade" or cmd.eql? "check-upgrade" or \
-        cmd.eql? "build-dep" or cmd.eql? "install-dep" or \
+    if cmd.eql? "update" or cmd.eql? "download" or
+        cmd.eql? "install" or cmd.eql? "show-rpkg" or
+        cmd.eql? "list-rpkg" or
+        cmd.eql? "uninstall" or cmd.eql? "show-lpkg" or
+        cmd.eql? "list-lpkg" or
+        cmd.eql? "install-file" or cmd.eql? "clean" or
+        cmd.eql? "upgrade" or cmd.eql? "check-upgrade" or
+        cmd.eql? "build-dep" or cmd.eql? "install-dep" or
+        cmd =~ /(-v)|(--version)/ or cmd =~ /(help)|(-h)|(--help)/ then
-        if cmd.eql? "help" then ARGV[0] = "-h" end
+
+        if cmd.eql? "help" then
+            ARGV[0] = "-h"
+        end
        options[:cmd] = ARGV[0]
    else
-        raise ArgumentError, "first parameter must be {update|clean|download|upload|source|install|install-file|uninstall|upgrade|check-upgrade|show-rpkg|list-rpkg|show-lpkg|list-lpkg|build-dep|install-dep|help} : your input is #{ARGV[0]}"
+        raise ArgumentError, "Usage: pkg-cli  [OPTS] or pkg-cli -h"
    end

    optparse.parse!
-
-    $log.info "option parsing end"

    set_default options
diff --git a/src/pkg_server/distribution.rb b/src/pkg_server/distribution.rb
index 535f99e..d386890 100644
--- a/src/pkg_server/distribution.rb
+++ b/src/pkg_server/distribution.rb
@@ -29,14 +29,17 @@ Contributors:
 require 'fileutils'
 $LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common"
 require "parser"
+require "installer"

 class Distribution
-    attr_accessor :name, :location, :server_url
+    attr_accessor :name, :location, :server_url, :lock_file_path

    # constant
-    SUPPORTED_OS = ["linux", "windows", "darwin"]
    PKG_LIST_FILE_PREFIX = "pkg_list_"
-    ARCHIVE_PKG_LIST = "archive_pkg_list"
+    ARCHIVE_PKG_FILE = "archive_pkg_list"
+    OS_INFO_FILE = "os_info"
+    SNAPSHOT_INFO_FILE = "snapshot.info"
+    LOCK_FILE = ".lock_file"

    def initialize( name, location, server_url, pkg_server )
@@ -45,31 +48,32 @@ class Distribution
        @server_url = server_url
        @log = pkg_server.log
        @integrity = pkg_server.integrity
+        @lock_file_path = "#{location}/#{LOCK_FILE}"
+        @pkg_hash_os = {}
+        @archive_pkg_list = []
+        @snapshot_hash = []
+        @support_os_list = []

        @log.info "Distribution class[#{name}] initialize "

-        @pkg_hash_os = {}
-        for os in SUPPORTED_OS
-            if @location.empty? or ( not File.exist? "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" ) then
-                @pkg_hash_os[os] = {}
-            else
-                @pkg_hash_os[os] = Parser.read_pkg_list( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" )
-            end
-        end
+        initialize_pkg_list()
    end

-    def register (file_path, pkg)
-        @log.info "Distribution class's register"
+    def register (file_path, pkg, internal_flag)

        if pkg.nil? then
            raise RuntimeError, "package file does not contain pkginfo.manifest: [#{file_path}]"
        end

+        if not @pkg_hash_os.has_key?(pkg.os) then
+            raise RuntimeError, "package server does not support package's os : [#{pkg.os}]"
+        end
+
        exist_pkg = @pkg_hash_os[pkg.os][pkg.package_name]

        # version check and if existing version is higher then upload version?
-        if not exist_pkg.nil?
-            if not ( Utils.compare_version( exist_pkg.version, pkg.version ).eql? 1 ) then
-                raise RuntimeError, "existing package's version is higher than register package"
+        if (not exist_pkg.nil?)
and (not internal_flag) then + if not ( Utils.compare_version( exist_pkg.version, pkg.version ) == 1 ) then + raise RuntimeError, "existing package's version is higher then register package : [#{pkg.package_name}] in [#{pkg.os}]" end end @@ -77,11 +81,16 @@ class Distribution pkg.origin = "local" pkg.source = "" pkg.path = "/binary/" + File.basename( file_path ) - # TODO: windows and mac : sha256sum - if Utils::HOST_OS.eql? "linux" then - pkg.checksum = `sha256sum #{file_path}`.split(" ")[0] - end - pkg.size = `du -b #{file_path}`.split[0].strip + if pkg.checksum.empty? then + # TODO: windows and mac : sha256sum + if Utils.is_unix_like_os( Utils::HOST_OS ) then + pkg.checksum = `sha256sum #{file_path}`.split(" ")[0] + end + end + + if pkg.size.empty? then + pkg.size = `du -b #{file_path}`.split[0].strip + end @pkg_hash_os[pkg.os][pkg.package_name] = pkg @@ -89,7 +98,6 @@ class Distribution end def register_for_test (file_path, pkg) - @log.info "Distribution class's register for test" if pkg.nil? then raise RuntimeError, "package file does not contain pkginfo.manifest: [#{file_path}]" end @@ -99,7 +107,7 @@ class Distribution pkg.source = "" pkg.path = "/temp/" + File.basename( file_path ) # TODO: windows and mac : sha256sum - if Utils::HOST_OS.eql? "linux" then + if Utils.is_unix_like_os( Utils::HOST_OS ) then pkg.checksum = `sha256sum #{file_path}`.split(" ")[0] end pkg.size = `du -b #{file_path}`.split[0].strip @@ -107,8 +115,15 @@ class Distribution return pkg end - def generate_snapshot (name, base_snapshot, append_pkg_list) - @log.info "Distribution class's generate snapshot" + def register_archive_pkg( archive_pkg ) + if not @archive_pkg_list.include? archive_pkg then + @archive_pkg_list.push archive_pkg + else + @log.error("archive package already exist : [#{archive_pkg}]", Log::LV_USER) + end + end + + def generate_snapshot(name, base_snapshot, from_cmd) # if name is nil or empty then create uniq name if name.nil? or name.empty? then name = Utils.create_uniq_name @@ -119,294 +134,372 @@ class Distribution raise "Snapshot is already exist: #{name}" end - if base_snapshot.nil? then base_snapshot = "" else base_snapshot.strip! end - if append_pkg_list.nil? then append_pkg_list = [] end + FileUtils.mkdir "#{@location}/snapshots/#{name}" + + # base_snapshot_path + if base_snapshot.empty? then + snapshot_path = @location + else + snapshot_path = "#{@location}/snapshots/#{base_snapshot.strip}" + end - if base_snapshot.empty? and append_pkg_list.empty? then - FileUtils.mkdir "#{@location}/snapshots/#{name}" + # copy package list + @support_os_list.each do |os| + FileUtils.copy_file( "#{snapshot_path}/#{PKG_LIST_FILE_PREFIX}#{os}", + "#{@location}/snapshots/#{name}/#{PKG_LIST_FILE_PREFIX}#{os}" ) + end + + # copy archive package list + FileUtils.copy_file( "#{snapshot_path}/#{ARCHIVE_PKG_FILE}", + "#{@location}/snapshots/#{name}/#{ARCHIVE_PKG_FILE}" ) + + # copy os info file + FileUtils.copy_file( "#{snapshot_path}/#{OS_INFO_FILE}", + "#{@location}/snapshots/#{name}/#{OS_INFO_FILE}" ) - for os in SUPPORTED_OS - FileUtils.copy( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", - "#{@location}/snapshots/#{name}/#{PKG_LIST_FILE_PREFIX}#{os}" ) + # generate temp file + tmp_file_name = "" + while ( tmp_file_name.empty? ) + tmp_file_name = @location + "/temp/." + Utils.create_uniq_name + + if File.exist? 
tmp_file_name then + tmp_file_name = "" end + end - # copy archive package list - begin - FileUtils.copy( "#{@location}/#{ARCHIVE_PKG_LIST}", "#{@location}/snapshots/#{name}/#{ARCHIVE_PKG_LIST}" ) - rescue => e - @log.warn "ARCHIVE_PKG_LIST not exist" + FileUtils.copy_file( "#{@location}/#{SNAPSHOT_INFO_FILE}", tmp_file_name ) + File.open( tmp_file_name, "a" ) do |f| + f.puts "name : #{name}" + f.puts "time : #{Time.now.strftime("%Y%m%d%H%M%S")}" + if from_cmd then + f.puts "type : manual" + else + f.puts "type : auto" end + f.puts "path : /snapshots/#{name}" + f.puts + end + FileUtils.mv( tmp_file_name, "#{@location}/#{SNAPSHOT_INFO_FILE}", :force => true ) + + # snapshot is generated + @log.output( "snapshot is generated : #{@location}/snapshots/#{name}", Log::LV_USER) + return name + end + def sync(force) + pkg_list_update_flag = false + archive_update_flag = false + distribution_update_flag = false - @log.output( "snapshot is generated : #{@location}/snapshots/#{name}", Log::LV_USER) - # base_snapshot is exist - elsif not ( base_snapshot.empty? ) then - FileUtils.mkdir "#{@location}/snapshots/#{name}" - - for os in SUPPORTED_OS - # check base snapshot exist - if (not File.exist? "#{@location}/snapshots/#{base_snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}") then - raise RuntimeError, "Can't find base snapshot [#{base_snapshot}]" - end - - base_pkg_list = Parser.read_pkg_list( "#{@location}/snapshots/#{base_snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}" ) - snapshot_generate2( name, os, base_pkg_list, append_pkg_list ) + # reload pkg list from newest pkg list file + reload_distribution_information() + + # check distribution's server_url + if @server_url.empty? then + @log.error("This distribution has not remote server", Log::LV_USER) + return false + end + + # generate client class + client = Client.new( @server_url, "#{@location}/binary", @log ) + + # update os list + add_os_list = client.support_os_list - @support_os_list + add_os_list.each do |os| + add_os(os) + pkg_list_update_flag = true + end + + if force then + remove_os_list = @support_os_list - client.support_os_list + remove_os_list.each do |os| + remove_os(os) + pkg_list_update_flag = true end - - # copy archive package list - begin - FileUtils.copy( "#{@location}/#{ARCHIVE_PKG_LIST}", "#{@location}/snapshots/#{name}/#{ARCHIVE_PKG_LIST}" ) - rescue => e - @log.warn "ARCHIVE_PKG_LIST not exist" + end + update_pkg_list = [] + + @support_os_list.each do |os| + # error check + if client.pkg_hash_os[os].nil? then + @log.error("package server does not have os : #{os}", Log::LV_USER) + next + end + + server_pkg_name_list = client.pkg_hash_os[os].keys + local_pkg_name_list = @pkg_hash_os[os].keys + full_pkg_name_list = server_pkg_name_list + local_pkg_name_list + + full_pkg_name_list.each do |pkg_name| + ret = sync_package( pkg_name, client, os, force ) + if not ret.nil? 
then + update_pkg_list.push(ret) + pkg_list_update_flag = true + end end + end + + # sync archive package + update_archive_list = sync_archive_pkg() - @log.output( "snapshot is generated : #{@location}/snapshots/#{name}", Log::LV_USER) - # base_snapshot is empty - else - FileUtils.mkdir "#{@location}/snapshots/#{name}" + # lock + lock_file = Utils.file_lock(@lock_file_path) - for os in SUPPORTED_OS - base_pkg_list = Parser.read_pkg_list( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" ) - snapshot_generate2( name, os, base_pkg_list, append_pkg_list ) - end + # reload pkg list from newest pkg list file + reload_distribution_information() - # copy archive package list - begin - FileUtils.copy( "#{@location}/#{ARCHIVE_PKG_LIST}", "#{@location}/snapshots/#{name}/#{ARCHIVE_PKG_LIST}" ) - rescue => e - @log.warn "ARCHIVE_PKG_LIST not exist" + # update pkg_list hash + update_pkg_list.each do |update_option, os, pkg| + # if updated package's os is removed then skip update + if not @support_os_list.include? os then + next end - @log.output( "snapshot is generated : #{@location}/snapshots/#{name}", Log::LV_USER) - end - end + case update_option + when "ADD" + local_pkg = @pkg_hash_os[os][pkg.package_name] + + if (not force) and (not local_pkg.nil?) then + # if updated package 'local' package then skip + if local_pkg.origin.eql? "local" then + next + end - def snapshot_generate2( name, os, pkg_list, append_pkg_list ) - @log.info "snapshot_generate2: input append_pkg_list #{append_pkg_list}" - append_pkg_list.each do |pkg| - # os check - if pkg.os.eql? os.strip then pkg_list[pkg.package_name] = pkg end + # if package is update when sync time then skip + if Utils.compare_version(local_pkg.version, pkg.version) == -1 then + next + end + end + + @pkg_hash_os[os][pkg.package_name] = pkg + when "REMOVE" + if not force then + if @pkg_hash_os[os][pkg.package_name].origin.eql? "local" then + next + end + end + + @pkg_hash_os[os].delete(pkg.package_name) + else + @log.error("Unsupportd update option : #{update_option}", Log::LV_USER) + next + end end - - File.open( "#{@location}/snapshots/#{name}/#{PKG_LIST_FILE_PREFIX}#{os}", "w" ) do |f| - pkg_list.each_value do |pkg| - pkg.print_to_file(f) - f.puts + + update_archive_list.each do |pkg| + if not @archive_pkg_list.include? pkg then + @archive_pkg_list.push pkg + archive_update_flag = true end - end - end + end - def sync( force, os ) + # update pkg_list file + if pkg_list_update_flag then + write_all_pkg_list() + distribution_update_flag = true + end - # check distribution's server_url - if @server_url.empty? then - @log.error( "This distribution has not remote server" , Log::LV_USER) - return + # update archive list file + if archive_update_flag then + write_archive_pkg_list() + distribution_update_flag = true end - # generate client class - client_bin = Client.new( @server_url, "#{@location}/binary", @log ) - client_bin.update - client_src = Client.new( @server_url, "#{@location}/source", @log ) - client_src.update - - source_pkg_path_list = [] - dep_pkg_path_list = [] - - # error check - if client_bin.pkg_hash_os[os].nil? then - raise "Package list can't generated. url is #{@server_url}. os is #{os}" - end + # unlock + Utils.file_unlock(lock_file) - # check existing source package list - @pkg_hash_os[os].each_value do |pkg| - if not source_pkg_path_list.include? 
pkg.src_path then - source_pkg_path_list.push pkg.src_path - end - end + return distribution_update_flag + end - full_pkg_list = client_bin.pkg_hash_os[os].merge(@pkg_hash_os[os]) + def add_os(os) + if @support_os_list.include? os then + @log.error("#{os} is already exist ", Log::LV_USER) + return + end - full_pkg_list.each_key do |pkg_name| - server_pkg = client_bin.pkg_hash_os[os][pkg_name] - local_pkg = @pkg_hash_os[os][pkg_name] + # update os information + @support_os_list.push os + @pkg_hash_os[os] = {} + File.open("#{@location}/#{OS_INFO_FILE}", "a") do |f| + f.puts os + end - # if server and local has package - if ( not server_pkg.nil? ) and ( not local_pkg.nil? ) then - # if server version is not updated then skip - if ( Utils.compare_version( local_pkg.version, server_pkg.version ).eql? 0 ) then - @log.info "existing packages version equal to server's version. so package[#{pkg_name}] skip" - - next - end + # create pkg_list_#{os} file + File.open( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", "w" ) do |f| end + end - # if server's pakcage is local package and mode is not force then local package will be upaded - if ( local_pkg.origin.eql? "local" ) and ( not force ) then - @log.info "package [#{pkg_name}] is local package. so skip update" - - next - end + def clean( remain_snapshot_list ) + file_list = [] + used_archive_list = [] - # package update - @log.info "update package from server: [#{pkg_name}]" - file_path_list = client_bin.download( pkg_name, os, false ) - - # file download error check - if file_path_list.nil? or file_path_list.empty? then - @log.error( "Can't download package file #{pkg_name}" , Log::LV_USER) - next - else - @log.info "download binary package successfully: [#{pkg_name}]" - file_path = file_path_list[0] - end - - # update pkg class - server_pkg.path = "/binary/#{File.basename(file_path)}" - server_pkg.origin = client_bin.server_addr - @pkg_hash_os[os][pkg_name] = server_pkg - - dep_pkg_path_list = dep_pkg_path_list + server_pkg.source_dep_list - - # if binary only package, then skip downloading its source - if server_pkg.src_path.empty? then next end - - # if binary's source package is not downlaoded, download it - if ( not source_pkg_path_list.include? server_pkg.src_path ) then - @log.info "download source package: [#{server_pkg.src_path}]" - file = client_src.download_source( pkg_name, os ) - if file.nil? then - @log.error "Can't download source package [#{pkg_name}]" + # collect remaining file's name from current package server version + @support_os_list.each do |os| + @pkg_hash_os[os].each_value{ |pkg| + file_list.push(pkg.path.sub("/binary/","")) + + pkg.source_dep_list.each do |source_dep| + if @archive_pkg_list.include? source_dep.package_name then + used_archive_list.push source_dep.package_name else - source_pkg_path_list.push server_pkg.src_path - end - end - # if package exist only server - elsif ( not server_pkg.nil? ) then - #downnlaod binary package - file_path_list = client_bin.download( pkg_name, os, false ) - - # file download error check - if file_path_list.nil? or file_path_list.empty? 
then - @log.error( "Can't download package file #{pkg_name}", Log::LV_USER) - next - else - @log.info "download binary package successfully: [#{pkg_name}]" - file_path = file_path_list[0] - end - - # update pkg class - server_pkg.path = "/binary/#{File.basename(file_path)}" - server_pkg.origin = client_bin.server_addr - @pkg_hash_os[os][pkg_name] = server_pkg - - dep_pkg_path_list = dep_pkg_path_list + server_pkg.source_dep_list - - # if binary only package, then skip downloading its source - if server_pkg.src_path.empty? then next end - - # if binary's source package is not downlaoded, download it - if not source_pkg_path_list.include? server_pkg.src_path then - @log.info "download source package: [#{server_pkg.src_path}]" - file = client_src.download_source( pkg_name, os ) - if file.nil? - @log.error "Can't download source package [#{server_pkg.src_path}]" - else - source_pkg_path_list.push server_pkg.src_path + @log.error("Can't find dependency source package : #{source_dep.package_name}") end end - # if package exist only local - elsif ( not local_pkg.nil? ) then - # if pakcage is not local package then server's package is removed - # so, local package remove - if not local_pkg.origin.eql? "local" then - @pkg_hash_os[os].delete(pkg_name) + } + end + + # remain only used archive package + @archive_pkg_list = used_archive_list.uniq + write_archive_pkg_list + + # collect remaning file's name from snapshot list + remain_snapshot_list.each do |snapshot| + os_info = "#{@location}/snapshots/#{snapshot}/#{OS_INFO_FILE}" + os_list = [] + # if snapshot has os_info file then using that file + if File.exist? os_info + File.open( os_info, "r" ) do |f| + f.each_line do |l| + os_list.push l.strip + end end + # if snapshot does not have os_info file then using package server os_info list else - raise RuntimeError,"hash merge error!" + os_list = @support_os_list end - end - @log.info "pkg file update end" - # download dependency source packages - dep_pkg_path_list.uniq.each do |dep| - if dep.package_name.strip.empty? then next end - @log.info "download dep package: [#{dep.package_name}]" - file = client_src.download_dep_source( dep.package_name ) - if file.nil? - @log.error "Can't download dep package [#{dep.package_name}]" - end + os_list.each do |os| + begin + info_file = "#{@location}/snapshots/#{snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}" + if not File.exist? info_file then + @log.error( "pkg list file does not exist : #{info_file}", Log::LV_USER) + next + end + + pkg_list = Parser.read_repo_pkg_list_from(info_file) + + pkg_list.each_value{ |pkg| + file_list.push(pkg.path.sub("/binary/","")) + } + rescue => e + @log.error( e.message, Log::LV_USER) + end + end + + used_archive_list = used_archive_list + read_archive_pkg_list( snapshot ) end - @log.info "pkg deb file update end" - # pakcage list file update - write_pkg_list(os) - @log.info "write pkg list" - end + file_list.uniq! + used_archive_list.uniq! - def sync_archive_pkg - client = Client.new( @server_url, "#{@location}/source", @log ) - client.update - - downloaded_list = [] - client.archive_pkg_list.each do |pkg| - if not File.exist? "#{@location}/source/#{pkg}" then - file = client.download_dep_source(pkg) - if file.nil? - @log.error "Can't download archive package [#{file}]" - else - downloaded_list.push pkg - end + # remove unused binary file + Dir.new( @location + "/binary" ).each do |file| + if file.start_with? "." then next end + + if not file_list.include? 
file then + FileUtils.rm "#{@location}/binary/#{file}" end end - write_archive_pkg_list( downloaded_list ) - end + # remove unused archive file + Dir.new( @location + "/source" ).each do |file| + if file.start_with? "." then next end + + if not used_archive_list.include? file then + FileUtils.rm "#{@location}/source/#{file}" + end + end + + # remove unused snapshot + Dir.new( @location + "/snapshots" ).each do |snapshot| + if snapshot.start_with? "." then next end + + if not remain_snapshot_list.include? snapshot then + FileUtils.rm_rf "#{@location}/snapshots/#{snapshot}" + end + end + + # upate snapshot.info file + update_snapshot_info_file(remain_snapshot_list) + end + + def write_all_pkg_list + @support_os_list.each do |os| + write_pkg_list(os) + end + end def write_pkg_list( os ) - File.open( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", "w" ) do |f| + # if input os is empty then return + if os.nil? or os.empty? then return end + + # generate temp file + tmp_file_name = "" + while ( tmp_file_name.empty? ) + tmp_file_name = @location + "/temp/." + Utils.create_uniq_name + + if File.exist? tmp_file_name then + tmp_file_name = "" + end + end + + File.open( tmp_file_name, "w" ) do |f| @pkg_hash_os[os].each_value do |pkg| + # insert package information to file pkg.print_to_file(f) + # insert empty line to file f.puts end end - end - def write_archive_pkg_list( pkg_file_name_list ) - File.open( "#{@location}/#{ARCHIVE_PKG_LIST}", "a" ) do |f| - pkg_file_name_list.map { |name| f.puts(name) } - end + FileUtils.mv( tmp_file_name, "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", :force => true ) end # input: package file path(zip file) # return: pkg def get_package_from_file(file_path) - tmp_dir = "./" + Utils.create_uniq_name - FileUtils.mkdir "#{@location}/#{tmp_dir}" - - # file extention is zip - if file_path.end_with? ".zip" then - system("unzip -q #{file_path} pkginfo.manifest -d #{@location}/#{tmp_dir} ") - # file extention is tar.gz - elsif file_path.end_with? ".tar.gz" or file_path.end_with? ".tar" then - system("tar -xzf #{file_path} -C #{@location}/#{tmp_dir}") + tmp_dir = @location + "/" + Utils.create_uniq_name + + #if file extension is .zip then check pkginfo.manifest + if File.extname(file_path).eql? ".zip" then + FileUtils.mkdir tmp_dir + + ret = FileInstaller.extract_a_file(file_path, "pkginfo.manifest", tmp_dir, @log) else - raise "unsupported zipping file. just use [zip/tar.gz]" + return nil end - pkg = Parser.read_pkginfo( "#{@location}/#{tmp_dir}/pkginfo.manifest" ) - FileUtils.rm_rf "#{@location}/#{tmp_dir}" - return pkg + # if pkginfo.manifest file exist + if not ret.nil? then + begin + pkg = Parser.read_single_pkginfo_from "#{tmp_dir}/pkginfo.manifest" + rescue => e + @log.error( e.message, Log::LV_USER) + return nil + end + + FileUtils.rm_rf tmp_dir + return pkg + # if pkginfo.manifest file does not exist + else + FileUtils.rm_rf tmp_dir + return nil + end end def remove_pkg( pkg_name_list, os ) - for package_name in pkg_name_list + if os.eql? "all" then os_list = @support_os_list + else os_list = [ os ] + end + + pkg_name_list.each do |package_name| removed_flag = false - if os.eql? "all" then os_list = SUPPORTED_OS - else os_list = [ os ] - end + os_list.each do |os| + if not @support_os_list.include? 
os then + @log.error( "package server does not support input os : #{os}") + next + end - for os in os_list if @pkg_hash_os[os].key?(package_name) then @log.info( "remove package [#{package_name}] in #{os}", Log::LV_USER) @pkg_hash_os[os].delete(package_name) @@ -415,73 +508,403 @@ class Distribution end if not removed_flag then - @log.error( "Can't find package: #{package_name}", Log::LV_USER) + if @archive_pkg_list.include? package_name then + @archive_pkg_list.delete package_name + else + @log.error( "Can't find package: [#{package_name}]", Log::LV_USER) + end end end - + # check install dependency integrity - check_integrity + if @integrity.eql? "YES" then + @log.info "integrity check" + check_integrity + else + @log.info "skip integrity check" + end - for os in SUPPORTED_OS + + # update pkg_list file + os_list.each do |os| write_pkg_list(os) end + write_archive_pkg_list end + def remove_snapshot( snapshot_list ) + remain_snapshot = [] + removed_snapshot = [] + + # remove unused snapshot + Dir.new( @location + "/snapshots" ).each do |snapshot| + if snapshot.start_with? "." then next end + + if snapshot_list.include? snapshot then + FileUtils.rm_rf "#{@location}/snapshots/#{snapshot}" + snapshot_list.delete snapshot + removed_snapshot.push snapshot + else + remain_snapshot.push snapshot + end + end + + if not snapshot_list.empty? then + @log.output( "snapshot not exist : #{snapshot_list.join(",")}", Log::LV_USER ) + end + + if not removed_snapshot.empty? then + @log.output( "snapshot removed: #{removed_snapshot.join(",")}", Log::LV_USER ) + end + + update_snapshot_info_file(remain_snapshot) + end + def check_integrity @log.info "check server pkg's install dependency integrity" - if not @integrity.eql? "YES" then - @log.info "skip integrity check" - return + @support_os_list.each do |os| + @pkg_hash_os[os].each_value.each do |pkg| + check_package_integrity(pkg) + end end + end - for os in SUPPORTED_OS - for pkg in @pkg_hash_os[os].each_value - error_msg = "[#{pkg.package_name}]'s install dependency not matched in " + def check_package_integrity(pkg) + error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s install dependency not matched in " + os = pkg.os - for dep in pkg.install_dep_list - if @pkg_hash_os[os].has_key? dep.package_name then - target_pkg = @pkg_hash_os[os][dep.package_name] - else - raise RuntimeError,(error_msg + dep.to_s) - end + pkg.install_dep_list.each do |dep| + if @pkg_hash_os[os].has_key? dep.package_name then + target_pkg = @pkg_hash_os[os][dep.package_name] + else + raise RuntimeError,(error_msg + dep.to_s) + end + + # check package's version + if not dep.match? target_pkg.version then + raise RuntimeError,(error_msg + dep.to_s) + end + + end + + error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s build dependency not matched in " + pkg.build_dep_list.each do |dep| + if dep.target_os_list.length == 0 then + build_dep_os = os + else + build_dep_os = dep.target_os_list[0] + end + + if @pkg_hash_os[build_dep_os].has_key? dep.package_name then + target_pkg = @pkg_hash_os[build_dep_os][dep.package_name] + else + raise RuntimeError,(error_msg + dep.to_s) + end + + # check package's version + if not dep.match? target_pkg.version then + raise RuntimeError,(error_msg + dep.to_s) + end + end + + error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s source dependency not matched in " + pkg.source_dep_list.each do |dep| + if not @archive_pkg_list.include? 
dep.package_name then + raise RuntimeError,(error_msg + dep.to_s) + end + end + end + + def read_archive_pkg_list( snapshot_name ) + pkg_list = [] + + if snapshot_name.empty? + file_name = @location + "/" + ARCHIVE_PKG_FILE + else + file_name = @location + "/snapshots/" + snapshot_name + "/" + ARCHIVE_PKG_FILE + end + + if File.exist? file_name + File.open(file_name, "r") do |f| + f.each_line do |l| + pkg_list.push(l.strip) + end + end + end + + return pkg_list + end + + def write_archive_pkg_list() + File.open( "#{@location}/#{ARCHIVE_PKG_FILE}", "w" ) do |f| + @archive_pkg_list.each do |pkg| + f.puts(pkg) + end + end + end + + def initialize_pkg_list + if not File.exist? "#{@location}/#{OS_INFO_FILE}" then + return + end + + # get support_os_list + @support_os_list = [] + File.open( "#{@location}/#{OS_INFO_FILE}", "r" ) do |f| + f.each_line do |l| + @support_os_list.push l.strip + end + end + + # read package_list file + @support_os_list.each do |os| + @pkg_hash_os[os] = {} + pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" + + if File.exist? pkg_list_file then + begin + @pkg_hash_os[os] = Parser.read_repo_pkg_list_from( pkg_list_file ) + rescue => e + @log.error( e.message, Log::LV_USER) + @pkg_hash_os[os] = nil + end + end + end + + # read archive package_list file + @archive_pkg_list = read_archive_pkg_list("") + end + + def get_link_package(pkg, pkg_os) + pkg.os_list.each do |os| + # skip in same os for origin package + if pkg_os.eql? os then next end + # skip in unsupported os + if not @support_os_list.include? os then next end + + exist_pkg = @pkg_hash_os[os][pkg.package_name] + if exist_pkg.nil? then next end + + compare_version = Utils.compare_version(pkg.version, exist_pkg.version) + # if version same then compatible package + if compare_version == 0 then + return exist_pkg + end + end + + return nil + end + + # PRIVATE METHODS/VARIABLES + private + + def sync_package( pkg_name, client, os, force ) + server_pkg = client.pkg_hash_os[os][pkg_name] + local_pkg = @pkg_hash_os[os][pkg_name] + + # if server and local has package + if ( not server_pkg.nil? ) and ( not local_pkg.nil? ) then + version_cmp = Utils.compare_version( local_pkg.version, server_pkg.version ) + if ( version_cmp == 0 ) then + # version is same then skip update + return nil + end + + if ( local_pkg.origin.eql? "local" ) and (not force) then + # local_pkg is generated from local and not force mode then skip update + return nil + end + + pkg = sync_package2( server_pkg, client, os ) + return ["ADD", os, pkg] + # if package exist only server + elsif ( not server_pkg.nil? ) then + pkg = sync_package2( server_pkg, client, os ) + return ["ADD", os, pkg] + # if package exist only local + elsif ( not local_pkg.nil? ) then + # if local pkg is generated from local then skip + if local_pkg.origin.eql? "local" and (not force) then + return nil + end + + # package remove + return ["REMOVE", os, local_pkg] + else + raise RuntimeError,"hash merge error!" + end + + return nil + end + + def sync_package2( pkg, client, os ) + pkg_name = pkg.package_name + + # package update + file_path_list = client.download( pkg_name, os, false ) + + # file download error check + if file_path_list.nil? or file_path_list.empty? then + @log.error("Can't download package file [#{pkg_name}]", Log::LV_USER) + return nil + else + file_path = file_path_list[0] + end - # check package's version - if not dep.match? 
target_pkg.version then - raise RuntimeError,(error_msg + dep.to_s) - end + # update pkg class + pkg.path = "/binary/#{File.basename(file_path)}" + pkg.origin = client.server_addr + return pkg + + end + + def update_snapshot_info_file(remain_snapshot_list) + if not File.exist? "#{@location}/#{SNAPSHOT_INFO_FILE}" + @log.error "Can not find snapshot info file" + return + end - end + # generate temp file + tmp_file_name = "" + while ( tmp_file_name.empty? ) + tmp_file_name = @location + "/temp/." + Utils.create_uniq_name - error_msg = "[#{pkg.package_name}]'s build dependency not matched in " - for dep in pkg.build_dep_list - if dep.target_os_list.length == 0 then - build_dep_os = os + if File.exist? tmp_file_name then + tmp_file_name = "" + end + end + + # modify snapshot info File + info_file = File.readlines("#{@location}/#{SNAPSHOT_INFO_FILE}") + File.open(tmp_file_name, 'w') do |f| + save_flag = false + info_file.each { |line| + if line =~ /name :/ then + if remain_snapshot_list.include? line.split(':')[1].strip then + save_flag = true else - build_dep_os = dep.target_os_list[0] + save_flag = false end - if @pkg_hash_os[build_dep_os].has_key? dep.package_name then - target_pkg = @pkg_hash_os[build_dep_os][dep.package_name] - else - raise RuntimeError,(error_msg + dep.to_s) - end - - # check package's version - if not dep.match? target_pkg.version then - raise RuntimeError,(error_msg + dep.to_s) - end - - end - - error_msg = "[#{pkg.package_name}]'s source dependency not matched in " - for dep in pkg.source_dep_list - # check source package exist - if not File.exist? "#{@location}/source/#{dep.package_name}" - raise RuntimeError,(error_msg + dep.to_s) - end - end - end + end + + if save_flag then + f.puts line + end + } + end + + FileUtils.mv( tmp_file_name, "#{@location}/#{SNAPSHOT_INFO_FILE}", :force => true ) + end + + def get_all_reverse_depends_pkgs(pkg, checked_list) + depends_list = [] + + @support_os_list.each do |os| + @pkg_hash_os[os].each_value{ |dpkg| + if dpkg.install_dep_list.include? pkg or \ + dpkg.build_dep_list.include? pkg then + depends_list.push opkg + end + + } + end + + depends_list.each do |dpkg| + checked_list.push dpkg + rdepends_list = get_all_reverse_depends_pkgs( dpkg, checked_list ) + end + + return rdepends_list + end + + def reload_distribution_information + if not File.exist?("#{@location}/#{OS_INFO_FILE}") then + return end + + # get support_os_list + @support_os_list = [] + File.open( "#{@location}/#{OS_INFO_FILE}", "r" ) do |f| + f.each_line do |l| + @support_os_list.push l.strip + end + end + + # read binary package_list file + @support_os_list.each do |os| + @pkg_hash_os[os] = {} + pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" + + if File.exist? pkg_list_file then + begin + @pkg_hash_os[os] = Parser.read_repo_pkg_list_from( pkg_list_file ) + rescue => e + @log.error( e.message, Log::LV_USER) + @pkg_hash_os[os] = nil + end + end + end + + # read archive package_list file + @archive_pkg_list = read_archive_pkg_list( "" ) end + + def remove_os(os) + if not @support_os_list.include? os then + @log.error("Can't remove os : #{os} does not exist ", Log::LV_USER) + end + + # update os information + @support_os_list.delete os + @pkg_hash_os.delete os + + # generate temp file + tmp_file_name = "" + while ( tmp_file_name.empty? ) + tmp_file_name = @location + "/temp/." + Utils.create_uniq_name + + if File.exist? 
tmp_file_name then + tmp_file_name = "" + end + end + + info_file = File.readlines("#{@location}/#{OS_INFO_FILE}") + File.open(tmp_file_name, "w") do |f| + info_file.each do |line| + if not line.strip.eql? os then + f.puts line + end + end + end + + FileUtils.mv( tmp_file_name, "#{@location}/#{OS_INFO_FILE}", :force => true ) + + # delete pkg_list_#{os} file + File.delete( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" ) + end + + def sync_archive_pkg + client = Client.new( @server_url, "#{@location}/source", @log ) + + download_list = client.archive_pkg_list - @archive_pkg_list + + updated_file_list = [] + + # if update list is empty then return empty array + if download_list.empty? then return updated_file_list end + + download_list.each do |pkg| + file = client.download_dep_source(pkg) + if file.nil? + @log.error("Can't download archive package [#{pkg}]", Log::LV_USER) + else + updated_file_list.push pkg + end + end + + return updated_file_list + end + end diff --git a/src/pkg_server/downloader.rb b/src/pkg_server/downloader.rb index 0308208..7e5a8c8 100644 --- a/src/pkg_server/downloader.rb +++ b/src/pkg_server/downloader.rb @@ -31,30 +31,25 @@ require "utils" class FileDownLoader - @@log = nil - - def FileDownLoader.set_logger(logger) - @@log = logger - end - - def FileDownLoader.download(url, path) + def FileDownLoader.download(url, path, logger) ret = false if not File.directory? path then - @@log.error "\"#{path}\" does not exist" + logger.error "\"#{path}\" does not exist" return ret - end - + end + is_remote = Utils.is_url_remote(url) filename = url.split('/')[-1] - fullpath = File.join(path, filename) + logger.info "Downloading #{url}" if is_remote then - ret = system "wget #{url} -O #{fullpath} -nv" + ret = Utils.execute_shell_with_log( "wget #{url} -O #{fullpath} -nv", logger ) + #ret = Utils.execute_shell( "wget #{url} -O #{fullpath} -q") else if not File.exist? url then - @@log.error "\"#{url}\" file does not exist" + logger.error "\"#{url}\" file does not exist" return false else ret = system "cp #{url} #{fullpath}" @@ -62,6 +57,12 @@ class FileDownLoader end # need verify + if ret then + logger.info "Downloaded #{filename}.. OK" + else + logger.info "Failed to download #{filename}" + logger.info " [dist: #{path}]" + end return ret end end diff --git a/src/pkg_server/installer.rb b/src/pkg_server/installer.rb index 84ea930..d9582b8 100644 --- a/src/pkg_server/installer.rb +++ b/src/pkg_server/installer.rb @@ -31,22 +31,21 @@ $LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" require "packageServerConfig" require "log" require "utils" +if Utils.is_windows_like_os( Utils::HOST_OS ) then + require "rubygems" + require "zip/zip" +end class FileInstaller CONFIG_PATH = "#{PackageServerConfig::CONFIG_ROOT}/client" PACKAGE_INFO_DIR = ".info" + PACKAGE_MANIFEST = "pkginfo.manifest" - @@log = nil - - def FileInstaller.set_logger(logger) - @@log = logger - end - - def FileInstaller.install(package_name, package_file_path, type, target_path) + def FileInstaller.install(package_name, package_file_path, type, target_path, logger) if not File.exist? package_file_path then - @@log.error "\"#{package_file_path}\" file does not exist." + logger.error "\"#{package_file_path}\" file does not exist." return false end @@ -55,81 +54,153 @@ class FileInstaller when "binary" then uniq_name = Utils.create_uniq_name path = Utils::HOME + "/tmp/#{uniq_name}" - if Utils::HOST_OS.eql? 
"windows" then + # windows has limitation for file path length + if Utils.is_windows_like_os( Utils::HOST_OS ) then drive = Utils::HOME.split("/")[0] path = "#{drive}/#{uniq_name}" end - FileUtils.mkdir_p "#{path}" - - if File.directory? path then - log = "##### create temporary dir : #{path} #####\n" - else - log = "##### [Failed] create temporary dir : #{path} #####\n" - return false + if not File.exist? path then FileUtils.mkdir_p "#{path}" end + + if File.directory? path then + log = "## create temporary dir : #{path}\n" + else + logger.error "Failed to create temporary dir" + logger.info " [path: #{path}]" + return false + end + + begin + logger.info "Installing \"#{package_name}\" package.." + logger.info " [file: #{package_file_path}]" + + log = log + "## Extract file : #{package_file_path}\n" + result = extract_file(package_name, package_file_path, path, target_path, logger) + if result == "" or result.nil? then + write_log(target_path, package_name, log) + return false + else log = log + result end + + log = log + "## Move files : \"#{path}\" to \"#{target_path}\"\n" + result = move_dir(package_name, path, target_path, logger) + if result.nil? then + write_log(target_path, package_name, log) + return false + else log = log + result end + + log = log + "## Execute install script\n" + result = execute_install_script(package_name, path, target_path, logger) + if result.nil? then + write_log(target_path, package_name, log) + return false + else log = log + result end + + log = log + "## Move remove script\n" + result = move_remove_script(package_name, path, target_path, logger) + if result.nil? then + write_log(target_path, package_name, log) + return false + else log = log + result end + + log = log + "## Remove temporary dir : #{path} #####\n" + result = Utils.execute_shell_return("rm -rf #{path}") + if result.nil? then + logger.warn "Failed to remove temporary path" + logger.info " [path: #{path}]" + end + rescue Interrupt + logger.error "FileInstaller: Interrupted.." + Utils.execute_shell("rm -rf #{path}") + logger.info "Removed #{path}" + raise Interrupt end - - log = log + "##### extract file : #{package_file_path} #####\n" - log = log + extract_file(package_name, package_file_path, path, target_path) - move_dir(package_name, path, target_path) - - log = log + "##### execute install script #####\n" - log = log + execute_install_script(package_name, path, target_path) - - log = log + "##### move remove script #####\n" - move_remove_script(package_name, path, target_path) - - log = log + "##### remove temporary dir : #{path} #####\n" - Utils.execute_shell("rm -rf #{path}") - + write_log(target_path, package_name, log) +=begin target_config_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}" - FileUtils.mkdir_p(target_config_path) + if not File.exist? target_config_path then FileUtils.mkdir_p(target_config_path) end pkg_inst_log = "#{package_name}_inst.log" pkg_inst_log_path = File.join(target_config_path, pkg_inst_log) File.open(pkg_inst_log_path, "a+") do |f| f.puts log end - +=end when "source" then end # need verify + logger.info "Installed \"#{package_name}\" package.. OK" + logger.info " [path: #{target_path}]" return true; - end + end - def FileInstaller.move_remove_script(package_name, path, target_path) + def FileInstaller.write_log(target_path, package_name, log) + target_config_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}" + if not File.exist? 
target_config_path then FileUtils.mkdir_p(target_config_path) end + pkg_inst_log = "#{package_name}_inst.log" + pkg_inst_log_path = File.join(target_config_path, pkg_inst_log) + + File.open(pkg_inst_log_path, "a+") do |f| + f.puts log + end + end + + def FileInstaller.move_remove_script(package_name, path, target_path, logger) target_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}" - FileUtils.mkdir_p(target_path) + if not File.exist? target_path then FileUtils.mkdir_p(target_path) end script_file_prefix = "#{path}/remove.*" script_file = Dir.glob(script_file_prefix)[0] + log = "" if not script_file.nil? then - FileUtils.mv(script_file, target_path) - end + result = Utils.execute_shell_return("mv #{script_file} #{target_path}") + if result.nil? then + logger.error "Failed to move a remove script" + logger.info " [file: #{script_file}]" + logger.info " [from: #{path}]" + logger.info " [to: #{target_path}]" + return nil + else log = result.join("") end + logger.info "Moved remove script file.. OK" + log = log + "[file: #{script_file}]\n" + log = log + "[from: #{path}]\n" + log = log + "[to: #{target_path}]\n" + end + + return log end - def FileInstaller.execute_install_script(package_name, path, target_path) + # Does not verify that the script execution is successful. + # Register shortcut should be failed. + def FileInstaller.execute_install_script(package_name, path, target_path, logger) script_file_prefix = "#{path}/install.*" script_file = Dir.glob(script_file_prefix)[0] log = "" if not script_file.nil? then - @@log.info "Execute \"#{script_file}\" file" - if Utils::HOST_OS.eql? "windows" then - cmd = "set INSTALLED_PATH=\"#{target_path}\"& #{script_file}" + logger.info "Execute \"#{script_file}\" file" + if Utils.is_windows_like_os( Utils::HOST_OS ) then + cmd = "set INSTALLED_PATH=\"#{target_path}\"& #{script_file}" else cmd = "INSTALLED_PATH=\"#{target_path}\" #{script_file}" - end + end + logger.info " [cmd: #{cmd}]" log = `#{cmd}` - end + logger.info "Executed install script file.. OK" + log = log + "[file: #{script_file}]\n" + log = log + "[cmd: #{cmd}]\n" + end + return log end - def FileInstaller.execute_remove_script(package_name, target_path) + # Does not verify that the script execution is successful. + # Removing shortcut should be failed. + def FileInstaller.execute_remove_script(package_name, target_path, logger) info_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}" if not File.directory? info_path then - return false + logger.error "\"#{info_path}\" does not exist." + return nil end script_file_prefix = "#{info_path}/remove.*" @@ -137,21 +208,28 @@ class FileInstaller log = "" if not script_file.nil? then - @@log.info "Execute \"#{script_file}\" file" - if Utils::HOST_OS.eql? "windows" then + logger.info "Execute \"#{script_file}\" file" + if Utils.is_windows_like_os( Utils::HOST_OS ) then cmd = "set INSTALLED_PATH=\"#{target_path}\"& #{script_file}" else cmd = "INSTALLED_PATH=\"#{target_path}\" #{script_file}" end + logger.info " [cmd: #{cmd}]" log = `#{cmd}` - end + logger.info "Executed remote script file.. OK" + log = log + "[file: #{script_file}]\n" + log = log + "[cmd: #{cmd}]\n" + end + + return log end - def FileInstaller.remove_pkg_files(package_name, target_path) + def FileInstaller.remove_pkg_files(package_name, target_path, logger) list_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}" if not File.directory? list_path then - return false + logger.error "\"#{list_path}\" does not exist." 
+ return false end list_file_name = "#{list_path}/#{package_name}.list" @@ -176,17 +254,18 @@ class FileInstaller begin Dir.rmdir(file_path) rescue SystemCallError - @@log.warn "\"#{file_path}\" directory is not empty" + logger.warn "\"#{file_path}\" directory is not empty" end else directories.push(file_path) end elsif File.file? file_path then FileUtils.rm_f(file_path) elsif File.symlink? file_path then File.unlink file_path # if files are already removed by remove script, - else @@log.warn "\"#{file_path}\" does not exist" end + else logger.warn "\"#{file_path}\" does not exist" end end end directories.reverse.each do |path| + if not File.directory? path then next end entries = Dir.entries(path) if entries.include? "." then entries.delete(".") end if entries.include? ".." then entries.delete("..") end @@ -194,40 +273,67 @@ class FileInstaller begin Dir.rmdir(path) rescue SystemCallError - @@log.warn "\"#{file_path}\" directory is not empty" + logger.warn "\"#{file_path}\" directory is not empty" end else next end end end - #FileUtils.rm_rf(list_path) Utils.execute_shell("rm -rf #{list_path}") return true end - def FileInstaller.uninstall(package_name, type, target_path) + def FileInstaller.uninstall(package_name, type, target_path, logger) case type when "binary" then - execute_remove_script(package_name, target_path) - remove_pkg_files(package_name, target_path) + result = execute_remove_script(package_name, target_path, logger) + if result.nil? then return false end + if not remove_pkg_files(package_name, target_path, logger) then return false end when "source" then end return true end - def FileInstaller.move_dir(package_name, source_path, target_path) + def FileInstaller.move_dir(package_name, source_path, target_path, logger) config_path = File.join(target_path, PACKAGE_INFO_DIR, package_name) - FileUtils.cp_r Dir.glob("#{source_path}/data/*"), target_path - FileUtils.cp "#{source_path}/pkginfo.manifest", config_path + pkginfo_path = File.join(source_path, PACKAGE_MANIFEST) + data_path = File.join(source_path, "data") + log = "" + + if not File.exist? pkginfo_path then + logger.error "#{PACKAGE_MANIFEST} file does not exist. Check #{source_path}" + return nil + else FileUtils.cp pkginfo_path, config_path end + + if File.exist? data_path then + # if os is linux, use cpio. it is faster than cp + if Utils.is_linux_like_os( Utils::HOST_OS ) then + absolute_path = `readlink -f #{target_path}` + result = Utils.execute_shell_return("cd #{data_path}; find . -depth | cpio -pldm #{absolute_path}") + else + result = Utils.execute_shell_return("cp -r #{data_path}/* #{target_path}") + end + if result.nil? then + logger.error "Failed to move files" + logger.info " [from: #{source_path}]" + logger.info " [to: #{target_path}]" + return nil + end + logger.info "Moved files.. OK" + log = log + "[from: #{source_path}]\n" + log = log + "[to: #{target_path}]\n" + else logger.warn "\"data\" directory does not exist." end + + return log end - def FileInstaller.extract_file(package_name, package_file_path, path, target_path) + def FileInstaller.extract_file(package_name, package_file_path, path, target_path, logger) dirname = File.dirname(package_file_path) filename = File.basename(package_file_path) ext = File.extname(filename) target_config_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}" - FileUtils.mkdir_p(target_config_path) + if not File.exist? 
target_config_path then FileUtils.mkdir_p(target_config_path) end pkg_file_list = "#{package_name}.list" pkg_file_list_path = File.join(target_config_path, pkg_file_list) temp_pkg_file_list = "temp_file_list" @@ -235,16 +341,20 @@ class FileInstaller show_file_list_command = nil extrach_file_list_command = nil + log = "" case ext when ".zip" then show_file_list_command = "zip -sf #{package_file_path}" - extract_file_list_command = "unzip \"#{package_file_path}\" -d \"#{path}\"" + extract_file_list_command = "unzip -o \"#{package_file_path}\" -d \"#{path}\"" when ".tar" then - show_file_list_command = "tar -sf #{package_file_path}" - extract_file_list_command = "tar xf \"#{package_file_path}\" -C \"#{path}\"" + # path should be unix path if it is used in tar command + _package_file_path = Utils.get_unix_path(package_file_path) + _path = Utils.get_unix_path(path) + show_file_list_command = "tar -tf #{_package_file_path}" + extract_file_list_command = "tar xf \"#{_package_file_path}\" -C \"#{_path}\"" else - @@log.error "\"#{filename}\" is not supported." + logger.error "\"#{filename}\" is not supported." return nil end @@ -259,15 +369,44 @@ class FileInstaller end end end - File.delete(temp_pkg_file_list_path) - log = `#{extract_file_list_command}` - @@log.info "Extracted \"#{filename}\" file.. OK" - if log.nil? then log = "" end + + case ext + when ".zip" then + if Utils.is_windows_like_os( Utils::HOST_OS ) then + log = unzip_file(package_file_path, path) + else + #result = Utils.execute_shell_return(extract_file_list_command) + #if result.nil? then log = nil + #else log = result.join("") end + log = `#{extract_file_list_command}` + end + when ".tar" then + #result = Utils.execute_shell_return(extract_file_list_command) + #if result.nil? then log = nil + #else log = result.join("") end + log = `#{extract_file_list_command}` + end + + if log == "" then log = nil end + if log.nil? then + logger.error "Failed to extract \"#{filename}\" file" + logger.info " [file: #{package_file_path}]" + logger.info " [from: #{path}]" + logger.info " [to: #{target_path}]" + logger.info " [cmd: #{extract_file_list_command}]" + return nil + end + + logger.info "Extracted \"#{filename}\" file.. OK" + log = log + "[file: #{package_file_path}]\n" + log = log + "[from: #{path}]\n" + log = log + "[to: #{target_path}]\n" + log = log + "[cmd: #{extract_file_list_command}]\n" return log end - def FileInstaller.extract_specified_file(package_file_path, target_file, path) + def FileInstaller.extract_a_file(package_file_path, target_file, path, logger) dirname = File.dirname(package_file_path) filename = File.basename(package_file_path) ext = File.extname(filename) @@ -280,11 +419,13 @@ class FileInstaller extract_file_command = "unzip -x #{package_file_path} #{target_file}" end when ".tar" then + # path should be unix path if it is used in tar command + _package_file_path = Utils.get_unix_path(package_file_path) + _path = Utils.get_unix_path(path) if not path.nil? then - path = File.join(path, package_file_path) - extract_file_command = "tar xvf #{package_file_path} #{target_file}" + extract_file_command = "tar xf #{_package_file_path} -C #{_path} #{target_file}" else - extract_file_command = "tar xvf #{package_file_path} #{target_file}" + extract_file_command = "tar xf #{_package_file_path} #{target_file}" end end @@ -297,12 +438,46 @@ class FileInstaller end if File.exist? target_file_path then - @@log.info "Extracted \"#{target_file}\" file.." + logger.info "Extracted \"#{target_file}\" file.." 
return true else - @@log.info "Failed to extracted \"#{target_file}\" file.." + logger.warn "Failed to extracted \"#{target_file}\" file.." + logger.info " [file: #{package_file_path}]" + logger.info " [path: #{path}]" + logger.info " [cmd: #{extract_file_command}]" return false end end -end + def FileInstaller.unzip_file(zipfile, dest) + log = "" + Zip::ZipFile.open(zipfile) do |zip_file| + zip_file.each do |f| + f_path = File.join(dest, f.name) + FileUtils.mkdir_p(File.dirname(f_path)) + if File.exist?(f_path) then + log = log + "[Warn] Exist file : #{f_path}\n" unless f_path.end_with? "/" + else + zip_file.extract(f, f_path) + if not f_path.end_with? "/" then + log = log + "[info] Extracted file : #{f_path}\n" + end + end + end + end + return log + end + + def FileInstaller.unzip_a_file(zipfile, file, dest) + Zip::ZipFile.open(zipfile) do |zip_file| + zip_file.each do |f| + if f.name.strip == file then + f_path = File.join(dest, f.name) + FileUtils.mkdir_p(File.dirname(f_path)) + zip_file.extract(f, f_path) unless File.exist?(f_path) + break + end + end + end + end +end diff --git a/src/pkg_server/packageServer.rb b/src/pkg_server/packageServer.rb index 86abc39..48dd649 100644 --- a/src/pkg_server/packageServer.rb +++ b/src/pkg_server/packageServer.rb @@ -29,266 +29,250 @@ Contributors: require 'fileutils' $LOAD_PATH.unshift File.dirname(__FILE__) $LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" +$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/build_server" +require "BuildComm" require "packageServerLog" require "packageServerConfig" require "distribution" +require "SocketRegisterListener" require "client" require "utils" require "mail" +require "DistSync" class PackageServer - attr_accessor :id, :location, :log, :integrity + attr_accessor :id, :location, :log, :integrity + attr_accessor :finish, :port + attr_accessor :incoming_path + attr_accessor :distribution_list + attr_accessor :sync_interval, :passwd - # constant - SUPPORTED_OS = ["linux", "windows", "darwin"] + # constant + SERVER_ROOT = "#{PackageServerConfig::CONFIG_ROOT}/pkg_server" + DIBS_LOCK_FILE_PATH = "#{SERVER_ROOT}/.server_loc" # initialize def initialize (id) - @id = id @location = "" @distribution_list = [] # distribution name -> server_url hash @dist_to_server_url = {} - @integrity = "NO" - - if not File.exist?( PackageServerConfig::SERVER_ROOT ) - FileUtils.mkdir_p( PackageServerConfig::SERVER_ROOT ) + @integrity = "YES" + @auto_sync_flag = "NO" + @finish = false + @port = 3333 + @test_time=0 #test time in mili-seconds + @lock_file= nil + @sync_interval = 3600 + @passwd = "" + + update_config_information(id) + + if not File.exist?( SERVER_ROOT ) + FileUtils.mkdir_p( SERVER_ROOT ) end - @log = PackageServerLog.new( "#{PackageServerConfig::SERVER_ROOT}/.#{@id}.log" ) + @log = PackageServerLog.new( @log_file_path ) server_information_initialize() end # create - def create (id, dist_name, server_url, loc = nil ) - @id = id + def create( id, dist_name, server_url, loc = nil ) + update_config_information(id) + if loc.nil? or loc.empty? then - @location = Dir.pwd + "/" + id + @location = Dir.pwd + "/" + @id else - if loc.end_with? "/" then - @location = loc + id - else - @location = loc + "/" + id + if Utils.is_absolute_path(loc) then + @location = File.join(loc, @id) + else + @location = File.expand_path(File.join(Dir.pwd, loc, @id)) end end + # error check : check for already exist in server @id + if File.exist? @config_dir + raise RuntimeError, "Server create fail. 
server id [#{@id}] is already exist" + end + + # name check + if dist_name.strip.eql? "distribution.info" then + raise RuntimeError, "id \"distribution.info\" is not available" + end + # create locking file - File.open("#{PackageServerConfig::LOCK_FILE}", File::RDWR|File::CREAT, 0644) {|f| - f.flock(File::LOCK_EX) - f.rewind - f.flush - f.truncate(f.pos) - - # error check : check for already exist in server id - if File.exist? "#{PackageServerConfig::SERVER_ROOT}/#{id}" - raise RuntimeError, "Server create fail. server id [#{id}] is already exist" - end - - # error check : check for already exist in server directory - if File.exist? "#{@location}/#{dist_name}" - raise RuntimeError, "Server create fail. directory is already exist [#{@location}/#{dist_name}]" - end - - # create server config directory - FileUtils.mkdir_p "#{PackageServerConfig::SERVER_ROOT}/#{id}" - FileUtils.mkdir_p "#{PackageServerConfig::SERVER_ROOT}/#{id}/incoming" + lock_file = Utils.file_lock(DIBS_LOCK_FILE_PATH) + + # create server config directory + FileUtils.mkdir_p @config_dir + FileUtils.mkdir_p @incoming_path - if (not server_url.empty?) and (not Utils.is_url_remote(server_url)) + if (not server_url.empty?) and \ + (not Utils.is_url_remote(server_url)) and \ + (not Utils.is_absolute_path(server_url)) then # if server_url is local server address then generate absoulte path - if not Utils.is_absolute_path( server_url ) then - if server_url.end_with?("/") then - server_url = Utils::WORKING_DIR + server_url - else - server_url = Utils::WORKING_DIR + "/" + server_url - end - end - end + server_url = File.join(Utils::WORKING_DIR, server_url) + end - # create server configure file - File.open( "#{PackageServerConfig::SERVER_ROOT}/#{id}/config", "w" ) do |f| - f.puts "location : #{@location}" - f.puts "integrity check : NO" - f.puts "server_url : #{dist_name} -> #{server_url}" - end - - # create location's directory - FileUtils.mkdir_p "#{@location}" - - create_distribution_struct( dist_name, server_url ) - } + # create server configure file + File.open( @config_file_path, "w" ) do |f| + f.puts "location : #{@location}" + f.puts "integrity check : #{@integrity}" + f.puts "auto sync : #{@auto_sync_flag}" + f.puts "sync interval : #{@sync_interval}" + f.puts "server_url : #{dist_name} -> #{server_url}" + end + + # create location's directory + FileUtils.mkdir_p "#{@location}" + + create_distribution_struct( dist_name, server_url ) + Utils.file_unlock(lock_file) @log.output( "package server [#{@id}] created successfully", Log::LV_USER ) end - def register( source_pkg_file_path_list, binary_pkg_file_path_list, dist_name, snapshot, test ) + def register( file_path_list, dist_name, snapshot, test_flag, internal_flag = false ) @log.info "package register in server" - if dist_name.empty? then dist_name = get_default_dist_name() end - if dist_name.empty? then raise RuntimeError,"Can't find distribution information" end distribution = get_distribution( dist_name ) # distribution lock - File.open("#{@location}/#{dist_name}/.lock_file", File::RDWR|File::CREAT, 0644) {|f| - f.flock(File::LOCK_EX) - f.rewind - f.flush - f.truncate(f.pos) + @lock_file = Utils.file_lock(distribution.lock_file_path) - source_pkg_file_name_list = [] - updated_os_list = [] - append_pkg_list = [] - used_source_pkg_list = [] - package_list = [] - - # error check - source_pkg_file_path_list.each do |l| - # error check for file exist - if not File.exist? 
l - raise RuntimeError, "source package file does not exist [#{l}]" - end + updated_os_list = [] + registed_package_list = [] + binary_pkg_file_path_list = [] + link_pkg_file_path_list = [] + archive_pkg_file_path_list = [] + snapshot_name = "" + + file_path_list.each do |f| + # error check for file exist + if not File.exist? f + raise RuntimeError, "package file does not exist [#{f}]" + end - source_pkg_file_name_list.push File.basename( l ) - end - binary_pkg_file_path_list.each do |l| - # error check for file exist - if not File.exist? l - raise RuntimeError, "binary package file does not exist [#{l}]" - end - end + pkg = distribution.get_package_from_file(f) - # register binary package - binary_pkg_file_path_list.each do |l| - # get package class using bianry file - pkg = distribution.get_package_from_file(l) + # binary package + if not pkg.nil? then - if pkg.nil? or pkg.package_name.empty? then - raise "[#{l}]'s pkginfo.manifest file is incomplete." - end - package_list.push pkg - - if test then - if not pkg.source.empty? then - if not source_pkg_file_name_list.include? "#{pkg.source}_#{pkg.version}.tar.gz" - raise "binary package and source package must be upload same time" - end - - pkg.src_path = "/temp/#{pkg.source}_#{pkg.version}.tar.gz" - end - pkg = distribution.register_for_test(l ,pkg ) + # find link package + pkg_os = Utils.get_os_from_package_file(f) + link_pkg = distribution.get_link_package(pkg, pkg_os) + if link_pkg.nil? then + binary_pkg_file_path_list.push f else - if pkg.package_name.empty? or pkg.version.empty? or pkg.os.empty? or pkg.maintainer.empty? then - raise "[#{l}]'s pkginfo.manifest file is incomplete." - # binary only package - elsif pkg.attribute.include? "binary" then - @log.info "binary package [#{l}] is binary only package" - pkg.src_path = "" - elsif pkg.source.empty? then - raise "[#{l}]'s pkginfo.manifest file is incomplete." - # binary package - else - if not source_pkg_file_name_list.include? "#{pkg.source}_#{pkg.version}.tar.gz" - raise "binary package [#{pkg.package_name}]'s source package must be upload same time" - end - - @log.info "binary package [#{l}]'s source package is #{pkg.source}" - used_source_pkg_list.push "#{pkg.source}_#{pkg.version}.tar.gz" - pkg.src_path = "/source/#{pkg.source}_#{pkg.version}.tar.gz" - end - - pkg = distribution.register(l ,pkg ) - updated_os_list.push pkg.os + link_pkg_file_path_list.push [link_pkg.path, File.basename(f)] + pkg.checksum = link_pkg.checksum + pkg.size = link_pkg.size end - - append_pkg_list.push pkg - end - # check install dependency integrity - if not test then distribution.check_integrity end - - source_pkg_file_path_list.each do |source_path| - source_name = File.basename(source_path) - if File.exist? "#{@location}/#{dist_name}/source/#{source_name}" then - @log.warn "source package already exist then does not register" - next - end - - if test then - @log.info "source package [#{source_name}] register in temp/]" - FileUtils.cp( source_path, "#{@location}/#{dist_name}/temp/" ) + # update os information + if pkg.os_list.include? pkg_os then + pkg.os = pkg_os + pkg.os_list = [pkg_os] else - @log.info "source package [#{source_name}] register in source/]" - FileUtils.cp( source_path, "#{@location}/#{dist_name}/source/" ) + raise RuntimeError, "package file name is incorrect [#{f}]" end - end - # register archive pakcage list. 
- distribution.write_archive_pkg_list( source_pkg_file_name_list - used_source_pkg_list ) + updated_pkg = register_package(distribution, pkg, f, test_flag, internal_flag) + + updated_os_list.push updated_pkg.os + registed_package_list.push updated_pkg + # archive package + else + if test_flag then + @log.error("archive package does not using test mode", Log::LV_USER) + return + end - binary_pkg_file_path_list.each do |l| - if test then - FileUtils.cp( l, "#{@location}/#{dist_name}/temp/" ) - else - FileUtils.cp( l, "#{@location}/#{dist_name}/binary/" ) - end + file_name = File.basename(f) + distribution.register_archive_pkg(file_name) + archive_pkg_file_path_list.push f + end + end + + # check install dependency integrity + if not test_flag and @integrity.eql? "YES" then + registed_package_list.each do |pkg| + distribution.check_package_integrity(pkg) end + end - # write package list for updated os - updated_os_list.uniq! - updated_os_list.each do |os| - distribution.write_pkg_list( os ) + # move file to package server + binary_pkg_file_path_list.each do |l| + if test_flag then + FileUtils.copy_file( l, "#{distribution.location}/temp/#{File.basename(l)}" ) + else + FileUtils.copy_file( l, "#{distribution.location}/binary/#{File.basename(l)}" ) + end + end + + # link to package server + link_pkg_file_path_list.each do |l| + if test_flag then + src_file = File.join(distribution.location, l[0]) + dest_file = File.join(distribution.location, "temp", l[1]) + FileUtils.ln( src_file, dest_file, :force => true ) + else + src_file = File.join(distribution.location, l[0]) + dest_file = File.join(distribution.location, "binary", l[1]) + FileUtils.ln( src_file, dest_file, :force => true ) end + end - # if snapshot mode is true then generate snapshot - if snapshot or test then - @log.info "generaging snapshot" - distribution.generate_snapshot("", "", "") - end + archive_pkg_file_path_list.each do |l| + FileUtils.mv( l, "#{distribution.location}/source/" ) + end - # send email - if not test then - msg_list = [] + # write package list for updated os + updated_os_list.uniq! + updated_os_list.each do |os| + distribution.write_pkg_list(os) + end - package_list.map{ |p| - msg_list.push("%-30s: %08s" % [ p.package_name.strip, p.version.strip ] ) - } - # email just remote package server - # Mail.send_package_registe_mail( msg_list, @id ) - end - } + # register archive pakcage list. + distribution.write_archive_pkg_list() + + # send email + if test_flag then + msg_list = [] + + registed_package_list.each { |p| + msg_list.push("%-30s: %08s" % [ p.package_name.strip, p.version.strip ] ) + } + # email just remote package server + # Mail.send_package_registe_mail( msg_list, @id ) + end + + # if snapshot mode is true then generate snapshot + if snapshot or test_flag then + @log.info "generaging snapshot" + snapshot_name = distribution.generate_snapshot("", "", false) + end + + Utils.file_unlock(@lock_file) @log.output( "package registed successfully", Log::LV_USER) + + return snapshot_name end - def generate_snapshot( snpashot_name, dist_name, base_snapshot, binary_pkg_file_path_list) + def generate_snapshot( snpashot_name, dist_name, base_snapshot ) @log.info "generating snapshot" - if dist_name.empty? then dist_name = get_default_dist_name() end - if dist_name.empty? 
then raise RuntimeError,"Can't find distribution information" end distribution = get_distribution( dist_name ) - File.open("#{@location}/#{dist_name}/.lock_file", File::RDWR|File::CREAT, 0644) {|f| - f.flock(File::LOCK_EX) - f.rewind - f.flush - f.truncate(f.pos) - - append_pkg_list = [] - binary_pkg_file_path_list.each do |l| - if not File.exist? l then raise RuntimeError,"Can't find binary package file [#{l}]" end - pkg = distribution.get_package_from_file(l) - if l.start_with? "/" - pkg.path = "#{l}" - else - pkg.path = "#{Dir.pwd}/#{l}" - end - append_pkg_list.push pkg - end - - distribution.generate_snapshot( snpashot_name, base_snapshot, append_pkg_list) - } + @lock_file = Utils.file_lock(distribution.lock_file_path) + + snapshot_name = distribution.generate_snapshot( snpashot_name, base_snapshot, true) + + Utils.file_unlock(@lock_file) + + return snapshot_name end def sync( dist_name, mode ) @log.info "sync from server" - if dist_name.empty? then dist_name = get_default_dist_name() end - if dist_name.empty? then raise RuntimeError,"Can't find distribution information" end distribution = get_distribution( dist_name ) if distribution.server_url.empty? then @@ -296,67 +280,69 @@ class PackageServer return end - File.open("#{@location}/#{dist_name}/.lock_file", File::RDWR|File::CREAT, 0644) {|f| - f.flock(File::LOCK_EX) - f.rewind - f.flush - f.truncate(f.pos) - - distribution.sync( mode, "linux" ) - distribution.sync( mode, "windows" ) - distribution.sync( mode, "darwin" ) - distribution.sync_archive_pkg - } + ret = distribution.sync(mode) + if ret then + distribution.generate_snapshot("", "", false) + end - @log.output( "package server [#{@id}]'s distribution [#{dist_name}] has the synchronization.", Log::LV_USER ) + @log.output( "package server [#{@id}]'s distribution [#{dist_name}] has been synchronized.", Log::LV_USER ) end def add_distribution( dist_name, server_url, clone ) - File.open("#{PackageServerConfig::LOCK_FILE}", File::RDWR|File::CREAT, 0644) {|f| - f.flock(File::LOCK_EX) - f.rewind - f.flush - f.truncate(f.pos) - - # error check : check for already exist in server directory - if @dist_to_server_url.keys.include? dist_name.strip then - raise RuntimeError, "distribution already exist : #{dist_name}" - end - if File.exist? "#{@location}/#{dist_name}" - raise RuntimeError, "distribution directory already exist [#{@location}/#{dist_name}]" - end - - if (not server_url.empty?) and (not Utils.is_url_remote(server_url)) - # if server_url is local server address then generate absoulte path - if not Utils.is_absolute_path( server_url ) then - if server_url.end_with?("/") then - server_url = Utils::WORKING_DIR + server_url - else - server_url = Utils::WORKING_DIR + "/" + server_url - end - end - end - - File.open( "#{PackageServerConfig::SERVER_ROOT}/#{@id}/config", "a" ) do |f| - if clone then - @log.info "add distribution using [#{server_url}] in clone mode" - f.puts "server_url : #{dist_name} -> " + lock_file = Utils.file_lock(@server_lock_file_path) + + # error check : check for already exist in server directory + if @dist_to_server_url.keys.include? dist_name.strip then + Utils.file_unlock(@lock_file) + raise RuntimeError, "distribution already exist : #{dist_name}" + end + + # name check + if dist_name.strip.eql? "distribution.info" then + Utils.file_unlock(@lock_file) + raise RuntimeError, "id \"distribution.info\" is not available" + end + + # modify server url + if (not server_url.empty?) 
and (not Utils.is_url_remote(server_url)) + # if server_url is local server address then generate absoulte path + if not Utils.is_absolute_path( server_url ) then + if server_url.end_with?("/") then + server_url = Utils::WORKING_DIR + server_url else - @log.info "add distribution using [#{server_url}]" - f.puts "server_url : #{dist_name} -> #{server_url}" + server_url = Utils::WORKING_DIR + "/" + server_url end end - - create_distribution_struct( dist_name, server_url ) - } + end + + add_dist_for_config_file(dist_name, server_url, clone) + create_distribution_struct( dist_name, server_url ) + + Utils.file_unlock(lock_file) @log.output( "distribution [#{dist_name}] added successfully", Log::LV_USER ) end - def remove_server( id ) - @log.info( "Package server [#{id}] will be removed and all server information delete", Log::LV_USER) + def add_os(dist_name, os) + dist = get_distribution(dist_name) + + # distribution lock + @lock_file = Utils.file_lock(dist.lock_file_path) + + dist.add_os(os) + + @log.info "generaging snapshot" + dist.generate_snapshot("", "", false) - if File.exist? "#{PackageServerConfig::SERVER_ROOT}/#{id}/config" then - File.open "#{PackageServerConfig::SERVER_ROOT}/#{id}/config" do |f| + Utils.file_unlock(@lock_file) + @log.output( "package server add os [#{os}] successfully", Log::LV_USER ) + end + + def remove_server() + @log.info( "Package server [#{@id}] will be removed and all server information delete", Log::LV_USER) + + lock_file = Utils.file_lock(DIBS_LOCK_FILE_PATH) + if File.exist? @config_file_path then + File.open @config_file_path do |f| f.each_line do |l| if l.start_with?( "location : ") then location= l.split(" : ")[1] @@ -366,78 +352,211 @@ class PackageServer end end else - @log.error( "Can't find server information : #{id}", Log::LV_USER) + @log.error( "Can't find server information : #{@id}", Log::LV_USER) end - FileUtils.rm_rf "#{PackageServerConfig::SERVER_ROOT}/.#{id}.log" - FileUtils.rm_rf "#{PackageServerConfig::SERVER_ROOT}/#{id}" + FileUtils.rm_rf @config_dir + FileUtils.rm_rf @log_file_path + + Utils.file_unlock(lock_file) + @log.output( "package server [#{@id}] removed successfully", Log::LV_USER ) + end + + def remove_dist( dist_name ) + @log.info "remove distribution in server" + distribution = get_distribution( dist_name ) + + lock_file = Utils.file_lock(@server_lock_file_path) + + # modify config file + config_file = File.readlines(@config_file_path) + File.open(@config_file_path, 'w') do |f| + config_file.each { |line| + f.puts(line) if not line =~ /server_url : #{dist_name} ->/ + } + end + + # modify info file + config_file = File.readlines("#{@location}/distribution.info") + File.open("#{@location}/distribution.info", 'w') do |f| + remove_flag = false + config_file.each { |line| + if line.start_with? "name :" then + if line.split(':')[1].strip.eql? dist_name then + remove_flag = true + else + remove_flag = false + end + + end + + # rewrite information for not remove distribution + if not remove_flag then + f.puts line + end + } + end + + # remove distribution directory + FileUtils.rm_rf distribution.location - @log.output( "package server [#{id}] removed successfully", Log::LV_USER ) + # remove distribution struct + @distribution_list.delete distribution + + Utils.file_unlock(lock_file) end - def remove_pkg( id, dist_name, pkg_name_list, os ) + def remove_pkg( dist_name, pkg_name_list, os ) @log.info "package remove in server" - if dist_name.empty? then dist_name = get_default_dist_name() end - if dist_name.empty? 
then raise RuntimeError,"Can't find distribution information" end distribution = get_distribution( dist_name ) - # distribution lock - File.open("#{@location}/#{dist_name}/.lock_file", File::RDWR|File::CREAT, 0644) {|f| - f.flock(File::LOCK_EX) - f.rewind - f.flush - f.truncate(f.pos) - - distribution.remove_pkg(pkg_name_list, os) - } + lock_file = Utils.file_lock(@server_lock_file_path) + + distribution.remove_pkg(pkg_name_list, os) + + # generate snapshot + @log.info "generaging snapshot" + distribution.generate_snapshot("", "", false) + + Utils.file_unlock(lock_file) @log.output( "package removed successfully", Log::LV_USER ) end + + def remove_snapshot( dist_name, snapshot_list ) + @log.info "remove snapshot in server" + distribution = get_distribution( dist_name ) - def find_source_package_path( dist_name, pkg_file_name_list ) - if dist_name.empty? then dist_name = get_default_dist_name() end - if dist_name.empty? then raise RuntimeError,"Can't find distribution information" end + lock_file = Utils.file_lock(@server_lock_file_path) + + distribution.remove_snapshot(snapshot_list) + + Utils.file_unlock(lock_file) + end + + def clean( dist_name, snapshot_list ) + @log.info "pakcage server clean" distribution = get_distribution( dist_name ) - pkg_file_name_list.each do |pkg| - pkg_path = "#{@location}/#{dist_name}/source/#{pkg}" - if File.exist? pkg_path then - @log.output( "#{pkg}", Log::LV_USER) + lock_file = Utils.file_lock(@server_lock_file_path) + + distribution.clean( snapshot_list ) + + # remove incoming dir + FileUtils.rm_rf incoming_path + FileUtils.mkdir incoming_path + + Utils.file_unlock(lock_file) + end + + # start server daemon + def start( port, passwd ) + @log.info "Package server Start..." + # set port number. default port is 3333 + @port = port + + # set job request listener + @log.info "Setting listener..." + listener = SocketRegisterListener.new(self) + listener.start + + # set auto sync + if @auto_sync_flag.eql? "YES" then + @log.info "Setting auto sync..." + autosync = DistSync.new(self) + autosync.start + end + + # set password + @passwd = passwd + + # main loop + @log.info "Entering main loop..." + if @test_time > 0 then start_time = Time.now end + while( not @finish ) + # sleep + if @test_time > 0 then + curr_time = Time.now + if (curr_time - start_time).to_i > @test_time then + puts "Test time is elapsed!" + break + end else - @log.error( "Can't find [#{pkg}] in source package", Log::LV_USER) + sleep 1 end - end - end + end + end + + # stop server daemon + def stop( port, passwd ) + # set port number. default port is 3333 + @port = port + @finish = false + + client = BuildCommClient.create("127.0.0.1", @port, @log) + if client.nil? then + raise RuntimeError, "Server does not listen in #{@port} port" + end + + client.send("STOP|#{passwd}") + + ret = client.receive_data + if ret[0].strip.eql? "SUCC" then + @log.output( "Package server is stopped", Log::LV_USER) + else + @log.output( "Package server return error message : #{ret}", Log::LV_USER) + end + client.terminate + + end def self.list_id - @@log = PackageServerLog.new( "#{PackageServerConfig::SERVER_ROOT}/.log" ) + @@log = PackageServerLog.new("#{SERVER_ROOT}/.log") - d = Dir.new( PackageServerConfig::SERVER_ROOT ) + d = Dir.new( SERVER_ROOT ) s = d.select {|f| not f.start_with?(".") } s.sort! 
+ server_list = [] @@log.output( "=== server ID list ===", Log::LV_USER) s.each do |id| + if File.extname(id).eql?(".log") then next end + + server_list.push id @@log.output( id, Log::LV_USER) end + @@log.close + FileUtils.rm_rf("#{SERVER_ROOT}/.log") + + return server_list end def self.list_dist( id ) - @@log = PackageServerLog.new( "#{PackageServerConfig::SERVER_ROOT}/.log" ) + @@log = PackageServerLog.new( "#{SERVER_ROOT}/.log" ) @@log.output( "=== ID [#{id}]'s distribution list ===", Log::LV_USER) + dist_list = [] + # read package id information - if File.exist? "#{PackageServerConfig::SERVER_ROOT}/#{id}/config" then - File.open "#{PackageServerConfig::SERVER_ROOT}/#{id}/config" do |f| - f.each_line do |l| - if l.start_with?( "server_url : ") and l.include?( "->" ) then - @@log.output( l.split(" : ")[1].split("->")[0], Log::LV_USER) - end - end - end - else + config_file_path = "#{SERVER_ROOT}/#{id}/config" + if not File.exist? config_file_path raise RuntimeError, "[#{id}] is not server ID" end + + File.open config_file_path do |f| + f.each_line do |l| + if l.start_with?( "server_url : ") and l.include?( "->" ) then + dist_name = l.split(" : ")[1].split("->")[0] + + dist_list.push dist_name + @@log.output( dist_name, Log::LV_USER) + end + end + end + @@log.close + FileUtils.rm_rf("#{SERVER_ROOT}/.log") + + return dist_list end def get_default_dist_name() @@ -447,13 +566,28 @@ class PackageServer return @distribution_list[0].name end + def reload_dist_package() + # create locking file + lock_file = Utils.file_lock(@server_lock_file_path) + @distribution_list.each do |dist| + dist.initialize_pkg_list + end + Utils.file_unlock(lock_file) + end + + def release_lock_file + if not @lock_file.nil? then + Utils.file_unlock(@lock_file) + end + end + # PRIVATE METHODS/VARIABLES private def server_information_initialize # if id is nil or empty then find default id if @id.nil? or @id.empty? - d = Dir.new( PackageServerConfig::SERVER_ROOT ) + d = Dir.new( SERVER_ROOT ) s = d.select {|f| not f.start_with?(".") } if s.length.eql? 1 then @log.info "using default server ID [#{s[0]}]" @@ -464,29 +598,40 @@ class PackageServer end # read package id information - if File.exist? PackageServerConfig::SERVER_ROOT and File.exist? "#{PackageServerConfig::SERVER_ROOT}/#{@id}/config" then - File.open "#{PackageServerConfig::SERVER_ROOT}/#{@id}/config" do |f| + if File.exist? 
@config_file_path + File.open @config_file_path do |f| f.each_line do |l| - if l.start_with?( "location : ") then - @location = l.split(" : ")[1].strip - elsif l.start_with?( "integrity check : ") then - @integrity = l.split(" : ")[1].strip - elsif l.start_with?( "server_url : " ) then - info = l.split(" : ")[1].split("->") + if l.start_with?( "location :") then + @location = l.split(" :")[1].strip + elsif l.start_with?( "integrity check :") then + @integrity = l.split(" :")[1].strip.upcase + elsif l.start_with?( "auto sync :" ) then + @auto_sync_flag = l.split(" :")[1].strip.upcase + elsif l.start_with?( "sync interval :" ) then + @sync_interval = l.split(" :")[1].strip.to_i + elsif l.start_with?( "server_url :" ) then + info = l.split(" :")[1].split("->") @dist_to_server_url[info[0].strip] = info[1].strip else @log.error "server config file has invalid information [#{l}]" end end end - end - @dist_to_server_url.each do |dist_name, server_url| - @distribution_list.push Distribution.new( dist_name, "#{@location}/#{dist_name}", server_url, self ) + @dist_to_server_url.each do |dist_name, server_url| + @distribution_list.push Distribution.new( dist_name, "#{@location}/#{dist_name}", server_url, self ) + end end end def get_distribution( dist_name ) + if dist_name.nil? or dist_name.empty? then + dist_name = get_default_dist_name() + end + if dist_name.empty? then + raise RuntimeError,"Can't find distribution information" + end + @distribution_list.each do |dist| if dist.name.eql? dist_name.strip return dist @@ -497,12 +642,19 @@ class PackageServer end def create_distribution_struct( dist_name, server_url ) + if File.exist? "#{@location}/#{dist_name}" + raise RuntimeError, "distribution directory already exist [#{@location}/#{dist_name}]" + end + FileUtils.mkdir "#{@location}/#{dist_name}" FileUtils.mkdir "#{@location}/#{dist_name}/binary" FileUtils.mkdir "#{@location}/#{dist_name}/source" FileUtils.mkdir "#{@location}/#{dist_name}/temp" FileUtils.mkdir "#{@location}/#{dist_name}/snapshots" - + File.open("#{@location}/#{dist_name}/#{Distribution::SNAPSHOT_INFO_FILE}", "w") {} + File.open("#{@location}/#{dist_name}/#{Distribution::OS_INFO_FILE}", "w") {} + File.open("#{@location}/#{dist_name}/#{Distribution::ARCHIVE_PKG_FILE}", "w") {} + # generate distribution distribution = Distribution.new( dist_name, "#{@location}/#{dist_name}", server_url, self ) @@ -517,20 +669,64 @@ class PackageServer else @log.info "[#{dist_name}] distribution creation. using local server [#{server_url}]" end - - distribution.sync( false, "linux" ) - distribution.sync( false, "windows" ) - distribution.sync( false, "darwin" ) - distribution.sync_archive_pkg + + distribution.sync(false) + distribution.generate_snapshot("", "", false) else @log.info "generate package server do not using remote package server" # write_pkg_list for empty file - distribution.write_pkg_list( "linux" ) - distribution.write_pkg_list( "windows" ) - distribution.write_pkg_list( "darwin" ) - distribution.write_archive_pkg_list( "" ) + distribution.write_pkg_list(nil) + distribution.write_archive_pkg_list() + end + + # add dist information to distribution.info file + File.open("#{@location}/distribution.info", "a") do |f| + f.puts "name : #{dist_name}" + f.puts "time : #{Time.now.strftime("%Y%m%d%H%M%S")}" + f.puts end end -end + def register_package(distribution, pkg, file_path, test_flag, internal_flag) + # get package class using bianry file + if pkg.nil? or pkg.package_name.empty? 
then + raise "[#{file_path}]'s pkginfo.manifest file is incomplete." + end + + if not test_flag then + # error check + if pkg.package_name.empty? or pkg.version.empty? \ + or pkg.os.empty? or pkg.maintainer.empty? then + raise "[#{file_path}]'s pkginfo.manifest file is incomplete." + end + + updated_pkg = distribution.register(file_path, pkg, internal_flag ) + else + updated_pkg = distribution.register_for_test(file_path, pkg ) + end + + return updated_pkg + end + + def add_dist_for_config_file(dist_name, server_url, clone) + File.open( @config_file_path, "a" ) do |f| + if clone then + @log.info "add distribution using [#{server_url}] in clone mode" + f.puts "server_url : #{dist_name} -> " + else + @log.info "add distribution using [#{server_url}]" + f.puts "server_url : #{dist_name} -> #{server_url}" + end + end + end + + def update_config_information(id) + @id = id + @config_dir = "#{SERVER_ROOT}/#{@id}" + @log_file_path = "#{SERVER_ROOT}/#{@id}.log" + @config_file_path = "#{@config_dir}/config" + @incoming_path = "#{@config_dir}/incoming" + @server_lock_file_path = "#{@config_dir}/.server_lock" + end +end diff --git a/src/pkg_server/packageServerConfig.rb b/src/pkg_server/packageServerConfig.rb index 7e19000..b2b447a 100644 --- a/src/pkg_server/packageServerConfig.rb +++ b/src/pkg_server/packageServerConfig.rb @@ -31,6 +31,4 @@ require "utils" class PackageServerConfig CONFIG_ROOT = "#{Utils::HOME}/.build_tools" - SERVER_ROOT = "#{PackageServerConfig::CONFIG_ROOT}/pkg_server" - LOCK_FILE = "#{PackageServerConfig::SERVER_ROOT}/.server_loc" end diff --git a/src/pkg_server/serverOptParser.rb b/src/pkg_server/serverOptParser.rb index da06a21..fed7ace 100644 --- a/src/pkg_server/serverOptParser.rb +++ b/src/pkg_server/serverOptParser.rb @@ -31,19 +31,21 @@ $LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common" require "utils" def set_default( options ) - if options[:id].nil? then options[:id] = "" end - if options[:dist].nil? then options[:dist] = "" end - if options[:url].nil? then options[:url] = "" end - if options[:os].nil? then options[:os] = "all" end - if options[:bpkgs].nil? then options[:bpkgs] = [] end - if options[:apkgs].nil? then options[:apkgs] = [] end - if options[:spkgs].nil? then options[:spkgs] = [] end - if options[:snap].nil? then options[:snap] = "" end - if options[:bsnap].nil? then options[:bsnap] = "" end - if options[:gensnap].nil? then options[:gensnap] = false end - if options[:force].nil? then options[:force] = false end - if options[:test].nil? then options[:test] = false end - if options[:clone].nil? then options[:clone] = false end + options[:id] = "" + options[:dist] = "" + options[:url] = "" + options[:os] = "all" + options[:pkgs] = [] + options[:snaps] = [] + options[:bsnap] = "" + options[:port] = "3333" + options[:gensnap] = true + options[:force] = false + options[:test] = false + options[:clone] = false + options[:origin_pkg_name] = "" + options[:origin_pkg_os] = "" + options[:passwd] = "" end def option_error_check( options ) @@ -51,57 +53,99 @@ def option_error_check( options ) case options[:cmd] when "create" if options[:id].empty? or options[:dist].empty? then - raise ArgumentError, "Usage: pkg-svr create -i -d [-u ] [-l ] " - end - when "remove-pkg" - if options[:bpkgs].empty? then - raise ArgumentError, "pkg-svr remove-pkg -i -d -p [-o ]" + "\n" \ - end - when "spkg-path" - if options[:spkgs].empty? 
then - raise ArgumentError, "Usage: pkg-svr spkg-name -i -d -s " + raise ArgumentError, "Usage: pkg-svr create -n -d [-u ] [-l ] " end when "remove" if options[:id].empty? then - raise ArgumentError, "Usage: pkg-svr remove -i " + raise ArgumentError, "Usage: pkg-svr remove -n " + end + when "remove-pkg" + if options[:pkgs].empty? then + raise ArgumentError, "Usage: pkg-svr remove-pkg -n -d -P [-o ]" + "\n" \ + end + when "remove-snapshot" + if options[:snaps].empty? then + raise ArgumentError, "Usage: pkg-svr remove-snapshot -n -d -s " end when "add-dist" if options[:id].empty? or options[:dist].empty? then - raise ArgumentError, "Usage: pkg-svr add-dist -i -d [-u ] [-c] " + raise ArgumentError, "Usage: pkg-svr add-dist -n -d [-u ] [--clone] " + end + when "add-os" + if options[:os].empty? then + raise ArgumentError, "Usage: pkg-svr add-os -n -d -o ] " + end + when "remove-dist" + if options[:id].empty? or options[:dist].empty? then + raise ArgumentError, "Usage: pkg-svr remove-dist -n -d " end when "register" - if options[:bpkgs].empty? and options[:spkgs].empty? then - raise ArgumentError, "Usage: pkg-svr register -i -d -p -s [-g] [-t] " + if options[:pkgs].empty? then + raise ArgumentError, "Usage: pkg-svr register -n -d -P [--gen] [--test] " end - when "remove" when "gen-snapshot" + if options[:snaps].empty? then + raise ArgumentError, "Usage: pkg-svr gen-snapshot -n -d -s [-b ]" + end + when "start" + if options[:port].empty? then + raise ArgumentError, "Usage: pkg-svr start -n -p [-w ]" + end + when "stop" + if options[:port].empty? then + raise ArgumentError, "Usage: pkg-svr stop -n -p [-w ]" + end when "sync" when "list" + when "clean" else - raise ArgumentError, "input option incorrect : #{options[:cmd]}" + raise ArgumentError, "Input is incorrect : #{options[:cmd]}" end end def option_parse options = {} - banner = "Usage: pkg-svr {create|register|gen-snapshot|sync|add-dist|spkg-path|remove|remove-pkg|list|help} ..." + "\n" \ - + "\t" + "pkg-svr create -i -d [-u ] [-l ] " + "\n" \ - + "\t" + "pkg-svr add-dist -i -d [-u ] [-c] " + "\n" \ - + "\t" + "pkg-svr remove -i " + "\n" \ - + "\t" + "pkg-svr register -i -d -p -s [-g] [-t] " + "\n" \ - + "\t" + "pkg-svr remove-pkg -i -d -p [-o ] " + "\n" \ - + "\t" + "pkg-svr gen-snapshot -i -d [-n ] [-b ] [-p ] " + "\n" \ - + "\t" + "pkg-svr sync -i -d [-f] " + "\n" \ - + "\t" + "pkg-svr spkg-path -i -d -s " + "\n" \ - + "\t" + "pkg-svr list [-i ] " + "\n" - - optparse = OptionParser.new do|opts| + banner = "Package-server administer service command-line tool." + "\n" \ + + "\n" + "Usage: pkg-svr [OPTS] or pkg-svr (-h|-v)" + "\n" \ + + "\n" + "Subcommands:" + "\n" \ + + "\t" + "create Create a package-server." + "\n" \ + + "\t" + "add-dist Add a distribution to package-server." + "\n" \ + + "\t" + "add-os Add supported os." + "\n" \ + + "\t" + "register Register a package in package-server." + "\n" \ + + "\t" + "remove Remove a package-server." + "\n" \ + + "\t" + "remove-dist Remove a distribution to package-server." + "\n" \ + + "\t" + "remove-pkg Remove a package in package-server." + "\n" \ + + "\t" + "remove-snapshot Remove a snapshot in package-server." + "\n" \ + + "\t" + "gen-snapshot Generate a snapshot in package-server." + "\n" \ + + "\t" + "sync Synchronize the package-server from parent package server." + "\n" \ + + "\t" + "start Start the package-server." + "\n" \ + + "\t" + "stop Stop the package-server." + "\n" \ + + "\t" + "clean Delete unneeded package files in package-server." 
+ "\n" \ + + "\t" + "list Show all pack" + "\n" \ + + "\n" + "Subcommand usage:" + "\n" \ + + "\t" + "pkg-svr create -n -d [-u ] [-l ] " + "\n" \ + + "\t" + "pkg-svr add-dist -n -d [-u ] [--clone] " + "\n" \ + + "\t" + "pkg-svr add-os -n -d -o " + "\n" \ + + "\t" + "pkg-svr register -n -d -P [--gen] [--test] " + "\n" \ + + "\t" + "pkg-svr remove -n " + "\n" \ + + "\t" + "pkg-svr remove-dist -n -d " + "\n" \ + + "\t" + "pkg-svr remove-pkg -n -d -P [-o ] " + "\n" \ + + "\t" + "pkg-svr remove-snapshot -n -d -s " + "\n" \ + + "\t" + "pkg-svr gen-snapshot -n -d -s [-b ] " + "\n" \ + + "\t" + "pkg-svr sync -n -d [--force] " + "\n" \ + + "\t" + "pkg-svr clean -n -d [-s ] " + "\n" \ + + "\t" + "pkg-svr start -n -p [-w ]" + "\n" \ + + "\t" + "pkg-svr stop -n -p [-w ]" + "\n" \ + + "\t" + "pkg-svr list [-n ] " + "\n" \ + + "\n" + "Options:" + "\n" + + optparse = OptionParser.new(nil, 32, ' '*8) do|opts| # Set a banner, displayed at the top # of the help screen. opts.banner = banner - opts.on( '-i', '--id ', 'package server id' ) do|name| + opts.on( '-n', '--name ', 'package server name' ) do|name| options[:id] = name end @@ -109,7 +153,7 @@ def option_parse options[:dist] = dist end - opts.on( '-u', '--url ', 'remote server address' ) do|url| + opts.on( '-u', '--url ', 'remote server url: http://127.0.0.1/dibs/unstable' ) do|url| options[:url] = url end @@ -117,80 +161,89 @@ def option_parse options[:os] = os end - opts.on( '-p', '--bpackage ', 'binary package file path list' ) do|bpkgs| - options[:bpkgs] = [] - list = bpkgs.tr(" \t","").split(",") + opts.on( '-P', '--pkgs ', 'package file path list' ) do|pkgs| + if not Utils.multi_argument_test( pkgs, "," ) then + raise ArgumentError, "Package variable parsing error : #{pkgs}" + end + list = pkgs.tr(" \t","").split(",") list.each do |l| - # TODO: is not working - #reg = Regexp.new(l) - #Dir.entries(Utils::WORKING_DIR).select{|x| x =~ reg}.each do |ls| - # options[:bpkgs].push ls - #end if l.start_with? "~" then l = Utils::HOME + l.delete("~") end - options[:bpkgs].push l + options[:pkgs].push l end end - opts.on( '-s', '--spackage ', 'source package file path ' ) do|spkgs| - options[:spkgs] = [] - list = spkgs.tr(" \t","").split(",") - list.each do |l| - if l.start_with? 
"~" then l = Utils::HOME + l.delete("~") end - options[:spkgs].push l - end + opts.on( '-s', '--snapshot ', 'a snapshot name or snapshot list' ) do|snaplist| + if not Utils.multi_argument_test( snaplist, "," ) then + raise ArgumentError, "Snapshot variable parsing error : #{snaplist}" + end + options[:snaps] = snaplist.split(",") end - - opts.on( '-g', '--generate', 'snapshot is generate' ) do - options[:gensnap] = true - end - opts.on( '-n', '--sname ', 'snapshot name' ) do|snap| - options[:snap] = snap - end - - opts.on( '-b', '--bsnapshot ', 'base snapshot name' ) do|bsnap| + opts.on( '-b', '--base ', 'base snapshot name' ) do|bsnap| options[:bsnap] = bsnap end - opts.on( '-l', '--location ', 'server location' ) do|loc| + opts.on( '-l', '--loc ', 'server location' ) do|loc| options[:loc] = loc end - opts.on( '-f', '--force', 'force update pkg file' ) do - options[:force] = true + opts.on( '-p', '--port ', 'port number' ) do|port| + options[:port] = port end - opts.on( '-t', '--test', 'upload for test' ) do - options[:test] = true + opts.on( '-w', '--passwd ', 'password for package server' ) do|passwd| + options[:passwd] = passwd end - - opts.on( '-c', '--clone', 'clone mode' ) do + + opts.on( '--clone', 'clone mode' ) do options[:clone] = true end + + opts.on( '--force', 'force update pkg file' ) do + options[:force] = true + end - opts.on( '-h', '--help', 'display this information' ) do + opts.on( '--test', 'upload for test' ) do + options[:test] = true + end + + opts.on( '--gen', 'generate snapshot' ) do + options[:gensnap] = true + end + + opts.on( '-h', '--help', 'display help' ) do puts opts exit end - + + opts.on( '-v', '--version', 'display version' ) do + puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version() + exit + end end cmd = ARGV[0] - if cmd.eql? "create" or cmd.eql? "register" or cmd.eql? "sync" \ - or cmd.eql? "gen-snapshot" or cmd.eql? "add-dist" \ - or cmd.eql? "spkg-path" or cmd.eql? "remove" \ - or cmd.eql? "list" or cmd.eql? "remove-pkg" or cmd =~ /(help)|(-h)|(--help)/ then + if cmd.eql? "create" or cmd.eql? "sync" \ + or cmd.eql? "register" \ + or cmd.eql? "gen-snapshot" \ + or cmd.eql? "add-dist" or cmd.eql? "add-os" \ + or cmd.eql? "remove" or cmd.eql? "remove-dist" \ + or cmd.eql? "remove-pkg" or cmd.eql? "remove-snapshot" \ + or cmd.eql? "start" or cmd.eql? "stop" or cmd.eql? "clean" \ + or cmd.eql? "list" \ + or cmd =~ /(-v)|(--version)/ \ + or cmd =~ /(help)|(-h)|(--help)/ then if cmd.eql? "help" then ARGV[0] = "-h" end options[:cmd] = ARGV[0] else - raise ArgumentError, banner + raise ArgumentError, "Usage: pkg-svr [OPTS] or pkg-svr -h" end - optparse.parse! - # default value setting set_default options + + optparse.parse! # option error check option_error_check options diff --git a/test/a/a b/test/a/a deleted file mode 100644 index 7898192..0000000 --- a/test/a/a +++ /dev/null @@ -1 +0,0 @@ -a diff --git a/test/a/package/build.linux b/test/a/package/build.linux deleted file mode 100755 index e056656..0000000 --- a/test/a/package/build.linux +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -e - -clean () -{ - rm -rf $SRCDIR/*.zip - rm -rf $SRCDIR/*.tar.gz -} - -build() -{ - if [ "`cat $ROOTDIR/b`" = "b" ] - then - echo "A: `cat $ROOTDIR/b` == b ... ok" - else - echo "A: `cat $ROOTDIR/b` != b ... fail" - exit 1 - fi - - if [ "`cat $ROOTDIR/c`" = "ca" ] - then - echo "A: `cat $ROOTDIR/c` == ca ... ok" - else - echo "A: `cat $ROOTDIR/c` != ca ... 
fail" - exit 1 - fi -} - -install() -{ - mkdir -p $SRCDIR/package/a.package.linux/data - cp $SRCDIR/a $SRCDIR/package/a.package.linux/data -} - -$1 -echo "$1 success" diff --git a/test/a/package/pkginfo.manifest b/test/a/package/pkginfo.manifest deleted file mode 100644 index bb9c561..0000000 --- a/test/a/package/pkginfo.manifest +++ /dev/null @@ -1,7 +0,0 @@ -Package: a -Version: 11 -OS: linux -Maintainer: xxx -Build-host-os: linux -Build-dependency: b [linux], c [linux] -Source: a diff --git a/test/b/b b/test/b/b deleted file mode 100644 index 6178079..0000000 --- a/test/b/b +++ /dev/null @@ -1 +0,0 @@ -b diff --git a/test/b/package/build.linux b/test/b/package/build.linux deleted file mode 100755 index 83e5a6c..0000000 --- a/test/b/package/build.linux +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -e - -clean () -{ - rm -rf $SRCDIR/*.zip - rm -rf $SRCDIR/*.tar.gz -} - -build() -{ - if [ "`cat $ROOTDIR/c`" = "ca" ] - then - echo "B: `cat $ROOTDIR/c` == ca ... ok" - else - echo "B: `cat $ROOTDIR/c` != ca ... fail" - exit 1 - fi -} - -install() -{ - mkdir -p $SRCDIR/package/b.package.linux/data - cp $SRCDIR/b $SRCDIR/package/b.package.linux/data -} - -$1 -echo "$1 success" diff --git a/test/b/package/pkginfo.manifest b/test/b/package/pkginfo.manifest deleted file mode 100644 index a164fd8..0000000 --- a/test/b/package/pkginfo.manifest +++ /dev/null @@ -1,7 +0,0 @@ -Package: b -Version: 11 -OS: linux -Maintainer: xxx -Build-host-os: linux -Build-dependency: c [linux] -Source: b diff --git a/test/bin/bin_0.0.0_linux.zip b/test/bin/bin_0.0.0_linux.zip new file mode 100644 index 0000000..9880da1 Binary files /dev/null and b/test/bin/bin_0.0.0_linux.zip differ diff --git a/test/bin/bin_0.0.0_ubuntu-32.zip b/test/bin/bin_0.0.0_ubuntu-32.zip new file mode 100644 index 0000000..d462bfe Binary files /dev/null and b/test/bin/bin_0.0.0_ubuntu-32.zip differ diff --git a/test/bin/bin_0.0.1_linux.zip b/test/bin/bin_0.0.1_linux.zip new file mode 100644 index 0000000..c6a9588 Binary files /dev/null and b/test/bin/bin_0.0.1_linux.zip differ diff --git a/test/bin/bin_0.0.1_ubuntu-32.zip b/test/bin/bin_0.0.1_ubuntu-32.zip new file mode 100644 index 0000000..9c21aca Binary files /dev/null and b/test/bin/bin_0.0.1_ubuntu-32.zip differ diff --git a/test/bin/src.tar.gz b/test/bin/src.tar.gz new file mode 100644 index 0000000..c41dcd8 Binary files /dev/null and b/test/bin/src.tar.gz differ diff --git a/test/build-cli-01.testcase b/test/build-cli-01.testcase index 6e47b8c..f3c8c50 100644 --- a/test/build-cli-01.testcase +++ b/test/build-cli-01.testcase @@ -3,14 +3,38 @@ ../build-cli -h #POST-EXEC #EXPECT -Usage: build-cli {build|resolve|query} ... - build-cli build -g -c [-d ] [-p ] [-o ] [-a ] - build-cli resolve -g -c [-d ] [-p ] [-o ] [-a ] - build-cli query [-d ] [-p ] - -g, --git git repository - -c, --commit git commit id/tag - -d, --domain remote build server ip address. default 127.0.0.1 - -p, --port remote build server port. default 2222 - -o, --os target operating system linux/windows/darwin - -a, --async asynchronous job - -h, --help display this information +Requiest service to build-server command-line tool. + +Usage: build-cli [OPTS] or build-cli (-h|-v) + +Subcommands: +build Build and create package. +resolve Request change to resolve-status for build-conflict. +query Query information about build-server. +query-system Query system information about build-server. +query-project Query project information about build-server. +query-job Query job information about build-server. 
+cancel Cancel a building project. +register Register the package to the build-server. + +Subcommand usage: +build-cli build -N -d [-o ] [-w ] [--async] +build-cli resolve -N -d [-o ] [-w ] [--async] +build-cli query -d +build-cli query-system -d +build-cli query-project -d +build-cli query-job -d +build-cli cancel -j -d [-w ] +build-cli register -P -d -t [-w ] + +Options: +-N, --project project name +-d, --address build server address: 127.0.0.1:2224 +-o, --os target operating system: ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64 +--async asynchronous job +-j, --job job number +-w, --passwd password for managing project +-P, --pkg package file path +-t, --ftp ftp server url: ftp://dibsftp:dibsftp@127.0.0.1 +-h, --help display help +-v, --version display version diff --git a/test/build-cli-02.testcase b/test/build-cli-02.testcase index c49e5d1..4b13c12 100644 --- a/test/build-cli-02.testcase +++ b/test/build-cli-02.testcase @@ -1,16 +1,29 @@ #PRE-EXEC -mkdir buildsvr01 -cd buildsvr01; ../../build-svr create -n testserver3 -u http://172.21.111.132/emptyserver/unstable -d pkgserver -i emptyserver -../build-svr start -n testserver3 -p 2223 & -sleep 1 #EXEC -../build-cli query -d 127.0.0.1 -p 2223 +../build-cli query -d 127.0.0.1:2223 #POST-EXEC -../build-svr stop -n testserver3 -sleep 1 -../build-svr remove -n testserver3 -rm -rf buildsvr01 #EXPECT -HOST-OS: -MAX_WORKING_JOBS: -* JOB * +* SYSTEM INFO * +HOST-OS: ubuntu-32 +MAX_WORKING_JOBS: 2 + +* FTP * +FTP_ADDR: +FTP_USERNAME: + +* SUPPORTED OS LIST * +ubuntu-32 +windows-32 + +* FRIEND SERVER LIST (WAIT|WORK/MAX) jobs [transfer count] * + + +* PROJECT(S) * +testa NORMAL +testa1 NORMAL +testb NORMAL +testc NORMAL +testd NORMAL +teste REMOTE + +* JOB(S) * diff --git a/test/build-cli-03.testcase b/test/build-cli-03.testcase index c49e5d1..4641ffb 100644 --- a/test/build-cli-03.testcase +++ b/test/build-cli-03.testcase @@ -1,16 +1,30 @@ #PRE-EXEC -mkdir buildsvr01 -cd buildsvr01; ../../build-svr create -n testserver3 -u http://172.21.111.132/emptyserver/unstable -d pkgserver -i emptyserver -../build-svr start -n testserver3 -p 2223 & -sleep 1 #EXEC -../build-cli query -d 127.0.0.1 -p 2223 +../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32 #POST-EXEC -../build-svr stop -n testserver3 -sleep 1 -../build-svr remove -n testserver3 -rm -rf buildsvr01 #EXPECT -HOST-OS: -MAX_WORKING_JOBS: -* JOB * +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... a_0.0.1_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! +Info: Job is FINISHED successfully! 
+Info: Updating the source info for project "testa" diff --git a/test/build-cli-03_1.testcase b/test/build-cli-03_1.testcase new file mode 100644 index 0000000..f082adf --- /dev/null +++ b/test/build-cli-03_1.testcase @@ -0,0 +1,28 @@ +#PRE-EXEC +echo "This is the test case for omitting os" +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a +#EXEC +../build-cli build -N testa -d 127.0.0.1:2223 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... a_0.0.1_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! diff --git a/test/build-cli-04.testcase b/test/build-cli-04.testcase new file mode 100644 index 0000000..e990744 --- /dev/null +++ b/test/build-cli-04.testcase @@ -0,0 +1,7 @@ +#PRE-EXEC +#EXEC +../build-cli build -N non_exist_project -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Error: Requested project does not exist! +Info: Check project name using "query" command option ! diff --git a/test/build-cli-05.testcase b/test/build-cli-05.testcase new file mode 100644 index 0000000..039eb37 --- /dev/null +++ b/test/build-cli-05.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../build-cli build -N testa -d 127.0.0.1:11113 -o ubuntu-32 +#POST-EXEC +#EXPECT +Connection to server failed! diff --git a/test/build-cli-06.testcase b/test/build-cli-06.testcase new file mode 100644 index 0000000..308410f --- /dev/null +++ b/test/build-cli-06.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../build-cli build -N testa -d 111.11q.111.111:1111 -o ubuntu-32 +#POST-EXEC +#EXPECT +Connection to server failed! diff --git a/test/build-cli-07.testcase b/test/build-cli-07.testcase new file mode 100644 index 0000000..d59ca4c --- /dev/null +++ b/test/build-cli-07.testcase @@ -0,0 +1,11 @@ +#PRE-EXEC +echo "testa project is already built and uploaded in previeous testcase" +#EXEC +../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Error: Version must be increased : +Error: Job is stopped by ERROR diff --git a/test/build-cli-08.testcase b/test/build-cli-08.testcase new file mode 100644 index 0000000..ff51d73 --- /dev/null +++ b/test/build-cli-08.testcase @@ -0,0 +1,32 @@ +#PRE-EXEC +echo "Assume testa project is already built and uploaded in previeous testcase" +#EXEC +../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: * a +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... 
b_0.0.1_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! +Info: Job is FINISHED successfully! +Info: Updating the source info for project "testb" diff --git a/test/build-cli-09.testcase b/test/build-cli-09.testcase new file mode 100644 index 0000000..676cd46 --- /dev/null +++ b/test/build-cli-09.testcase @@ -0,0 +1,19 @@ +#PRE-EXEC +echo "if build-dep package does not exist in server, will show the error" +echo "Assume testa/testb project is already built and uploaded in previeous testcase" +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P b +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a +#EXEC +../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Error: Unmet dependency found! +Error: * a(ubuntu-32) for build-dependency +Error: Job is stopped by ERROR diff --git a/test/build-cli-10.testcase b/test/build-cli-10.testcase new file mode 100644 index 0000000..7ca3ce7 --- /dev/null +++ b/test/build-cli-10.testcase @@ -0,0 +1,32 @@ +#PRE-EXEC +echo "This is the test case for omitting os" +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a +#EXEC +../build-cli build -N testa -d 127.0.0.1:2223 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... a_0.0.1_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! +Info: Job is FINISHED successfully! +Info: Updating the source info for project "testa" diff --git a/test/build-cli-11.testcase b/test/build-cli-11.testcase new file mode 100644 index 0000000..87e1de1 --- /dev/null +++ b/test/build-cli-11.testcase @@ -0,0 +1,11 @@ +#PRE-EXEC +echo "if there doe not exist server to build, error" +#EXEC +../build-cli build -N testa -d 127.0.0.1:2223 -o windows-32 +#POST-EXEC +#EXPECT +Info: Added new job "5" for windows-32! +Info: Initializing job... +Error: No servers that are able to build your packages. +Error: Host-OS (windows-32) is not supported in build server. +Error: Job is stopped by ERROR diff --git a/test/build-cli-12.testcase b/test/build-cli-12.testcase new file mode 100644 index 0000000..7c4154b --- /dev/null +++ b/test/build-cli-12.testcase @@ -0,0 +1,11 @@ +#PRE-EXEC +echo "wrong os name in build command" +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a +#EXEC +../build-cli build -N testa -d 127.0.0.1:2223 -o wrong_os_name +#POST-EXEC +#EXPECT +Error: Unsupported OS name "wrong_os_name" is used! +Error: Check the following supported OS list. 
+* ubuntu-32 +* windows-32 diff --git a/test/build-cli-12_1.testcase b/test/build-cli-12_1.testcase new file mode 100644 index 0000000..efaa7cf --- /dev/null +++ b/test/build-cli-12_1.testcase @@ -0,0 +1,10 @@ +#PRE-EXEC +echo "wrong os name in resolve command" +#EXEC +../build-cli resolve -N testa -d 127.0.0.1:2223 -o wrong_os_name +#POST-EXEC +#EXPECT +Error: Unsupported OS name "wrong_os_name" is used! +Error: Check the following supported OS list. +* ubuntu-32 +* windows-32 diff --git a/test/build-cli-13.testcase b/test/build-cli-13.testcase new file mode 100644 index 0000000..b0bbc1f --- /dev/null +++ b/test/build-cli-13.testcase @@ -0,0 +1,38 @@ +#PRE-EXEC +echo "Assume that testc project has the password (1111)" +echo "Assume that testa,testb which are depended by testc are built and uploaded" +echo "For, work around solution, removed cache" +rm -rf buildsvr01/projects/testa/cache +../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32 +../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32 +#EXEC +../build-cli build -N testc -d 127.0.0.1:2223 -w 1111 -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: * a +Info: * b +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... c_0.0.1_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! +Info: Job is FINISHED successfully! +Info: Updating the source info for project "testc" diff --git a/test/build-cli-14.testcase b/test/build-cli-14.testcase new file mode 100644 index 0000000..de41149 --- /dev/null +++ b/test/build-cli-14.testcase @@ -0,0 +1,9 @@ +#PRE-EXEC +echo "Assume that testc project has the password (1111)" +echo "Assume that testa,testb which are depended by testc are built and uploaded" +#EXEC +../build-cli build -N testc -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Error: Project's password is not matched! +Error: Use -w option to input your project password diff --git a/test/build-cli-15.testcase b/test/build-cli-15.testcase new file mode 100644 index 0000000..523375a --- /dev/null +++ b/test/build-cli-15.testcase @@ -0,0 +1,9 @@ +#PRE-EXEC +echo "Assume that testc project has the password (1111)" +echo "Assume that testa,testb which are depended by testc are built and uploaded" +#EXEC +../build-cli build -N testc -d 127.0.0.1:2223 -w 2222 -o ubuntu-32 +#POST-EXEC +#EXPECT +Error: Project's password is not matched! +Error: Use -w option to input your project password diff --git a/test/build-cli-16.testcase b/test/build-cli-16.testcase new file mode 100644 index 0000000..fd31b9a --- /dev/null +++ b/test/build-cli-16.testcase @@ -0,0 +1,8 @@ +#PRE-EXEC +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P c +#EXEC +../build-cli build -N testc -d 127.0.0.1:2223 -w 1111 --async -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Above job(s) will be processed asynchronously! 
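The asynchronous path exercised by build-cli-16 above only verifies the submission messages; the job itself finishes in the background. A minimal sketch of how a caller could wait for such a job, assuming the --async and query-job behaviour shown in these test cases (an idle server prints nothing below the "* JOB(S) *" header, see build-cli-21 below); this is an illustration only and not part of this change-set:

#!/bin/sh
# Sketch only: submit an asynchronous build (as in build-cli-16) and poll the
# job queue until nothing is listed below the "* JOB(S) *" header (as in
# build-cli-21). Assumes pending jobs are printed one per line under that
# header; project name, password, address and OS follow these fixtures.
SERVER=127.0.0.1:2223
../build-cli build -N testc -d "$SERVER" -w 1111 --async -o ubuntu-32
while true; do
    pending=$(../build-cli query-job -d "$SERVER" | grep -v '^\*' | grep -v '^$')
    [ -z "$pending" ] && break
    sleep 5
done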
diff --git a/test/build-cli-17.testcase b/test/build-cli-17.testcase new file mode 100644 index 0000000..230f75e --- /dev/null +++ b/test/build-cli-17.testcase @@ -0,0 +1,40 @@ +#PRE-EXEC +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P c +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P b +../pkg-svr remove-pkg -n pkgsvr01 -d unstable -P a +#EXEC +../build-cli build -N testa -d 127.0.0.1:2223 --async -o ubuntu-32 +sleep 1 +../build-cli build -N testb -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Above job(s) will be processed asynchronously! +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Waiting for finishing following jobs: +Info: * +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: * a +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... b_0.0.1_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! +Info: Job is FINISHED successfully! +Info: Updating the source info for project "testb" diff --git a/test/build-cli-18.testcase b/test/build-cli-18.testcase new file mode 100644 index 0000000..136daf7 --- /dev/null +++ b/test/build-cli-18.testcase @@ -0,0 +1,31 @@ +#PRE-EXEC +echo "reverse fail" +#EXEC +rm -rf git01/a +cd git01;tar xf a_v2.tar.gz +../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... a_0.0.2_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: * Will check reverse-build for projects: testb(ubuntu-32) +Info: * Added new job for reverse-build ... testb(ubuntu-32) +Info: * Reverse-build FAIL ... 
testb(ubuntu-32) +Error: Job is stopped by ERROR diff --git a/test/build-cli-19.testcase b/test/build-cli-19.testcase new file mode 100644 index 0000000..f48aab6 --- /dev/null +++ b/test/build-cli-19.testcase @@ -0,0 +1,16 @@ +#PRE-EXEC +#EXEC +../build-cli query-system -d 127.0.0.1:2223 +#POST-EXEC +#EXPECT +* SYSTEM INFO * +HOST-OS: +MAX_WORKING_JOBS: + +* FTP * +FTP_ADDR: +FTP_USERNAME: + +* SUPPORTED OS LIST * +ubuntu-32 +windows-32 diff --git a/test/build-cli-20.testcase b/test/build-cli-20.testcase new file mode 100644 index 0000000..7f048f5 --- /dev/null +++ b/test/build-cli-20.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../build-cli query-project -d 127.0.0.1:2223 +#POST-EXEC +#EXPECT +* PROJECT(S) * diff --git a/test/build-cli-21.testcase b/test/build-cli-21.testcase new file mode 100644 index 0000000..bd1503a --- /dev/null +++ b/test/build-cli-21.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../build-cli query-job -d 127.0.0.1:2223 +#POST-EXEC +#EXPECT +* JOB(S) * diff --git a/test/build-cli-22.testcase b/test/build-cli-22.testcase new file mode 100644 index 0000000..6f4e6d5 --- /dev/null +++ b/test/build-cli-22.testcase @@ -0,0 +1,16 @@ +#PRE-EXEC +echo "Trying to upload a_0.0.1 with different commit-id is already uploaded" +rm -rf git01/c +cd git01;tar xf c_v1_1.tar.gz +#EXEC +../build-cli build -N testc -d 127.0.0.1:2223 -o ubuntu-32 -w 1111 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Error: Source code has been changed without increasing version! +Error: * Version : +Error: * Before : +Error: * Current : +Error: Job is stopped by ERROR diff --git a/test/build-cli-23.testcase b/test/build-cli-23.testcase new file mode 100644 index 0000000..9645f35 --- /dev/null +++ b/test/build-cli-23.testcase @@ -0,0 +1,25 @@ +#PRE-EXEC +cd git01;tar xf a_v2.tar.gz +cd git01;tar xf b_v2.tar.gz +cd git01;tar xf c_v2.tar.gz +#EXEC +../build-cli build -N testa,testb,testc -d 127.0.0.1:2223 -o ubuntu-32 -w 1111 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Invoking a thread for MULTI-BUILD Job +Info: New Job +Info: Added new job "testa" for ubuntu-32! +Info: Added new job "testb" for ubuntu-32! +Info: Added new job "testc" for ubuntu-32! +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! diff --git a/test/build-cli-24.testcase b/test/build-cli-24.testcase new file mode 100644 index 0000000..e858d32 --- /dev/null +++ b/test/build-cli-24.testcase @@ -0,0 +1,12 @@ +#PRE-EXEC +echo "This test case must be execute right after testcase 22" +#EXEC +../build-cli build -N testa,testb,testc -d 127.0.0.1:2223 -o ubuntu-32 -w 1111 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Failed to initialize sub-job +Info: Failed to initialize sub-job +Info: Failed to initialize sub-job +Error: Job is stopped by ERROR diff --git a/test/build-cli-25.testcase b/test/build-cli-25.testcase new file mode 100644 index 0000000..a9287f8 --- /dev/null +++ b/test/build-cli-25.testcase @@ -0,0 +1,20 @@ +#PRE-EXEC +cd git01;tar xf a_v3.tar.gz +#EXEC +../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32,windows-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Invoking a thread for MULTI-BUILD Job +Info: New Job +Info: Added new job "testa" for ubuntu-32! +Info: Added new job "testa" for windows-32! 
+Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! diff --git a/test/build-cli-26.testcase b/test/build-cli-26.testcase new file mode 100644 index 0000000..b3c0684 --- /dev/null +++ b/test/build-cli-26.testcase @@ -0,0 +1,45 @@ +#PRE-EXEC +echo "testa, testb: build because of version change" +echo "testc, testa1, testd: rebuild with same version" + +cd git01;tar xf a_v4.tar.gz +cd git01;tar xf b_v4.tar.gz +cd git01;tar xf c_v4.tar.gz +#EXEC +../build-svr fullbuild -n testserver3 +#POST-EXEC +#EXPECT +Info: Initializing job... +Info: Invoking a thread for MULTI-BUILD Job +Info: New Job +Info: Added new job +Info: Added new job +Info: Added new job +Info: Added new job +Info: Added new job +Info: Added new job +Info: Added new job +Info: Added new job +Info: Added new job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: * Sub-Job +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! diff --git a/test/build-cli-27.testcase b/test/build-cli-27.testcase new file mode 100644 index 0000000..b40b9b7 --- /dev/null +++ b/test/build-cli-27.testcase @@ -0,0 +1,7 @@ +#PRE-EXEC +cd git01;tar xf c_v5.tar.gz +#EXEC +../build-cli build -N testc -d 127.0.0.1:2223 -o li_* -w 1111 +#POST-EXEC +#EXPECT +Error: There is no OS supported by the build server. diff --git a/test/build-cli-28.testcase b/test/build-cli-28.testcase new file mode 100644 index 0000000..04cc21e --- /dev/null +++ b/test/build-cli-28.testcase @@ -0,0 +1,33 @@ +#PRE-EXEC +echo "wild card" +#EXEC +../build-cli build -N testc -d 127.0.0.1:2223 -o ubuntu-* -w 1111 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... +Info: Installing dependent packages... +Info: * a +Info: * b +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... c_0.0.5_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! +Info: Job is FINISHED successfully! +Info: Updating the source info for project "testc" diff --git a/test/build-cli-29.testcase b/test/build-cli-29.testcase new file mode 100644 index 0000000..c927bae --- /dev/null +++ b/test/build-cli-29.testcase @@ -0,0 +1,38 @@ +#PRE-EXEC +echo "reverse success" +#EXEC +rm -rf git01/a +cd git01;tar xf a_v5.tar.gz +../build-cli build -N testa -d 127.0.0.1:2223 -o ubuntu-32 +#POST-EXEC +#EXPECT +Info: Added new job +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for building Job +Info: New Job +Info: Checking build dependency ... +Info: Checking install dependency ... +Info: Started to build this job... +Info: JobBuilder +Info: Downloding client is initializing... 
+Info: Installing dependent packages... +Info: Downloading dependent source packages... +Info: Make clean... +Info: Make build... +Info: Make install... +Info: Generatiing pkginfo.manifest... +Info: Zipping... +Info: Creating package file ... a_0.0.5_ubuntu-32.zip +Info: Checking reverse build dependency ... +Info: * Will check reverse-build for projects: +Info: * Added new job for reverse-build ... +Info: * Added new job for reverse-build ... +Info: * Reverse-build OK ... +Info: * Reverse-build OK ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! +Info: Job is FINISHED successfully! +Info: Updating the source info for project "testa" diff --git a/test/build-svr-01.testcase b/test/build-svr-01.testcase new file mode 100644 index 0000000..e345367 --- /dev/null +++ b/test/build-svr-01.testcase @@ -0,0 +1,11 @@ +#PRE-EXEC +rm -rf ~/.build_tools/build_server/testserver3 +rm -rf buildsvr01 +mkdir buildsvr01 +#EXEC +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Created new build server: "testserver3" diff --git a/test/build-svr-02.testcase b/test/build-svr-02.testcase new file mode 100644 index 0000000..cbedb98 --- /dev/null +++ b/test/build-svr-02.testcase @@ -0,0 +1,45 @@ +#PRE-EXEC +#EXEC +../build-svr -h +#POST-EXEC +#EXPECT +Build-server administer service command-line tool. + +Usage: build-svr [OPTS] or build-svr -h + +Subcommands: + create Create the build-server. + remove Remove the build-server. + start Start the build-server. + stop Stop the build-server. + add-svr Add build-server for support multi-OS or distribute build job. + add-prj Register information for project what you want build berfore building a project. + register Register the package to the build-server. + fullbuild Build all your projects and upload them to package server. + +Subcommand usage: + build-svr create -n -u -d -t + build-svr remove -n + build-svr start -n -p + build-svr stop -n + build-svr add-svr -n -d + build-svr add-prj -n -N [-g ] [-b ] [-P ] [-w ] [-o ] + build-svr add-os -n -o + build-svr register -n -P + build-svr fullbuild -n + +Options: + -n, --name build server name + -u, --url package server url: http://127.0.0.1/dibs/unstable + -d, --address server address: 127.0.0.1:2224 + -p, --port server port number: 2224 + -P, --pkg package file path or name + -o, --os ex) linux,windows + -N, --pname project name + -g, --git git repository + -b, --branch git branch + -w, --passwd password for managing project + -t, --ftp ftp server url: ftp://dibsftp:dibsftp@127.0.0.1:1024 + -h, --help display this information + -v, --version display version + diff --git a/test/build-svr-03.testcase b/test/build-svr-03.testcase new file mode 100644 index 0000000..6038be5 --- /dev/null +++ b/test/build-svr-03.testcase @@ -0,0 +1,13 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +../build-svr add-svr -n testserver3 -d 127.0.0.1:2223 +cat ~/.build_tools/build_server/testserver3/friends +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Friend server is added successfully! 
+127.0.0.1,2223 diff --git a/test/build-svr-04.testcase b/test/build-svr-04.testcase new file mode 100644 index 0000000..5a7d65e --- /dev/null +++ b/test/build-svr-04.testcase @@ -0,0 +1,13 @@ +#PRE-EXEC +rm -rf ~/.build_tools/build_server/testserver3 +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +echo "TEST_TIME=3" >> ~/.build_tools/build_server/testserver3/server.cfg +../build-svr start -n testserver3 -p 2223 +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Test time is elapsed! diff --git a/test/build-svr-05.testcase b/test/build-svr-05.testcase new file mode 100644 index 0000000..634cb10 --- /dev/null +++ b/test/build-svr-05.testcase @@ -0,0 +1,14 @@ +#PRE-EXEC +mkdir buildsvr01 +rm -rf ~/.build_tools/build_server/testserver3 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +../build-svr start -n testserver3 -p 2223 & +#EXEC +sleep 1 +../build-svr stop -n testserver3 +sleep 1 +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Server will be down! diff --git a/test/build-svr-06.testcase b/test/build-svr-06.testcase new file mode 100644 index 0000000..b0f302a --- /dev/null +++ b/test/build-svr-06.testcase @@ -0,0 +1,10 @@ +#PRE-EXEC +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +../build-svr stop -n testserver3 +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Server is not running! diff --git a/test/build-svr-07.testcase b/test/build-svr-07.testcase new file mode 100644 index 0000000..856e52b --- /dev/null +++ b/test/build-svr-07.testcase @@ -0,0 +1,9 @@ +#PRE-EXEC +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +../build-svr remove -n testserver3 +#POST-EXEC +rm -rf buildsvr01 +#EXPECT +Removed the server diff --git a/test/build-svr-08.testcase b/test/build-svr-08.testcase new file mode 100644 index 0000000..c18ffa2 --- /dev/null +++ b/test/build-svr-08.testcase @@ -0,0 +1,12 @@ +#PRE-EXEC +rm -rf ~/.build_tools/build_server/testserver3 +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Creating server failed. The server id is already exist diff --git a/test/build-svr-09.testcase b/test/build-svr-09.testcase new file mode 100644 index 0000000..0554f0b --- /dev/null +++ b/test/build-svr-09.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../build-svr remove -n testserverxx +#POST-EXEC +#EXPECT +does not exist! diff --git a/test/build-svr-10.testcase b/test/build-svr-10.testcase new file mode 100644 index 0000000..126e55f --- /dev/null +++ b/test/build-svr-10.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../build-svr start -n testserverxx +#POST-EXEC +#EXPECT +does not exist! 
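Chained together, the commands that build-svr-01 through build-svr-07 above run one at a time form the complete server lifecycle. A hedged sketch of that flow, assembled from those same commands and reusing the server name, package-server path, ports and FTP URL from the fixtures (illustration only, not part of this change-set):

#!/bin/sh
# Sketch assembled from build-svr-01 .. build-svr-07; assumes the commands
# can be chained in one disposable session.
rm -rf ~/.build_tools/build_server/testserver3 buildsvr01
mkdir buildsvr01
cd buildsvr01
../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132
cd ..
../build-svr add-svr -n testserver3 -d 127.0.0.1:2223    # "Friend server is added successfully!"
../build-svr start -n testserver3 -p 2223 &              # run the server in the background
sleep 1
../build-svr stop -n testserver3                         # "Server will be down!"
sleep 1
../build-svr remove -n testserver3                       # "Removed the server"
rm -rf buildsvr01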
diff --git a/test/build-svr-11.testcase b/test/build-svr-11.testcase new file mode 100644 index 0000000..1632086 --- /dev/null +++ b/test/build-svr-11.testcase @@ -0,0 +1,12 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +../build-svr add-os -n testserver3 -o linux +#EXEC +../build-svr add-prj -n testserver3 -N testa -g test_git -b test_branch +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Adding project succeeded! diff --git a/test/build-svr-12.testcase b/test/build-svr-12.testcase new file mode 100644 index 0000000..69e3cd5 --- /dev/null +++ b/test/build-svr-12.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../build-svr add-prj -n testserverxxx -N testa -g test_git -b test_branch +#POST-EXEC +#EXPECT +does not exist! diff --git a/test/build-svr-13.testcase b/test/build-svr-13.testcase new file mode 100644 index 0000000..110677a --- /dev/null +++ b/test/build-svr-13.testcase @@ -0,0 +1,11 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +../build-svr add-prj -n testserver3 -N testa -g test_git -b test_branch -w 1111 +cat buildsvr01/projects/testa/build | grep PASSWD +#POST-EXEC +#EXPECT +Adding project succeeded! +PASSWD=1111 diff --git a/test/build-svr-14.testcase b/test/build-svr-14.testcase new file mode 100644 index 0000000..0d920e9 --- /dev/null +++ b/test/build-svr-14.testcase @@ -0,0 +1,12 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +../build-svr add-os -n testserver3 -o linux +#EXEC +../build-svr add-prj -n testserver3 -N testx -g test_git -b test_branch -o linux +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Adding project succeeded! diff --git a/test/build-svr-15.testcase b/test/build-svr-15.testcase new file mode 100644 index 0000000..7574a81 --- /dev/null +++ b/test/build-svr-15.testcase @@ -0,0 +1,19 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +../build-svr register -n testserver3 -P bin/bin_0.0.0_linux.zip +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Info: Initializing job... +Info: Checking package version ... +Info: Invoking a thread for REGISTER Job +Info: New Job +Info: Checking reverse build dependency ... +Info: Uploading ... +Info: Upload succeeded. Sync local pkg-server again... +Info: Snapshot: +Info: Job is completed! diff --git a/test/build-svr-16.testcase b/test/build-svr-16.testcase new file mode 100644 index 0000000..d2ade40 --- /dev/null +++ b/test/build-svr-16.testcase @@ -0,0 +1,15 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +../build-svr register -n testserver3 -P bin/bin_0.0.0_linux.zip +#EXEC +../build-svr register -n testserver3 -P bin/bin_0.0.0_linux.zip +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Info: Initializing job... +Info: Checking package version ... 
+Error: Version must be increased : +Error: Job is stopped by ERROR diff --git a/test/build-svr-17.testcase b/test/build-svr-17.testcase new file mode 100644 index 0000000..8f72321 --- /dev/null +++ b/test/build-svr-17.testcase @@ -0,0 +1,13 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +../build-svr add-os -n testserver3 -o linux +cat ~/.build_tools/build_server/testserver3/supported_os_list +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Target OS is added successfully! +linux diff --git a/test/build-svr-18.testcase b/test/build-svr-18.testcase new file mode 100644 index 0000000..21beedd --- /dev/null +++ b/test/build-svr-18.testcase @@ -0,0 +1,13 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +#EXEC +../build-svr add-os -n testserver3 -o linux +../build-svr add-os -n testserver3 -o linux +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Target OS is added successfully! +Target OS already exists in list! diff --git a/test/build-svr-19.testcase b/test/build-svr-19.testcase new file mode 100644 index 0000000..3146917 --- /dev/null +++ b/test/build-svr-19.testcase @@ -0,0 +1,16 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +../build-svr add-os -n testserver3 -o linux +../build-svr add-os -n testserver3 -o windows +#EXEC +../build-svr add-prj -n testserver3 -N new_project -g new_git -b new_branch -o wrong_os_name +#POST-EXEC +../build-svr remove -n testserver3 +rm -rf buildsvr01 +#EXPECT +Unsupported OS name "wrong_os_name" is used! +Check the following supported OS list: + * linux + * windows diff --git a/test/build-svr-20.testcase b/test/build-svr-20.testcase new file mode 100644 index 0000000..460e079 --- /dev/null +++ b/test/build-svr-20.testcase @@ -0,0 +1,20 @@ +#PRE-EXEC +rm -rf buildsvr01 +mkdir buildsvr01 +cd buildsvr01; ../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://dibsftp:coreps2@172.21.111.132 +../build-svr add-os -n testserver3 -o linux +cp bin/bin_0.0.0_linux.zip bin/bin_0.0.0_wrongosname.zip +../build-svr start -n testserver3 -p 2223 & +#EXEC +sleep 1 +../build-svr register -n testserver3 -P bin/bin_0.0.0_wrongosname.zip +#POST-EXEC +../build-svr stop -n testserver3 +sleep 1 +../build-svr remove -n testserver3 +rm -rf buildsvr01 +rm -rf bin/bin/bin_0.0.0_wrongosname.zip +#EXPECT +Info: Initializing job... +Error: Unsupported OS "wrongosname" is used! 
+Error: Job is stopped by ERROR diff --git a/test/buildcli.testsuite b/test/buildcli.testsuite index 06947c4..9904eb3 100644 --- a/test/buildcli.testsuite +++ b/test/buildcli.testsuite @@ -1,2 +1,30 @@ build-cli-01.testcase build-cli-02.testcase +build-cli-03.testcase +build-cli-04.testcase +build-cli-05.testcase +build-cli-06.testcase +build-cli-07.testcase +build-cli-08.testcase +build-cli-09.testcase +build-cli-10.testcase +build-cli-11.testcase +build-cli-12.testcase +build-cli-12_1.testcase +build-cli-13.testcase +build-cli-14.testcase +build-cli-15.testcase +build-cli-16.testcase +build-cli-17.testcase +build-cli-18.testcase +build-cli-19.testcase +build-cli-20.testcase +build-cli-21.testcase +build-cli-22.testcase +build-cli-23.testcase +build-cli-24.testcase +build-cli-25.testcase +build-cli-26.testcase +build-cli-27.testcase +build-cli-28.testcase +build-cli-29.testcase diff --git a/test/buildserver.testsuite b/test/buildserver.testsuite index f908bff..d3b6e7a 100644 --- a/test/buildserver.testsuite +++ b/test/buildserver.testsuite @@ -1,6 +1,18 @@ -buildserver01.testcase -buildserver02.testcase -buildserver03.testcase -buildserver04.testcase -buildserver05.testcase -buildserver06.testcase +build-svr-01.testcase +build-svr-02.testcase +build-svr-03.testcase +build-svr-04.testcase +build-svr-05.testcase +build-svr-06.testcase +build-svr-07.testcase +build-svr-08.testcase +build-svr-09.testcase +build-svr-10.testcase +build-svr-11.testcase +build-svr-12.testcase +build-svr-13.testcase +build-svr-14.testcase +build-svr-17.testcase +build-svr-18.testcase +build-svr-19.testcase +build-svr-20.testcase diff --git a/test/buildserver01.testcase b/test/buildserver01.testcase deleted file mode 100644 index fa32179..0000000 --- a/test/buildserver01.testcase +++ /dev/null @@ -1,11 +0,0 @@ -#PRE-EXEC -rm -rf ~/.build_tools/build_server/testserver3 -rm -rf buildsvr01 -mkdir buildsvr01 -#EXEC -cd buildsvr01; ../../build-svr create -n testserver3 -u http://172.21.111.132/testserver3/unstable -d pkgserver -i testserver3 -#POST-EXEC -../build-svr remove -n testserver3 -rm -rf buildsvr01 -#EXPECT -reated new build server: "testserver3" diff --git a/test/buildserver02.testcase b/test/buildserver02.testcase deleted file mode 100644 index e490e82..0000000 --- a/test/buildserver02.testcase +++ /dev/null @@ -1,18 +0,0 @@ -#PRE-EXEC -#EXEC -../build-svr -h -#POST-EXEC -#EXPECT -Usage: build-svr {create|remove|start|build|help} ... - build-svr create -n -u -d -i - build-svr remove -n - build-svr start -n [-p - build-svr add -n [-d -p ] - -n, --name build server name - -u, --url package server URL: http://xxx/yyy/zzz - -d package svr or friend svr ip or ssh alias - --domain - -i, --id package server id - -p, --port port - -h, --help display this information diff --git a/test/buildserver03.testcase b/test/buildserver03.testcase deleted file mode 100644 index de131bf..0000000 --- a/test/buildserver03.testcase +++ /dev/null @@ -1,13 +0,0 @@ -#PRE-EXEC -rm -rf buildsvr01 -mkdir buildsvr01 -cd buildsvr01; ../../build-svr create -n testserver3 -u http://172.21.111.132/testserver3/unstable -d pkgserver -i testserver3 -#EXEC -../build-svr add -n testserver3 -d 127.0.0.1 -p 2223 -cat ~/.build_tools/build_server/testserver3/friends -#POST-EXEC -../build-svr remove -n testserver3 -rm -rf buildsvr01 -#EXPECT -Friend server is added successfully! 
-127.0.0.1,2223 diff --git a/test/buildserver04.testcase b/test/buildserver04.testcase deleted file mode 100644 index bdad4d1..0000000 --- a/test/buildserver04.testcase +++ /dev/null @@ -1,12 +0,0 @@ -#PRE-EXEC -rm -rf buildsvr01 -mkdir buildsvr01 -cd buildsvr01; ../../build-svr create -n testserver3 -u http://172.21.111.132/emptyserver/unstable -d pkgserver -i emptyserver -#EXEC -echo "TEST_TIME=3" >> ~/.build_tools/build_server/testserver3/server.cfg -../build-svr start -n testserver3 -p 2223 -#POST-EXEC -../build-svr remove -n testserver3 -rm -rf buildsvr01 -#EXPECT -Test time is elapsed! diff --git a/test/buildserver05.testcase b/test/buildserver05.testcase deleted file mode 100644 index d15c978..0000000 --- a/test/buildserver05.testcase +++ /dev/null @@ -1,14 +0,0 @@ -#PRE-EXEC -rm -rf buildsvr01 -mkdir buildsvr01 -cd buildsvr01; ../../build-svr create -n testserver3 -u http://172.21.111.132/emptyserver/unstable -d pkgserver -i emptyserver -../build-svr start -n testserver3 -p 2223 & -#EXEC -sleep 2 -../build-svr stop -n testserver3 -sleep 1 -#POST-EXEC -../build-svr remove -n testserver3 -rm -rf buildsvr01 -#EXPECT -Server will be down! diff --git a/test/buildserver06.testcase b/test/buildserver06.testcase deleted file mode 100644 index d95913d..0000000 --- a/test/buildserver06.testcase +++ /dev/null @@ -1,10 +0,0 @@ -#PRE-EXEC -mkdir buildsvr01 -cd buildsvr01; ../../build-svr create -n testserver3 -u http://172.21.111.132/emptyserver/unstable -d pkgserver -i emptyserver -#EXEC -../build-svr stop -n testserver3 -#POST-EXEC -../build-svr remove -n testserver3 -rm -rf buildsvr01 -#EXPECT -Server is not running! diff --git a/test/buildsvr.init b/test/buildsvr.init new file mode 100644 index 0000000..5b294f6 --- /dev/null +++ b/test/buildsvr.init @@ -0,0 +1,32 @@ +#!/bin/sh +rm -rf buildsvr01 +rm -rf ~/.build_tools/build_server/testserver3 +mkdir buildsvr01 +cd buildsvr01 +../../build-svr remove -n testserver3 +../../build-svr create -n testserver3 -u `pwd`/../pkgsvr01/unstable -d 127.0.0.1:3333 -t ftp://ftpuser:ftpuser@127.0.0.1 +../../build-svr add-svr -n testserver3 -d 127.0.0.1:2224 +../../build-svr add-svr -n testserver3 -u `pwd`/../pkgsvr02/unstable +cd .. +cd git01 +rm -rf a +rm -rf a1 +rm -rf b +rm -rf c +rm -rf d +tar xvf a_v1.tar.gz +tar xvf b_v1.tar.gz +tar xvf c_v1.tar.gz +tar xvf d_v0.tar.gz +tar xvf a1_v1.tar.gz +cd .. 
+../build-svr add-os -n testserver3 -o ubuntu-32 +../build-svr add-os -n testserver3 -o windows-32 +../build-svr add-prj -n testserver3 -N testa -g `pwd`/git01/a -b master +../build-svr add-prj -n testserver3 -N testb -g `pwd`/git01/b -b master +../build-svr add-prj -n testserver3 -N testc -g `pwd`/git01/c -b master -w 1111 +../build-svr add-prj -n testserver3 -N testd -g `pwd`/git01/d -b master -o ubuntu-32 +../build-svr add-prj -n testserver3 -N teste -P bin +../build-svr add-prj -n testserver3 -N testa1 -g `pwd`/git01/a1 -b master +../pkg-svr register -n pkgsvr01 -d unstable -P bin/bin_0.0.0_ubuntu-32.zip +../build-svr start -n testserver3 -p 2223 diff --git a/test/c/c b/test/c/c deleted file mode 100644 index 16fc679..0000000 --- a/test/c/c +++ /dev/null @@ -1 +0,0 @@ -ca diff --git a/test/c/package/build.linux b/test/c/package/build.linux deleted file mode 100755 index b774e2d..0000000 --- a/test/c/package/build.linux +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -e - -clean () -{ - rm -rf $SRCDIR/*.zip - rm -rf $SRCDIR/*.tar.gz -} - -build () -{ - echo "C: clean build (no dependency) ok" -} - -install () -{ - mkdir -p $SRCDIR/package/c.package.linux/data - cp $SRCDIR/c $SRCDIR/package/c.package.linux/data -} - -$1 -echo "$1 success" diff --git a/test/c/package/pkginfo.manifest b/test/c/package/pkginfo.manifest deleted file mode 100644 index ede7a91..0000000 --- a/test/c/package/pkginfo.manifest +++ /dev/null @@ -1,6 +0,0 @@ -Package: c -Version: 11 -OS: linux -Build-host-os: linux -Maintainer: xx -Source: c diff --git a/test/git01/a.tar.gz b/test/git01/a.tar.gz new file mode 100644 index 0000000..be4b4ef Binary files /dev/null and b/test/git01/a.tar.gz differ diff --git a/test/git01/a1_v1.tar.gz b/test/git01/a1_v1.tar.gz new file mode 100644 index 0000000..b363c5a Binary files /dev/null and b/test/git01/a1_v1.tar.gz differ diff --git a/test/git01/a_new.tar.gz b/test/git01/a_new.tar.gz new file mode 100644 index 0000000..25a6ba2 Binary files /dev/null and b/test/git01/a_new.tar.gz differ diff --git a/test/git01/a_v1.tar.gz b/test/git01/a_v1.tar.gz new file mode 100644 index 0000000..049b900 Binary files /dev/null and b/test/git01/a_v1.tar.gz differ diff --git a/test/git01/a_v2.tar.gz b/test/git01/a_v2.tar.gz new file mode 100644 index 0000000..6a82258 Binary files /dev/null and b/test/git01/a_v2.tar.gz differ diff --git a/test/git01/a_v3.tar.gz b/test/git01/a_v3.tar.gz new file mode 100644 index 0000000..7915022 Binary files /dev/null and b/test/git01/a_v3.tar.gz differ diff --git a/test/git01/a_v4.tar.gz b/test/git01/a_v4.tar.gz new file mode 100644 index 0000000..25f7b3b Binary files /dev/null and b/test/git01/a_v4.tar.gz differ diff --git a/test/git01/a_v5.tar.gz b/test/git01/a_v5.tar.gz new file mode 100644 index 0000000..9b52084 Binary files /dev/null and b/test/git01/a_v5.tar.gz differ diff --git a/test/git01/b.tar.gz b/test/git01/b.tar.gz new file mode 100644 index 0000000..6d263a1 Binary files /dev/null and b/test/git01/b.tar.gz differ diff --git a/test/git01/b_new.tar.gz b/test/git01/b_new.tar.gz new file mode 100644 index 0000000..2482a1b Binary files /dev/null and b/test/git01/b_new.tar.gz differ diff --git a/test/git01/b_v1.tar.gz b/test/git01/b_v1.tar.gz new file mode 100644 index 0000000..96cb748 Binary files /dev/null and b/test/git01/b_v1.tar.gz differ diff --git a/test/git01/b_v2.tar.gz b/test/git01/b_v2.tar.gz new file mode 100644 index 0000000..42c1b1a Binary files /dev/null and b/test/git01/b_v2.tar.gz differ diff --git a/test/git01/b_v4.tar.gz 
b/test/git01/b_v4.tar.gz new file mode 100644 index 0000000..60ae7c1 Binary files /dev/null and b/test/git01/b_v4.tar.gz differ diff --git a/test/git01/c.tar.gz b/test/git01/c.tar.gz new file mode 100644 index 0000000..92ddfb7 Binary files /dev/null and b/test/git01/c.tar.gz differ diff --git a/test/git01/c_new.tar.gz b/test/git01/c_new.tar.gz new file mode 100644 index 0000000..10775b4 Binary files /dev/null and b/test/git01/c_new.tar.gz differ diff --git a/test/git01/c_v1.tar.gz b/test/git01/c_v1.tar.gz new file mode 100644 index 0000000..add7043 Binary files /dev/null and b/test/git01/c_v1.tar.gz differ diff --git a/test/git01/c_v1_1.tar.gz b/test/git01/c_v1_1.tar.gz new file mode 100644 index 0000000..01177c7 Binary files /dev/null and b/test/git01/c_v1_1.tar.gz differ diff --git a/test/git01/c_v2.tar.gz b/test/git01/c_v2.tar.gz new file mode 100644 index 0000000..f8d8516 Binary files /dev/null and b/test/git01/c_v2.tar.gz differ diff --git a/test/git01/c_v4.tar.gz b/test/git01/c_v4.tar.gz new file mode 100644 index 0000000..b108b4a Binary files /dev/null and b/test/git01/c_v4.tar.gz differ diff --git a/test/git01/c_v5.tar.gz b/test/git01/c_v5.tar.gz new file mode 100644 index 0000000..a682581 Binary files /dev/null and b/test/git01/c_v5.tar.gz differ diff --git a/test/git01/d.tar.gz b/test/git01/d.tar.gz new file mode 100644 index 0000000..097b1f6 Binary files /dev/null and b/test/git01/d.tar.gz differ diff --git a/test/git01/d_v0.tar.gz b/test/git01/d_v0.tar.gz new file mode 100644 index 0000000..c0a3a6e Binary files /dev/null and b/test/git01/d_v0.tar.gz differ diff --git a/test/packageserver.testsuite b/test/packageserver.testsuite index d63bc7e..7fe78d7 100644 --- a/test/packageserver.testsuite +++ b/test/packageserver.testsuite @@ -1,5 +1,6 @@ packageserver01.testcase packageserver02.testcase +packageserver24.testcase packageserver03.testcase packageserver04.testcase packageserver05.testcase @@ -7,9 +8,7 @@ packageserver06.testcase packageserver07.testcase packageserver08.testcase packageserver09.testcase -packageserver10.testcase packageserver11.testcase -packageserver12.testcase packageserver13.testcase packageserver14.testcase packageserver15.testcase diff --git a/test/packageserver01.testcase b/test/packageserver01.testcase index d0adec5..f1c411e 100644 --- a/test/packageserver01.testcase +++ b/test/packageserver01.testcase @@ -3,30 +3,60 @@ ../pkg-svr -h #POST-EXEC #EXPECT -Usage: pkg-svr {create|register|gen-snapshot|sync|add-dist|spkg-path|remove|remove-pkg|list|help} ... - pkg-svr create -i -d [-u ] [-l ] - pkg-svr add-dist -i -d [-u ] [-c] - pkg-svr remove -i - pkg-svr register -i -d -p -s [-g] [-t] - pkg-svr remove-pkg -i -d -p - pkg-svr gen-snapshot -i -d [-n ] [-b ] [-p ] - pkg-svr sync -i -d [-f] - pkg-svr spkg-path -i -d -s - pkg-svr list [-i ] - -i, --id package server id - -d, --dist package server distribution - -u, --url remote server address - -o, --os target operating system - -p - --bpackage binary package file path list - -s - --spackage source package file path - -g, --generate snapshot is generate - -n, --sname snapshot name - -b base snapshot name - --bsnapshot - -l, --location server location - -f, --force force update pkg file - -t, --test upload for test - -c, --clone clone mode - -h, --help display this information +Package-server administer service command-line tool. + +Usage: pkg-svr [OPTS] or pkg-svr -h + +Subcommands: +create Create a package-server. +add-dist Add a distribution to package-server. +register Register a package in package-server. 
+remove Remove a package-server. +remove-dist Remove a distribution to package-server. +remove-snapshot Remove a snapshot in package-server. +gen-snapshot Generate a snapshot in package-server. +sync Synchronize the package-server from parent package server. +start Start the package-server. +stop Stop the package-server. +clean Delete unneeded package files in package-server. +list Show all pack + +Subcommand usage: +pkg-svr create -n -d [-u ] [-l ] +pkg-svr add-dist -n -d [-u ] [--clone] +pkg-svr add-os -n -d -o +pkg-svr register -n -d -P [--gen] [--test] +pkg-svr link -n -d --origin-pkg-name --origin-pkg-os --link-os-list +pkg-svr remove -n +pkg-svr remove-dist -n -d +pkg-svr remove-pkg -n -d -P [-o ] +pkg-svr remove-snapshot -n -d -s +pkg-svr gen-snapshot -n -d -s [-b ] +pkg-svr sync -n -d [--force] +pkg-svr clean -n -d [-s ] +pkg-svr start -n -p +pkg-svr stop -n -p +pkg-svr list [-n ] + +Options: +-n, --name package server name +-d, --dist package server distribution +-u, --url remote server url: http://127.0.0.1/dibs/unstable +-o, --os target operating system +-P, --pkgs package file path list +-s, --snapshot a snapshot name or snapshot list +-b, --base base snapshot name +-l, --loc server location +-p, --port port number + --clone clone mode + --force force update pkg file + --test upload for test + --gen generate snapshot + --origin-pkg-name + origin package name + --origin-pkg-os + origin package os + --link-os-list + target os list to link origin file +-h, --help display help +-v, --version display version diff --git a/test/packageserver02.testcase b/test/packageserver02.testcase index dd533bd..408ca26 100644 --- a/test/packageserver02.testcase +++ b/test/packageserver02.testcase @@ -1,6 +1,7 @@ #PRE-EXEC +../pkg-svr remove -n temp_local --force #EXEC -../pkg-svr create -i temp_local -d unstable +../pkg-svr create -n temp_local -d unstable #POST-EXEC #EXPECT package server [temp_local] created successfully diff --git a/test/packageserver03.testcase b/test/packageserver03.testcase index e858351..11eb774 100644 --- a/test/packageserver03.testcase +++ b/test/packageserver03.testcase @@ -1,6 +1,8 @@ #PRE-EXEC +../pkg-svr remove -n temp_remote --force #EXEC -../pkg-svr create -i temp_remote -d unstable -u http://172.21.111.177/tmppkgsvr/tmp +../pkg-svr create -n temp_remote -d unstable -u http://172.21.111.177/tmppkgsvr/tmp #POST-EXEC #EXPECT +snapshot is generated : package server [temp_remote] created successfully diff --git a/test/packageserver04.testcase b/test/packageserver04.testcase index f2539ca..3be6257 100644 --- a/test/packageserver04.testcase +++ b/test/packageserver04.testcase @@ -1,6 +1,7 @@ #PRE-EXEC +../pkg-svr remove -n temp_remote_dup --force #EXEC -../pkg-svr create -i temp_remote_dup -d unstable -u temp_remote/unstable +../pkg-svr create -n temp_remote_dup -d unstable -u temp_remote/unstable #POST-EXEC #EXPECT package server [temp_remote_dup] created successfully diff --git a/test/packageserver05.testcase b/test/packageserver05.testcase index a66a040..da0ad18 100644 --- a/test/packageserver05.testcase +++ b/test/packageserver05.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr add-dist -i temp_local -d stable +../pkg-svr add-dist -n temp_local -d stable #POST-EXEC #EXPECT distribution [stable] added successfully diff --git a/test/packageserver06.testcase b/test/packageserver06.testcase index 9f9e917..cf49f46 100644 --- a/test/packageserver06.testcase +++ b/test/packageserver06.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr sync -i temp_remote -d unstable 
+../pkg-svr sync -n temp_remote -d unstable #POST-EXEC #EXPECT -package server [temp_remote]'s distribution [unstable] has the synchronization. +package server [temp_remote]'s distribution [unstable] has been synchronized. diff --git a/test/packageserver07.testcase b/test/packageserver07.testcase index 1b7d84c..13bec0c 100644 --- a/test/packageserver07.testcase +++ b/test/packageserver07.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr sync -i temp_remote_dup -d unstable -f +../pkg-svr sync -n temp_remote_dup -d unstable --force #POST-EXEC #EXPECT -package server [temp_remote_dup]'s distribution [unstable] has the synchronization. +package server [temp_remote_dup]'s distribution [unstable] has been synchronized. diff --git a/test/packageserver08.testcase b/test/packageserver08.testcase index 9ecc71a..db9a935 100644 --- a/test/packageserver08.testcase +++ b/test/packageserver08.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr gen-snapshot -i temp_remote +../pkg-svr gen-snapshot -n temp_remote -s snap01 #POST-EXEC #EXPECT snapshot is generated : diff --git a/test/packageserver09.testcase b/test/packageserver09.testcase index 547b101..d8632bf 100644 --- a/test/packageserver09.testcase +++ b/test/packageserver09.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr gen-snapshot -i temp_remote -d unstable +../pkg-svr gen-snapshot -n temp_remote -d unstable -s snap02 #POST-EXEC #EXPECT snapshot is generated : diff --git a/test/packageserver10.testcase b/test/packageserver10.testcase index 34ca9b6..7a5fcb2 100644 --- a/test/packageserver10.testcase +++ b/test/packageserver10.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr gen-snapshot -i temp_remote -d unstable -n test +../pkg-svr gen-snapshot -n temp_remote -d unstable -s test #POST-EXEC #EXPECT snapshot is generated : diff --git a/test/packageserver11.testcase b/test/packageserver11.testcase index a7ba031..247141f 100644 --- a/test/packageserver11.testcase +++ b/test/packageserver11.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr gen-snapshot -i temp_remote -d unstable -n test2 -b test +../pkg-svr gen-snapshot -n temp_remote -d unstable -s snap03 -b snap01 #POST-EXEC #EXPECT snapshot is generated : diff --git a/test/packageserver12.testcase b/test/packageserver12.testcase index e5db467..092eb4e 100644 --- a/test/packageserver12.testcase +++ b/test/packageserver12.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr gen-snapshot -i temp_remote -d unstable -o all -p test_server_pkg_file/smart-build-interface_1.20.1_linux.zip -n test3 +../pkg-svr gen-snapshot -n temp_remote -d unstable -s test3 #POST-EXEC #EXPECT snapshot is generated : diff --git a/test/packageserver13.testcase b/test/packageserver13.testcase index 017edb2..ae8c629 100644 --- a/test/packageserver13.testcase +++ b/test/packageserver13.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr create -i temp_remote_snap -d unstable -u temp_remote/unstable/snapshots/test +../pkg-svr create -n temp_remote_snap -d unstable -u temp_remote/unstable/snapshots/snap01 #POST-EXEC #EXPECT package server [temp_remote_snap] created successfully diff --git a/test/packageserver14.testcase b/test/packageserver14.testcase index 62d6676..06bdd06 100644 --- a/test/packageserver14.testcase +++ b/test/packageserver14.testcase @@ -1,7 +1,7 @@ #PRE-EXEC cp test_server_pkg_file/smart-build-interface* ./ #EXEC -../pkg-svr register -i temp_remote -d unstable -p smart-build-interface_1.20.1_linux.zip -s smart-build-interface_1.20.1.tar.gz -g +../pkg-svr register -n temp_remote -d unstable -P 
smart-build-interface_1.20.1_linux.zip --gen #POST-EXEC #EXPECT package registed successfully diff --git a/test/packageserver15.testcase b/test/packageserver15.testcase index 351d6aa..af34b96 100644 --- a/test/packageserver15.testcase +++ b/test/packageserver15.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr register -i temp_remote -d unstable -p smart-build-interface_1.20.1_linux.zip -s smart-build-interface_1.20.1.tar.gz -g +../pkg-svr register -n temp_remote -d unstable -P smart-build-interface_1.20.1_linux.zip --gen #POST-EXEC #EXPECT existing package's version is higher than register package diff --git a/test/packageserver16.testcase b/test/packageserver16.testcase index a70d250..4d09776 100644 --- a/test/packageserver16.testcase +++ b/test/packageserver16.testcase @@ -1,7 +1,7 @@ #PRE-EXEC cp test_server_pkg_file/smart-build-interface* ./ #EXEC -../pkg-svr register -i temp_remote_dup -d unstable -p ./temp_remote/unstable/binary/smart-build-interface_1.20.1_linux.zip -s ./temp_remote/unstable/source/smart-build-interface_1.20.1.tar.gz -g -t +../pkg-svr register -n temp_remote_dup -d unstable -P ./temp_remote/unstable/binary/smart-build-interface_1.20.1_linux.zip --gen --test #POST-EXEC #EXPECT package registed successfully diff --git a/test/packageserver17.testcase b/test/packageserver17.testcase index 75ca498..ad1549b 100644 --- a/test/packageserver17.testcase +++ b/test/packageserver17.testcase @@ -1,7 +1,7 @@ #PRE-EXEC cp test_server_pkg_file/smart-build-interface* ./ #EXEC -../pkg-svr remove-pkg -i temp_local -d unstable -p smart-build-interface +../pkg-svr remove-pkg -n temp_local -d unstable -P smart-build-interface #POST-EXEC #EXPECT package removed successfully diff --git a/test/packageserver19.testcase b/test/packageserver19.testcase index d144662..cd5b36b 100644 --- a/test/packageserver19.testcase +++ b/test/packageserver19.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr list -i temp_local +../pkg-svr list -n temp_local #POST-EXEC rm smart-build-interface_1.20.1* #EXPECT diff --git a/test/packageserver20.testcase b/test/packageserver20.testcase index 4747a48..7d47e52 100644 --- a/test/packageserver20.testcase +++ b/test/packageserver20.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr remove -i temp_local -f +../pkg-svr remove -n temp_local --force #POST-EXEC YES #EXPECT diff --git a/test/packageserver21.testcase b/test/packageserver21.testcase index 2fc8eb9..1ae0b53 100644 --- a/test/packageserver21.testcase +++ b/test/packageserver21.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr remove -i temp_remote -f +../pkg-svr remove -n temp_remote --force #POST-EXEC YES #EXPECT diff --git a/test/packageserver22.testcase b/test/packageserver22.testcase index f63decb..3dad192 100644 --- a/test/packageserver22.testcase +++ b/test/packageserver22.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr remove -i temp_remote_dup -f +../pkg-svr remove -n temp_remote_dup --force #POST-EXEC YES #EXPECT diff --git a/test/packageserver23.testcase b/test/packageserver23.testcase index c92b0b9..6e37f2f 100644 --- a/test/packageserver23.testcase +++ b/test/packageserver23.testcase @@ -1,6 +1,6 @@ #PRE-EXEC #EXEC -../pkg-svr remove -i temp_remote_snap -f +../pkg-svr remove -n temp_remote_snap --force #POST-EXEC YES #EXPECT diff --git a/test/packageserver24.testcase b/test/packageserver24.testcase new file mode 100644 index 0000000..2657bab --- /dev/null +++ b/test/packageserver24.testcase @@ -0,0 +1,8 @@ +#PRE-EXEC +#EXEC +../pkg-svr add-os -n temp_local -d unstable -o ubuntu-32 
+#POST-EXEC +../pkg-svr add-os -n temp_local -d unstable -o windows-32 +#EXPECT +snapshot is generated : +package server add os [ubuntu-32] successfully diff --git a/test/packageserver25.testcase b/test/packageserver25.testcase new file mode 100644 index 0000000..e399ad8 --- /dev/null +++ b/test/packageserver25.testcase @@ -0,0 +1,6 @@ +#PRE-EXEC +#EXEC +../pkg-svr link -n temp_local -d unstable --origin-pkg-name smart-build-interface --origin-pkg-os ubuntu-10.04-32 --link-os-list windows-7-32 +#POST-EXEC +#EXPECT +package linked successfully diff --git a/test/pkg-cli-download.testcase b/test/pkg-cli-download.testcase index e4f6b2c..f4e41d1 100644 --- a/test/pkg-cli-download.testcase +++ b/test/pkg-cli-download.testcase @@ -7,4 +7,4 @@ ls pkgcli01 #POST-EXEC rm -rf pkgcli01 #EXPECT -base-ide-product_0.20.8_linux.zip +base-ide-product_1.0.2_linux.zip diff --git a/test/pkg-cli-listrpkg.testcase b/test/pkg-cli-listrpkg.testcase index 6a74ae7..84bf751 100644 --- a/test/pkg-cli-listrpkg.testcase +++ b/test/pkg-cli-listrpkg.testcase @@ -3,4 +3,4 @@ ../pkg-cli list-rpkg -u http://172.21.111.132/testserver3/unstable #POST-EXEC #EXPECT -base-ide-product (0.20.8) +base-ide-product (1.0.2) diff --git a/test/pkg-cli-showrpkg.testcase b/test/pkg-cli-showrpkg.testcase index 443aa3e..fc5cf62 100644 --- a/test/pkg-cli-showrpkg.testcase +++ b/test/pkg-cli-showrpkg.testcase @@ -4,5 +4,5 @@ #POST-EXEC #EXPECT Package : base-ide-product -Version : 0.20.8 +Version : 1.0.2 OS : linux diff --git a/test/pkg-list b/test/pkg-list index cf64628..74d4baa 100644 --- a/test/pkg-list +++ b/test/pkg-list @@ -1,24 +1,23 @@ -Package : A +Source : Origin Version : 0.1.0 +Maintainer : taejun.ha + +Package : A OS : linux +C-test : test Build-host-os :linux | windows | darwin -Maintainer : taejun.ha Path : binary/A_0.1.0_linux.zip +C-commic : ask Origin : remote -SHA256 : 52b400554f2a29dec46144af649181cf287c000b4feb65de72055ed9f11924a9 +C-origin : kkk Package: B -Version : 0.2.0 OS : linux Build-host-os :linux | windows | darwin -Maintainer : taejun.ha Install-dependency : C, D, E Build-dependency : F (>= 1.0.0.20101221), E (>= 1.0.0.20101221) Source-dependency : D, scratchbox-aquila-simulator-rootstrap [ linux |windows ](>= 1.0.0.20101221), scratchbox-core [windows|darwin](>= 1.0.17) Path : -Source : Origin -From-server? 
: true SHA256 : your_checksum Description : this is my first -project -descriotion +C-kim : oks diff --git a/test/pkg-list-local b/test/pkg-list-local new file mode 100644 index 0000000..ce331b6 --- /dev/null +++ b/test/pkg-list-local @@ -0,0 +1,17 @@ +Include : pkg-list + +Package : A +OS : windows +Build-host-os :linux | windows | darwin +Path : binary/A_0.1.0_linux.zip +Origin : remote + +Package: B +OS : windows +Build-host-os :linux | windows | darwin +Install-dependency : C, D, E +Build-dependency : F (>= 1.0.0.20101221), E (>= 1.0.0.20101221) +Source-dependency : D, scratchbox-aquila-simulator-rootstrap [ linux |windows ](>= 1.0.0.20101221), scratchbox-core [windows|darwin](>= 1.0.17) +Path : +SHA256 : your_checksum +Description : this is my first diff --git a/test/pkgsvr.init b/test/pkgsvr.init new file mode 100644 index 0000000..96b4f3f --- /dev/null +++ b/test/pkgsvr.init @@ -0,0 +1,7 @@ +#!/bin/sh +rm -rf ~/.build_tools/pkg_server/pkgsvr01 +rm -rf `pwd`/pkgsvr01 +ruby -d ../pkg-svr create -n pkgsvr01 -d unstable +ruby -d ../pkg-svr add-os -n pkgsvr01 -d unstable -o ubuntu-32 +ruby -d ../pkg-svr add-os -n pkgsvr01 -d unstable -o windows-32 +ruby -d ../pkg-svr start -n pkgsvr01 -p 3333 diff --git a/test/pkgsvr2.init b/test/pkgsvr2.init new file mode 100644 index 0000000..e97681c --- /dev/null +++ b/test/pkgsvr2.init @@ -0,0 +1,7 @@ +#!/bin/sh +rm -rf ~/.build_tools/pkg_server/pkgsvr02 +rm -rf `pwd`/pkgsvr02 +ruby -d ../pkg-svr create -n pkgsvr02 -d unstable +ruby -d ../pkg-svr add-os -n pkgsvr02 -d unstable -o ubuntu-32 +ruby -d ../pkg-svr add-os -n pkgsvr02 -d unstable -o windows-32 +ruby -d ../pkg-svr start -n pkgsvr02 -p 4444 diff --git a/test/regression.rb b/test/regression.rb index 7085799..177cf4d 100755 --- a/test/regression.rb +++ b/test/regression.rb @@ -22,15 +22,13 @@ class TestCase end def is_succeeded?(results) + i = 0 @expected_results.each do |e| found = false - results.each do |r| - if r.include? e then - found = true - break - end + if not results[i].include? 
e then + return false end - if not found then return false end + i += 1 end return true @@ -88,12 +86,19 @@ def execute( file_name ) cmd = cmd[0..-2] fork_p = true end + # get result if not fork_p then IO.popen("#{cmd} 2>&1") { |io| + # io.each do |line| + # puts "---> #{line}" + # end } else IO.popen("#{cmd} 2>&1 &") { |io| + # io.each do |line| + # puts "---> #{line}" + # end } end #`#{cmd}` @@ -102,12 +107,36 @@ def execute( file_name ) # exec results = [] tcase.exec_cmds.each do |cmd| + fork_p = false + hidden_p = false + if cmd[-1,1] == "&" then + cmd = cmd[0..-2] + fork_p = true + end + if cmd[0,1] == ">" then + cmd = cmd[1..-1] + hidden_p = true + end # get result - IO.popen("#{cmd} 2>&1") { |io| - io.each do |line| - results.push line.strip - end - } + if fork_p then + IO.popen("#{cmd} 2>&1 &") { |io| + io.each do |line| + if not hidden_p then + results.push line.strip + #puts "---> #{line}" + end + end + } + else + IO.popen("#{cmd} 2>&1") { |io| + io.each do |line| + if not hidden_p then + results.push line.strip + #puts "---> #{line}" + end + end + } + end end # check expected result diff --git a/test/test_bserver2c.rb b/test/test_bserver2c.rb deleted file mode 100755 index 0d59deb..0000000 --- a/test/test_bserver2c.rb +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/ruby - -require File.dirname(__FILE__) + "/../src/build_server/BuildServerController" - -BuildServerController.build_git("mbs_server","gerrithost:/slp/sdk/public/native/toolchain/smart-build-interface","origin/unstable","linux", nil) - -#BuildServerController.build_local("temp","/home/bluleo78/git/sbi-slp-public-plugin/toolchains/public/gdb_build","linux") -=begin -#case ARGV[0] -# when "create" then -# pkg_server.create "temp", "unstable", "http://172.21.111.132/pkgserver/", "unstable" -# when "register" then -# #pkg_server.register "/home/taejun/project/sdk-build/test/smart-build-interface_0.19.1_linux.zip", "unstable", "-g" -# pkg_server.register "/home/taejun/project/sdk-build/test/smart-build-interface_0.19.1_linux.zip", "unstable", "" -# when "snapshot" then - pkg_server.snapshot_generate "", "unstable", "", "", "" - when "sync" then - # pkg_server.sync "unstable", "force" - pkg_server.sync "unstable", "" - when "add_distribution" then - pkg_server.add_distribution "test_stable", "stable" - else - puts "First input error : #{ARGV[0]}" -end -=end diff --git a/test/test_bserver3c.rb b/test/test_bserver3c.rb deleted file mode 100755 index 28b1564..0000000 --- a/test/test_bserver3c.rb +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/ruby - -require File.dirname(__FILE__) + "/../src/common/utils" -require File.dirname(__FILE__) + "/../src/build_server/BuildServerController" -$SERVER_CONFIG_ROOT = Utils::HOME + "/.tizen_build_server" - -#BuildServerController.build_git("temp","gerrithost:/slp/sdk/public/native/toolchain/smart-build-interface","origin/unstable","linux") -sleep 5 - -BuildServerController.build_local("temp","/home/bluleo78/git/sbi-slp-public-plugin/toolchains/public/gdb_build","linux","/home/bluleo78/test/test/unstable") -=begin -#case ARGV[0] -# when "create" then -# pkg_server.create "temp", "unstable", "http://172.21.111.132/pkgserver/", "unstable" -# when "register" then -# #pkg_server.register "/home/taejun/project/sdk-build/test/smart-build-interface_0.19.1_linux.zip", "unstable", "-g" -# pkg_server.register "/home/taejun/project/sdk-build/test/smart-build-interface_0.19.1_linux.zip", "unstable", "" -# when "snapshot" then - pkg_server.snapshot_generate "", "unstable", "", "", "" - when "sync" then - # 
pkg_server.sync "unstable", "force" - pkg_server.sync "unstable", "" - when "add_distribution" then - pkg_server.add_distribution "test_stable", "stable" - else - puts "First input error : #{ARGV[0]}" -end -=end diff --git a/test/test_pkglist_parser.rb b/test/test_pkglist_parser.rb index c299f7f..767dc8c 100755 --- a/test/test_pkglist_parser.rb +++ b/test/test_pkglist_parser.rb @@ -2,8 +2,7 @@ require '../src/common/parser' require '../src/common/package' -alist = Parser.read_pkg_list "pkg-list" -a_list = alist.values -a_list.each do |l| +alist = Parser.read_multy_pkginfo_from "pkg-list-local" +alist.each do |l| l.print -end +end diff --git a/test/test_server b/test/test_server index cb17835..c3f18a0 100755 --- a/test/test_server +++ b/test/test_server @@ -1,63 +1,66 @@ #!/bin/sh echo "============ remove 1 ==============" -../pkg-svr remove -i temp_local +../pkg-svr remove -n temp_local --force echo "============ remove 2 ==============" -../pkg-svr remove -i temp_remote +../pkg-svr remove -n temp_remote --force echo "============ remove 3 ==============" -../pkg-svr remove -i temp_remote_dup +../pkg-svr remove -n temp_remote_dup --force echo "============ remove 4 ==============" -../pkg-svr remove -i temp_remote_snap +../pkg-svr remove -n temp_remote_snap --force echo "============ create 1 ==============" -../pkg-svr create -i temp_local -d unstable +../pkg-svr create -n temp_local -d unstable echo "============ create 2 ==============" -../pkg-svr create -i temp_remote -d unstable -u http://172.21.17.55/dibs/unstable +../pkg-svr create -n temp_remote -d unstable -u http://172.21.17.55/private/develop echo "============ create 3 ==============" -../pkg-svr create -i temp_remote_dup -d unstable -u temp_remote/unstable +../pkg-svr create -n temp_remote_dup -d unstable -u temp_local/unstable echo "============ add dist 1 ==============" -../pkg-svr add-dist -i temp_local -d stable +../pkg-svr add-dist -n temp_local -d stable echo "============ sync 1 ==============" -../pkg-svr sync -i temp_remote -d unstable +../pkg-svr sync -n temp_remote -d unstable echo "============ sync 2 ==============" -../pkg-svr sync -i temp_remote_dup -d unstable -f +../pkg-svr sync -n temp_remote_dup -d unstable --force echo "============ gen snapshot 1 ==============" -../pkg-svr gen-snapshot -i temp_remote +../pkg-svr gen-snapshot -n temp_remote echo "============ gen snapshot 2 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable +../pkg-svr gen-snapshot -n temp_remote -d unstable echo "============ gen snapshot 3 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable -n test +../pkg-svr gen-snapshot -n temp_remote -d unstable -s test echo "============ gen snapshot 4 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable -n test2 -b test +../pkg-svr gen-snapshot -n temp_remote -d unstable -s test2 -b test echo "============ gen snapshot 5 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable -o linux +../pkg-svr gen-snapshot -n temp_remote -d unstable -o linux echo "============ gen snapshot 6 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable -o windows +../pkg-svr gen-snapshot -n temp_remote -d unstable -o windows echo "============ gen snapshot 7 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable -o darwin +../pkg-svr gen-snapshot -n temp_remote -d unstable -o darwin echo "============ gen snapshot 8 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable -o all +../pkg-svr gen-snapshot -n temp_remote -d 
unstable -o all echo "============ gen snapshot 9 ==============" -../pkg-svr gen-snapshot -i temp_remote -d unstable -o all -p test_server_pkg_file/smart-build-interface_1.20.1_linux.zip -n test3 +../pkg-svr gen-snapshot -n temp_remote -d unstable -o all -P test_server_pkg_file/smart-build-interface_1.20.1_linux.zip -s test3 echo "============ create 4 ==============" -../pkg-svr create -i temp_remote_snap -d unstable -u temp_remote/unstable/snapshots/test +../pkg-svr create -n temp_remote_snap -d unstable -u temp_remote/unstable/snapshots/test echo "============ register 1 ==============" -cp test_server_pkg_file/smart-build-interface* ./ -../pkg-svr register -i temp_remote -d unstable -p smart-build-interface_1.20.1_linux.zip -s smart-build-interface_1.20.1.tar.gz -g -echo "============ spkg path 1 ==============" -../pkg-svr spkg-path -i temp_remote -d unstable -s smart-build-interface_1.20.1.tar.gz +cp test_server_pkg_file/smart-build-interface_*_linux.zip ./ +../pkg-svr register -n temp_local -d unstable -P smart-build-interface_1.20.1_linux.zip echo "============ register 2 ==============" -../pkg-svr register -i temp_remote -d unstable -p smart-build-interface_1.20.1_linux.zip -s smart-build-interface_1.20.1.tar.gz -g +cp test_server_pkg_file/smart-build-interface_*_linux.zip ./ +../pkg-svr register -n temp_remote -d unstable -P smart-build-interface_1.20.1_linux.zip --gen echo "============ register 3 ==============" -../pkg-svr register -i temp_remote_dup -d unstable -p ./temp_remote/unstable/binary/smart-build-interface_1.20.1_linux.zip -s ./temp_remote/unstable/source/smart-build-interface_1.20.1.tar.gz -g -t +cp test_server_pkg_file/smart-build-interface_*_linux.zip ./ +../pkg-svr register -n temp_remote_dup -d unstable -P smart-build-interface_1.20.1_linux.zip --gen --test +echo "============ register 4 ==============" +cp test_server_pkg_file/archive.zip ./ +../pkg-svr register -n temp_local -d unstable -A archive.zip echo "============ remove 3 ==============" -../pkg-svr remove-pkg -i temp_local -d unstable -p smart-build-interface +../pkg-svr remove-pkg -n temp_local -d unstable -P smart-build-interface +echo "============ clean 1 ==============" +../pkg-svr clean -n temp_local -d unstable +echo "============ clean 2 ==============" +../pkg-svr clean -n temp_remote -d unstable -s test,test2,test3 echo "============ list 1 ==============" ../pkg-svr list echo "============ list 2 ==============" -../pkg-svr list -i temp_local -#../pkg-svr remove -i temp +../pkg-svr list -n temp_local +#../pkg-svr remove -n temp -#cleanup -rm smart-build-interface_1.20.1_windows.zip -rm smart-build-interface_1.20.1_linux.zip -rm smart-build-interface_1.20.1.tar.gz diff --git a/test/test_server_pkg_file/archive.zip b/test/test_server_pkg_file/archive.zip new file mode 100644 index 0000000..140bd05 Binary files /dev/null and b/test/test_server_pkg_file/archive.zip differ diff --git a/test/test_server_pkg_file/smart-build-interface_1.20.1.tar.gz b/test/test_server_pkg_file/smart-build-interface_1.20.1.tar.gz deleted file mode 100644 index 96f13b4..0000000 Binary files a/test/test_server_pkg_file/smart-build-interface_1.20.1.tar.gz and /dev/null differ diff --git a/test/test_server_pkg_file/smart-build-interface_1.20.1_linux.zip b/test/test_server_pkg_file/smart-build-interface_1.20.1_linux.zip index 109a144..cc2a217 100644 Binary files a/test/test_server_pkg_file/smart-build-interface_1.20.1_linux.zip and b/test/test_server_pkg_file/smart-build-interface_1.20.1_linux.zip differ diff --git 
a/tizen-ide/get_ide_sources.sh b/tizen-ide/get_ide_sources.sh index 5f9d7ea..6a04459 100644 --- a/tizen-ide/get_ide_sources.sh +++ b/tizen-ide/get_ide_sources.sh @@ -16,7 +16,11 @@ GIT_LIST=" /sdk/ide/common-eplugin /sdk/ide/eventinjector-eplugin /sdk/ide/nativecommon-eplugin +/sdk/ide/nativeappcommon-eplugin /sdk/ide/nativeapp-eplugin +/sdk/ide/nativecpp-eplugin +/sdk/ide/nativecpp-ext-eplugin +/sdk/ide/native-sample /sdk/ide/nativeplatform-eplugin /sdk/ide/unittest-eplugin /sdk/ide/native-gui-builder-eplugin @@ -185,6 +189,33 @@ function git_checkout_all() { cd ${START_PATH} } +## Command git +function git_command() { + GIT_PATH=$1 + GIT_NAME=${GIT_PATH##*/} + + ## ARG1 : + cd ${ARG1}/${GIT_NAME} + isError "Found git directory ( ${ARG1}/${GIT_NAME} )" + git ${SCRIPT_OPERATION} + isError "Pulled ${GIT_NAME}" +} + +## Command git all +function git_command_all() { + draw_line; echo "Git ${SCRIPT_OPERATION}"; draw_line + + cd ${ARG1} + isError "Checked source directory ( ${ARG1} )" + + for GIT_EACH in ${GIT_LIST} + do + git_command ${GIT_EACH} + done + + cd ${START_PATH} +} + ############################################################### ## Begin script ############################################################### @@ -228,7 +259,12 @@ case ${SCRIPT_OPERATION} in ## process default *) - usage + if [ "$#" == 1 ]; then + ARG1=$(pwd) + git_command_all + else + usage + fi ;; esac diff --git a/upgrade b/upgrade new file mode 100644 index 0000000..b93eefe --- /dev/null +++ b/upgrade @@ -0,0 +1,256 @@ +#!/usr/bin/ruby +=begin + + upgrade + +Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved. + +Contact: +Taejun Ha +Jiil Hyoun +Donghyuk Yang +DongHee Yang +Sungmin Kim + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Contributors: +- S-Core Co., Ltd +=end + +require 'fileutils' +require 'optparse' +$LOAD_PATH.unshift File.dirname(__FILE__)+"/src/common" +$LOAD_PATH.unshift File.dirname(__FILE__)+"/src/build_server" +$LOAD_PATH.unshift File.dirname(__FILE__)+"/src/pkg_server" +require "utils.rb" +require "log.rb" +require "BuildServerController" +require "packageServerLog" + +def option_error_check( options ) + if options[:start] then + if options[:locate].nil? or options[:type].nil? or options[:name].nil? then + raise ArgumentError, "upgrade -l -S -t -n -p " + "\n" + end + else + if options[:locate].nil? or options[:url].nil? then + raise ArgumentError, "upgrade -u [-l ]" + "\n" + end + end +end + +def option_parse + options = {} + banner = "DIBS upgrade service command-line tool." 
+ "\n" \ + + "\n" + "Usage: upgrade -u [-l ]" + "\n" \ + + "\n" + "Options:" + "\n" + + optparse = OptionParser.new(nil, 32, ' '*8) do|opts| + opts.banner = banner + + options[:locate] = File.dirname(__FILE__) + opts.on( '-l', '--locate ', 'located dibs path' ) do|locate| + options[:locate] = locate + end + + opts.on( '-u', '--url ', 'package server url: http://127.0.0.1/dibs/unstable' ) do|url| + options[:url] = url + end + + opts.on( '-I', '--install', 'install, internal option' ) do + options[:install] = true + end + + opts.on( '-S', '--start', 'start server option' ) do + options[:start] = true + end + + opts.on( '-t', '--type ', 'sever type : BUILDSERVER or PACKAGESERVER' ) do|type| + options[:type] = type + end + + opts.on( '-n', '--name ', 'build server name or package server name' ) do|name| + options[:name] = name + end + + options[:port] = 2222 + opts.on( '-p', '--port ', 'server port number: 2224' ) do|port| + options[:port] = port.strip.to_i + end + + opts.on( '-h', '--help', 'display this information' ) do + puts opts.banner + puts " -l, --locate , located dibs path" + puts " -u, --url , package server url: http://127.0.0.1/dibs/unstable" + exit + end + end + + optparse.parse! + + option_error_check options + + return options +end + +#option parsing +begin + option = option_parse +rescue => e + puts e.message + exit 0 +end + +# Upgrade DIBS +begin + install_opt = option[:install] + sub_cmd = option[:sub_cmd] + dibs_path = option[:locate] + pkg_svr_url= option[:url] + start_opt = option[:start] + svr_type = option[:type] + svr_name = option[:name] + svr_port = option[:port] + + DIBS_PKG_NAME = "dibs" + BACKUP_ROOT = Utils::HOME + "/.build_tools/backup" + PREV_VER_PATH = BACKUP_ROOT + "/prev_ver" + NEW_VER_PATH = BACKUP_ROOT + "/new_ver" + UPGRADE_CMD = "#{PREV_VER_PATH}/upgrade" + BUILD_CONFIG_ROOT = "#{Utils::HOME}/.build_tools/build_server/#{svr_name}" + BUILD_FRIENDS_FILE = "#{BUILD_CONFIG_ROOT}/friends" + + if not File.exist? BACKUP_ROOT then FileUtils.mkdir_p(BACKUP_ROOT) end + log = PackageServerLog.new( "#{BACKUP_ROOT}/log" ) + + if not install_opt then + puts "" + log.info("Upgrade Start...", Log::LV_USER) + + # Backup current dibs + if File.exist? PREV_VER_PATH then FileUtils.rm_rf(PREV_VER_PATH) end + if File.exist? NEW_VER_PATH then FileUtils.rm_rf(NEW_VER_PATH) end + FileUtils.mkdir_p(PREV_VER_PATH) + FileUtils.mkdir_p(NEW_VER_PATH) + FileUtils.cp_r("#{dibs_path}/.", PREV_VER_PATH, :preserve => true) + log.info("Backup DIBS [#{dibs_path}] -> [#{PREV_VER_PATH}]", Log::LV_USER) + + # Run Upgrade + if start_opt and svr_type.eql? "BUILDSERVER" then + cmd = "#{UPGRADE_CMD} -I -l #{dibs_path} -S -t #{svr_type} -n #{svr_name} -p #{svr_port}" + else + cmd = "#{UPGRADE_CMD} -I -l #{dibs_path} -u #{pkg_svr_url}" + end + + cmd = Utils.execute_shell_generate(cmd) + Utils.spawn(cmd) + + else + # Get SERVER INFORMATION + if start_opt and svr_type.eql? "BUILDSERVER" then + build_server = BuildServerController.get_server(svr_name) + pkg_svr_url = build_server.pkgserver_url + log.info("Build server : [#{svr_name}][#{svr_port}]", Log::LV_USER) + end + log.info("Package Server : [#{pkg_svr_url}]", Log::LV_USER) + log.info("DIBS Path : [#{dibs_path}]", Log::LV_USER) + + # Download DIBS Package + client = Client.new( pkg_svr_url, NEW_VER_PATH, log) + client.update() + client.install( DIBS_PKG_NAME, Utils::HOST_OS, true, true) + + # Copy Current path + if File.exist? "#{dibs_path}" then + FileUtils.rm_rf("#{dibs_path}") + #FileUtils.mkdir_p("#{dibs_path}") + end + if File.exist? 
"#{NEW_VER_PATH}/tools/dibs" then + FileUtils.cp_r("#{NEW_VER_PATH}/tools/dibs/.", "#{dibs_path}", :preserve => true) + else + log.error("Not installed package error.", Log::LV_USER) + exit(1) + end + + # Execute start command + if start_opt + if svr_type.eql? "BUILDSERVER" then + # get friends server information + if File.exist? BUILD_FRIENDS_FILE then + File.open( BUILD_FRIENDS_FILE, "r" ) do |f| + f.each_line do |l| + if l.split(",").count < 2 then + next + end + + ip = l.split(",")[0].strip + port = l.split(",")[1].strip + + build_client = BuildCommClient.create( ip, port ) + if build_client.nil? then + log.info("Friend Server #{ip}:#{port} is not running!", Log::LV_USER) + next + end + + # send request + if build_client.send "UPGRADE|#{build_server.password}" then + # recevie & print + mismatched = false + result = build_client.read_lines do |l| + log.error(l, Log::LV_USER) + if l.include? "Password mismatched!" then + mismatched = true + end + end +=begin + if result and not mismatched then + log.info("Friend Server #{ip}:#{port} upgrade failed!", Log::LV_USER) + else + log.info("Friend Server #{ip}:#{port} upgrade requested!", Log::LV_USER) + end +=end + end + + # terminate + build_client.terminate + end + end + else + log.info("No Friend Server.", Log::LV_USER) + end + + # Start Build server + cmd = Utils.execute_shell_generate("#{dibs_path}/build-svr start -n #{svr_name} -p #{svr_port}") + Utils.spawn(cmd) + + log.info("Upgrade Complete", Log::LV_USER) + log.info("Start Build server [#{cmd}]", Log::LV_USER) + + else # PACKAGE SERVER + # Start Build server + cmd = Utils.execute_shell_generate("#{dibs_path}/pkg-svr start -n #{svr_name} -p #{svr_port}") + Utils.spawn(cmd) + + log.info("Upgrade Complete", Log::LV_USER) + log.info("Start Package server [#{cmd}]", Log::LV_USER) + end + else + log.info("Upgrade Complete", Log::LV_USER) + end + end +rescue => e + log.error(e.message, Log::LV_USER) + #puts e.message +end +