Make a copy of scripts and files in ci from scripts/command
author Hyung-Kyu Choi <hk0110.choi@samsung.com>
Fri, 27 Apr 2018 06:55:01 +0000 (15:55 +0900)
committer Sangmin Seo/Motion Control Lab (SR)/Senior Engineer/Samsung Electronics <sangmin7.seo@samsung.com>
Fri, 27 Apr 2018 07:26:55 +0000 (16:26 +0900)
- Make a copy of scripts and files in ci from scripts/command
- These files will be used with the current CI and
  will be removed when the CI is updated

Signed-off-by: Hyung-Kyu Choi <hk0110.choi@samsung.com>
ci/docker_build_tizen_cross.sh [new file with mode: 0755]
ci/docker_build_ubuntu_svace.sh [new file with mode: 0755]
ci/docker_coverage_report.sh [new file with mode: 0755]
ci/docker_cross_test_coverage_build.sh [new file with mode: 0755]
ci/docker_gbs_build.sh [new file with mode: 0755]
ci/docker_run_test.sh [new file with mode: 0755]
ci/format-checker.sh [new file with mode: 0755]
ci/gbs.conf [new file with mode: 0644]
ci/gen_coverage_report.sh [new file with mode: 0755]
ci/imported_url.txt [new file with mode: 0644]
ci/lcov-to-covertura-xml.sh [new file with mode: 0755]

diff --git a/ci/docker_build_tizen_cross.sh b/ci/docker_build_tizen_cross.sh
new file mode 100755 (executable)
index 0000000..41d8031
--- /dev/null
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+# default DOCKER_IMAGE_NAME=nnfw_docker
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker}
+echo "Using docker image $DOCKER_IMAGE_NAME"
+
+SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+HOST_HOME=$SCRIPT_ROOT/../
+DOCKER_HOME=/home
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES+=" -v $HOST_HOME:$DOCKER_HOME"
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs/armel"
+DOCKER_ENV_VARS+=" -e HOST_OS=tizen"
+
+DOCKER_RUN_OPTS="--rm"
+DOCKER_RUN_OPTS+=" -w $DOCKER_HOME"
+
+
+TMP_DIR=$HOST_HOME/tmp/
+mkdir -p $TMP_DIR/armel
+
+# prepare rootfs
+wget -nv http://npuarchive.mooo.com/archive/nnfw/rootfs/rootfs_arm_tizen.tar.gz -O $TMP_DIR/rootfs.tar.gz
+chmod 755 $TMP_DIR/rootfs.tar.gz
+
+
+DOCKER_VOLUMES+=" -v $TMP_DIR/:/opt/rootfs"
+
+CMD="tar -zxf /opt/rootfs/rootfs.tar.gz -C /opt/rootfs/armel && make && make install && make build_test_suite"
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME sh -c "$CMD"
+BUILD_RESULT=$?
+
+# change owner of root dir and delete rootfs dir
+NNFW_OWNER_UID=$(stat -c "%u" $HOST_HOME)
+NNFW_OWNER_GID=$(stat -c "%g" $HOST_HOME)
+
+CMD="chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID $DOCKER_HOME &&  chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID /opt/rootfs && rm -rf /opt/rootfs/armel"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME sh -c "$CMD"
+
+rm -rf $TMP_DIR
+exit $BUILD_RESULT
diff --git a/ci/docker_build_ubuntu_svace.sh b/ci/docker_build_ubuntu_svace.sh
new file mode 100755 (executable)
index 0000000..48d3350
--- /dev/null
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker}
+
+HOST_HOME=$SCRIPT_ROOT/../
+DOCKER_HOME=/home
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES+=" -v $HOST_HOME:$DOCKER_HOME"
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+DOCKER_ENV_VARS+=" -e EXT_ACL_FOLDER=/opt/libarmcl"
+
+DOCKER_RUN_OPTS="--rm"
+DOCKER_RUN_OPTS+=" -w $DOCKER_HOME"
+
+TMP_DIR=$HOST_HOME/tmp
+mkdir -p $TMP_DIR/libarmcl
+
+if [ ! -d "$SVACE_ANALYZER_DIR" ]; then
+  wget -nv http://npuarchive.mooo.com/archive/nnfw/svace/svace-analyzer-2.5-x64-linux.tbz2 -O $TMP_DIR/svace-analyzer-2.5-x64-linux.tbz2
+  mkdir -p $TMP_DIR/svace-analyzer
+  tar -xf $TMP_DIR/svace-analyzer-2.5-x64-linux.tbz2 -C $TMP_DIR/svace-analyzer
+  SVACE_ANALYZER_DIR=$TMP_DIR/svace-analyzer/svace-analyzer-2.5-x64-linux
+fi
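+# SVACE_ANALYZER_DIR may also be pre-set to an existing installation
+# (e.g., an assumed path such as /opt/svace-analyzer-2.5-x64-linux),
+# in which case the download above is skipped.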
+
+# make sure the analyzer binary exists and is executable
+if [[ ! -x "$SVACE_ANALYZER_DIR/bin/svace" ]]; then
+  echo "cannot find svace-analyzer"
+  exit 1
+fi
+
+pushd $HOST_HOME
+
+# prepare armcl library
+wget http://npuarchive.mooo.com/archive/nnfw/libarmcl/libarmcl.tar.gz -O $TMP_DIR/libarmcl.tar.gz -nv
+tar -zxvf $TMP_DIR/libarmcl.tar.gz -C $TMP_DIR/libarmcl
+
+# prepare rootfs
+wget http://npuarchive.mooo.com/archive/nnfw/rootfs/rootfs_arm_ubuntu.tar.gz -O $TMP_DIR/rootfs.tar.gz -nv
+tar -zxf $TMP_DIR/rootfs.tar.gz -C $TMP_DIR
+
+# prepare svace
+wget -nv http://npuarchive.mooo.com/archive/nnfw/svace/warn-settings.cxx.txt -O $TMP_DIR/warn-settings.cxx.txt
+
+DOCKER_VOLUMES+=" -v $SVACE_ANALYZER_DIR:/opt/svace-analyzer"
+DOCKER_VOLUMES+=" -v $TMP_DIR/arm:/opt/rootfs"
+DOCKER_VOLUMES+=" -v $TMP_DIR/libarmcl:/opt/libarmcl"
+
+# Change .gitmodules to work around the permission issue
+CMD_SUBMODULE="docker/change_gitmodule.sh"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME sh -c "$CMD_SUBMODULE"
+
+if [ -n "$DOCKER_INTERACTIVE" ]; then
+  DOCKER_RUN_OPTS+=" -it"
+  CMD="/bin/bash"
+else
+  CMD="make external_acl && /opt/svace-analyzer/bin/svace init && /opt/svace-analyzer/bin/svace build make runtime"
+fi
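+# Setting DOCKER_INTERACTIVE (e.g. DOCKER_INTERACTIVE=1) opens an interactive shell
+# in the container instead of running the build and svace commands above.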
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME sh -c "$CMD"
+BUILD_RESULT=$?
+
+# Newly created files during above docker run can have different ownership.
+# This may cause some problems, for example, some jenkins slaves or developers
+# can't remove built files due to lack of permission.
+# To address this issue, let's change owner of all files
+# in NNFW to owner of NNFW.
+NNFW_OWNER_UID=$(stat -c "%u" $HOST_HOME)
+NNFW_OWNER_GID=$(stat -c "%g" $HOST_HOME)
+
+CMD="chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID $DOCKER_HOME"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
+
+$SVACE_ANALYZER_DIR/bin/svace analyze --warning $TMP_DIR/warn-settings.cxx.txt
+
+popd
+
+rm -rf $TMP_DIR
+exit $BUILD_RESULT
diff --git a/ci/docker_coverage_report.sh b/ci/docker_coverage_report.sh
new file mode 100755 (executable)
index 0000000..a311b2e
--- /dev/null
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker}
+
+SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+HOST_HOME=$SCRIPT_ROOT/../
+DOCKER_HOME=/home
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES+=" -v $HOST_HOME:$DOCKER_HOME"
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+
+DOCKER_RUN_OPTS="--rm"
+DOCKER_RUN_OPTS+=" -w $DOCKER_HOME"
+
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME sh -c "./ci/gen_coverage_report.sh"
+
+BUILD_RESULT=$?
+
+# Newly created files during above docker run can have different ownership.
+# This may cause some problems, for example, some jenkins slaves or developers
+# can't remove built files due to lack of permission.
+# To address this issue, let's change owner of all files
+# in NNFW to owner of NNFW.
+NNFW_OWNER_UID=$(stat -c "%u" $HOST_HOME)
+NNFW_OWNER_GID=$(stat -c "%g" $HOST_HOME)
+
+CMD="chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID $DOCKER_HOME"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
+
+exit $BUILD_RESULT
+
diff --git a/ci/docker_cross_test_coverage_build.sh b/ci/docker_cross_test_coverage_build.sh
new file mode 100755 (executable)
index 0000000..9f89970
--- /dev/null
@@ -0,0 +1,68 @@
+#!/bin/bash
+
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker}
+
+SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+HOST_HOME=$SCRIPT_ROOT/../
+DOCKER_HOME=/home
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES+=" -v $HOST_HOME:$DOCKER_HOME"
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+DOCKER_ENV_VARS+=" -e EXT_ACL_FOLDER=/opt/libarmcl"
+DOCKER_ENV_VARS+=" -e COVERAGE_BUILD=1"
+
+DOCKER_RUN_OPTS="--rm"
+DOCKER_RUN_OPTS+=" -w $DOCKER_HOME"
+
+
+TMP_DIR=$HOST_HOME/tmp
+mkdir -p $TMP_DIR/libarmcl
+
+# prepare armcl library
+wget http://npuarchive.mooo.com/archive/nnfw/libarmcl/libarmcl.tar.gz -O $TMP_DIR/libarmcl.tar.gz -nv
+tar -zxvf $TMP_DIR/libarmcl.tar.gz -C $TMP_DIR/libarmcl
+
+# prepare rootfs
+wget http://npuarchive.mooo.com/archive/nnfw/rootfs/rootfs_arm_ubuntu.tar.gz -O $TMP_DIR/rootfs.tar.gz -nv
+tar -zxf $TMP_DIR/rootfs.tar.gz -C $TMP_DIR
+
+DOCKER_VOLUMES+=" -v $TMP_DIR/arm:/opt/rootfs"
+DOCKER_VOLUMES+=" -v $TMP_DIR/libarmcl:/opt/libarmcl"
+
+# Change .gitmodules to work around the permission issue
+CMD_SUBMODULE="docker/change_gitmodule.sh"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME sh -c "$CMD_SUBMODULE"
+
+if [ -n "$DOCKER_INTERACTIVE" ]; then
+  DOCKER_RUN_OPTS+=" -it"
+  CMD="/bin/bash"
+else
+  CMD="make external_acl && make && make install && make build_coverage_suite"
+fi
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME sh -c "$CMD"
+BUILD_RESULT=$?
+
+# Newly created files during above docker run can have different ownership.
+# This may cause some problems, for example, some jenkins slaves or developers
+# can't remove built files due to lack of permission.
+# To address this issue, let's change owner of all files
+# in NNFW to owner of NNFW.
+NNFW_OWNER_UID=$(stat -c "%u" $HOST_HOME)
+NNFW_OWNER_GID=$(stat -c "%g" $HOST_HOME)
+
+CMD="chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID $DOCKER_HOME"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
+
+rm -rf $TMP_DIR
+exit $BUILD_RESULT
diff --git a/ci/docker_gbs_build.sh b/ci/docker_gbs_build.sh
new file mode 100755 (executable)
index 0000000..b9a9e17
--- /dev/null
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+MY_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+NNFW_ROOT=$MY_PATH/../
+if [ -z ${DOCKER_IMAGE} ];
+then
+    # use default docker image from https://github.sec.samsung.net/chunseok-lee/gbs_docker
+    DOCKER_IMAGE=nnfw_docker_tizen:latest
+fi
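+# Example (illustrative image name):
+#   DOCKER_IMAGE=my_gbs_docker:latest ./ci/docker_gbs_build.sh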
+
+CMD="gbs -c /home/nnfw/ci/gbs.conf  build -A armv7l --profile=profile.tizen --clean --include-all"
+docker run --rm -v $NNFW_ROOT:/home/nnfw -w /home/nnfw ${DOCKER_ENV_VARS:-} ${DOCKER_IMAGE} sh -c "$CMD"
+
diff --git a/ci/docker_run_test.sh b/ci/docker_run_test.sh
new file mode 100755 (executable)
index 0000000..208c63a
--- /dev/null
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+# default DOCKER_IMAGE_NAME=nnfw_docker
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker}
+echo "Using docker image $DOCKER_IMAGE_NAME"
+
+if [ -z "`docker images | grep $DOCKER_IMAGE_NAME`" ]; then
+    echo "Need docker image!"
+    exit 1
+fi
+
+HOST_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )"
+
+DOCKER_PATH=/home/npuci/nnfw
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES=" -v /dev/null:/dev/raw1394"
+DOCKER_VOLUMES+=" -v $HOST_PATH:$DOCKER_PATH"
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+
+DOCKER_RUN_OPTS="--rm"
+DOCKER_RUN_OPTS+=" -w $DOCKER_PATH"
+
+CMD="make install"
+
+if [ "$DOCKER_INTERACTIVE" ]; then
+    DOCKER_RUN_OPTS+=" -it"
+    CMD="/bin/bash"
+fi
+
+# Change .gitmodules to work around the permission issue
+CMD_SUBMODULE="docker/change_gitmodule.sh"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD_SUBMODULE
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
+
+BUILD_RESULT=$?
+
+# Newly created files during above docker run can have different ownership.
+# This may cause some problems, for example, some jenkins slaves or developers
+# can't remove built files due to lack of permission.
+# To address this issue, let's change owner of all files
+# in NNFW to owner of NNFW.
+NNFW_OWNER_UID=$(stat -c "%u" $HOST_PATH)
+NNFW_OWNER_GID=$(stat -c "%g" $HOST_PATH)
+
+CMD="chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID $DOCKER_PATH"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
+
+exit $BUILD_RESULT
diff --git a/ci/format-checker.sh b/ci/format-checker.sh
new file mode 100755 (executable)
index 0000000..5186b74
--- /dev/null
@@ -0,0 +1,89 @@
+#!/bin/bash
+
+function check_tools() {
+    which clang-format-3.9
+    if [[ $? -ne 0 ]]; then
+        echo "Error: clang-format-3.9 is not available."
+        echo "       Please install clang-format-3.9."
+        exit 1
+    fi
+
+    which yapf
+    if [[ $? -ne 0 ]]; then
+        echo "Error: yapf is not available."
+        echo "       Please install yapf."
+        exit 1
+    fi
+}
+
+function check_cpp_files() {
+    DIRECTORIES_TO_BE_TESTED=("$@")
+
+    # Check c++ files
+    CPP_FILES_IN_COMPILER=$(find "${DIRECTORIES_TO_BE_TESTED[@]}" -iname '*.h' -o -iname '*.cpp' -o -iname '*.cc')
+
+    if [[ ${#CPP_FILES_IN_COMPILER} -eq 0 ]]; then
+        echo "No cpp files to be checked"
+        return
+    fi
+
+    CPP_FILES_TO_BE_TESTED=$(git ls-files $CPP_FILES_IN_COMPILER)
+    if [[ ${#CPP_FILES_TO_BE_TESTED} -eq 0 ]]; then
+        echo "No changed cpp files to be checked"
+        return
+    fi
+
+    clang-format-3.9 -i $CPP_FILES_TO_BE_TESTED
+}
+
+function check_python_files() {
+    DIRECTORIES_TO_BE_TESTED=("$@")
+
+    # Check python files
+    PYTHON_FILES_IN_COMPILER=$(find "${DIRECTORIES_TO_BE_TESTED[@]}" -iname '*.py')
+
+    if [[ ${#PYTHON_FILES_IN_COMPILER} -eq 0 ]]; then
+        echo "No python files to be checked"
+        return
+    fi
+
+    PYTHON_FILES_TO_BE_TESTED=$(git ls-files $PYTHON_FILES_IN_COMPILER)
+    if [[ ${#PYTHON_FILES_TO_BE_TESTED} -eq 0 ]]; then
+        echo "No changed python files to be checked"
+        return
+    fi
+
+    yapf -i --style='{based_on_style: pep8, column_limit: 90}' $PYTHON_FILES_TO_BE_TESTED
+}
+
+echo "Make sure commit all changes before running this checker."
+
+check_tools
+
+DIRECTORIES_TO_BE_TESTED=()
+
+for DIR_TO_BE_TESTED in $(find -name '.FORMATCHECKED' -exec dirname {} \;); do
+    DIRECTORIES_TO_BE_TESTED+=("$DIR_TO_BE_TESTED")
+done
+
+if [[ ${#DIRECTORIES_TO_BE_TESTED[@]} -eq 0 ]]; then
+    echo "No directories to be checked"
+    exit 0
+fi
+
+check_cpp_files "${DIRECTORIES_TO_BE_TESTED[@]}"
+check_python_files "${DIRECTORIES_TO_BE_TESTED[@]}"
+
+git diff > format.patch
+PATCHFILE_SIZE=$(stat -c%s format.patch)
+if [[ $PATCHFILE_SIZE -ne 0 ]]; then
+    echo "[FAILED] Format checker failed and update code to follow convention."
+    echo "         You can find changes in format.patch"
+    exit 1
+else
+    echo "[PASSED] Format checker succeed."
+    exit 0
+fi
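+# The generated format.patch can typically be applied to a clean checkout with,
+# for example, `git apply format.patch`.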
+
+echo "Error: Something went wrong."
+exit 1
diff --git a/ci/gbs.conf b/ci/gbs.conf
new file mode 100644 (file)
index 0000000..ad6cc4f
--- /dev/null
@@ -0,0 +1,25 @@
+[general]
+#Current profile name which should match a profile section name
+profile = profile.tizen
+
+[profile.tizen]
+user=obs_viewer
+passwdx = QlpoOTFBWSZTWWV18UwAAAKDgAAAkiCZgCAAMQZMQQDJ6jQwAvxdyRThQkGV18Uw
+obs = obs.tizen
+repos = repo.tizen_base,repo.tizen_mobile
+buildroot = /home/GBS-ROOT/
+
+[obs.tizen]
+url = http://api.tizen.org
+
+# FIXME: for libarmcl-devel, tensorflow-lite
+[repo.localacl]
+url = http://npuarchive.mooo.com/archive/nnfw/repo/
+
+[repo.tizen_mobile]
+url = http://download.tizen.org/snapshots/tizen/unified/latest/repos/standard/packages/
+
+[repo.tizen_base]
+url =  http://download.tizen.org/snapshots/tizen/base/latest/repos/standard/packages/
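+# This configuration is consumed by ci/docker_gbs_build.sh, roughly as:
+#   gbs -c ci/gbs.conf build -A armv7l --profile=profile.tizen --clean --include-all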
+
+
diff --git a/ci/gen_coverage_report.sh b/ci/gen_coverage_report.sh
new file mode 100755 (executable)
index 0000000..4401e2b
--- /dev/null
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# This file is based on https://github.sec.samsung.net/STAR/nncc/pull/80
+
+SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+NNFW_ROOT=$SCRIPT_ROOT/..
+
+LCOV_PATH=$(command -v lcov)
+GENHTML_PATH=$(command -v genhtml)
+
+if [[ -z "${LCOV_PATH}" ]]; then
+  echo "ERROR: 'lcov' is not found"
+  exit 255
+fi
+
+if [[ -z "${GENHTML_PATH}" ]]; then
+  echo "ERROR: 'genhtml' is not found"
+  exit 255
+fi
+
+OUTPUT_PATH="$1"
+
+if [[ -z "${OUTPUT_PATH}" ]]; then
+  OUTPUT_PATH="$NNFW_ROOT/coverage"
+fi
+
+if [[ -e "${OUTPUT_PATH}" ]]; then
+  echo "ERROR: '${OUTPUT_PATH}' already exists"
+  exit 255
+fi
+
+mkdir -p "${OUTPUT_PATH}"
+
+RAW_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.raw.info"
+SRC_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.src.info"
+FINAL_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.info"
+HTML_PATH="${OUTPUT_PATH}/html"
+COVERTURA_PATH="${OUTPUT_PATH}/nnfw_coverage.xml"
+
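+# Coverage pipeline (descriptive): capture raw data from the build tree (-c/-d),
+# keep only project sources under /home/src (-e), drop test and vendored dependency
+# code (-r), render an HTML report, and convert the result to Cobertura XML.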
+"${LCOV_PATH}" -c -d "${NNFW_ROOT}" -o "${RAW_COVERAGE_INFO_PATH}"
+"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${SRC_COVERAGE_INFO_PATH}" '/home/src/*'
+"${LCOV_PATH}" -a "${SRC_COVERAGE_INFO_PATH}" -o "${FINAL_COVERAGE_INFO_PATH}"
+"${LCOV_PATH}" -r "${FINAL_COVERAGE_INFO_PATH}" -o "${FINAL_COVERAGE_INFO_PATH}" '/home/src/runtime/test/*'
+"${LCOV_PATH}" -r "${FINAL_COVERAGE_INFO_PATH}" -o "${FINAL_COVERAGE_INFO_PATH}" '/home/src/runtime/ref/nn/depend/*'
+"${GENHTML_PATH}" "${FINAL_COVERAGE_INFO_PATH}" --output-directory "${HTML_PATH}"
+
+tar -zcf "${OUTPUT_PATH}"/coverage_report.tar.gz "${HTML_PATH}"
+$NNFW_ROOT/ci/lcov-to-covertura-xml.sh "${FINAL_COVERAGE_INFO_PATH}" -o "${COVERTURA_PATH}"
diff --git a/ci/imported_url.txt b/ci/imported_url.txt
new file mode 100644 (file)
index 0000000..b3a27cf
--- /dev/null
@@ -0,0 +1,3 @@
+# This file contains URLs of files that were imported from public origins.
+
+1. lcov-to-covertura-xml.sh : https://github.com/eriwen/lcov-to-cobertura-xml
diff --git a/ci/lcov-to-covertura-xml.sh b/ci/lcov-to-covertura-xml.sh
new file mode 100755 (executable)
index 0000000..7aae6d1
--- /dev/null
@@ -0,0 +1,414 @@
+#!/usr/bin/env python
+
+# Copyright 2011-2012 Eric Wendelin
+#
+# This is free software, licensed under the Apache License, Version 2.0,
+# available in the accompanying LICENSE.txt file.
+
+"""
+Converts lcov line coverage output to Cobertura-compatible XML for CI
+"""
+
+import re
+import sys
+import os
+import time
+import subprocess
+from xml.dom import minidom
+from optparse import OptionParser
+
+from distutils.spawn import find_executable
+
+CPPFILT = "c++filt"
+HAVE_CPPFILT = False
+
+if find_executable(CPPFILT) is not None:
+    HAVE_CPPFILT = True
+
+VERSION = '1.6'
+__all__ = ['LcovCobertura']
+
+
+class Demangler(object):
+    def __init__(self):
+        self.pipe = subprocess.Popen(
+            CPPFILT, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+
+    def demangle(self, name):
+        self.pipe.stdin.write(name + "\n")
+        return self.pipe.stdout.readline().rstrip()
+
+
+class LcovCobertura(object):
+    """
+    Converts code coverage report files in lcov format to Cobertura's XML
+    report format so that CI servers like Jenkins can aggregate results and
+    determine build stability etc.
+
+    >>> from lcov_cobertura import LcovCobertura
+    >>> LCOV_INPUT = 'your lcov input'
+    >>> converter = LcovCobertura(LCOV_INPUT)
+    >>> cobertura_xml = converter.convert()
+    >>> print(cobertura_xml)
+    """
+
+    def __init__(self, lcov_data, base_dir='.', excludes=None, demangle=False):
+        """
+        Create a new :class:`LcovCobertura` object using the given `lcov_data`
+        and `options`.
+
+        :param lcov_data: Path to LCOV data file
+        :type lcov_data: string
+        :param base_dir: Path upon which to base all sources
+        :type base_dir: string
+        :param excludes: list of regexes to packages as excluded
+        :type excludes: [string]
+        :param demangle: whether to demangle function names using c++filt
+        :type demangle: bool
+        """
+
+        if not excludes:
+            excludes = []
+        self.lcov_data = lcov_data
+        self.base_dir = base_dir
+        self.excludes = excludes
+        if demangle:
+            demangler = Demangler()
+            self.format = demangler.demangle
+        else:
+            self.format = lambda x: x
+
+    def convert(self):
+        """
+        Convert lcov file to cobertura XML using options from this instance.
+        """
+        coverage_data = self.parse()
+        return self.generate_cobertura_xml(coverage_data)
+
+    def parse(self):
+        """
+        Generate a data structure representing it that can be serialized in any
+        logical format.
+        """
+
+        coverage_data = {
+            'packages': {},
+            'summary': {'lines-total': 0, 'lines-covered': 0,
+                        'branches-total': 0, 'branches-covered': 0},
+            'timestamp': str(int(time.time()))
+        }
+        package = None
+        current_file = None
+        file_lines_total = 0
+        file_lines_covered = 0
+        file_lines = {}
+        file_methods = {}
+        file_branches_total = 0
+        file_branches_covered = 0
+
+        for line in self.lcov_data.split('\n'):
+            if line.strip() == 'end_of_record':
+                if current_file is not None:
+                    package_dict = coverage_data['packages'][package]
+                    package_dict['lines-total'] += file_lines_total
+                    package_dict['lines-covered'] += file_lines_covered
+                    package_dict['branches-total'] += file_branches_total
+                    package_dict['branches-covered'] += file_branches_covered
+                    file_dict = package_dict['classes'][current_file]
+                    file_dict['lines-total'] = file_lines_total
+                    file_dict['lines-covered'] = file_lines_covered
+                    file_dict['lines'] = dict(file_lines)
+                    file_dict['methods'] = dict(file_methods)
+                    file_dict['branches-total'] = file_branches_total
+                    file_dict['branches-covered'] = file_branches_covered
+                    coverage_data['summary']['lines-total'] += file_lines_total
+                    coverage_data['summary']['lines-covered'] += file_lines_covered
+                    coverage_data['summary']['branches-total'] += file_branches_total
+                    coverage_data['summary']['branches-covered'] += file_branches_covered
+
+            line_parts = line.split(':', 1)
+            input_type = line_parts[0]
+
+            if input_type == 'SF':
+                # Get file name
+                file_name = line_parts[-1].strip()
+                relative_file_name = os.path.relpath(file_name, self.base_dir)
+                package = '.'.join(relative_file_name.split(os.path.sep)[0:-1])
+                class_name = '.'.join(relative_file_name.split(os.path.sep))
+                if package not in coverage_data['packages']:
+                    coverage_data['packages'][package] = {
+                        'classes': {}, 'lines-total': 0, 'lines-covered': 0,
+                        'branches-total': 0, 'branches-covered': 0
+                    }
+                coverage_data['packages'][package]['classes'][
+                    relative_file_name] = {
+                        'name': class_name, 'lines': {}, 'lines-total': 0,
+                        'lines-covered': 0, 'branches-total': 0,
+                        'branches-covered': 0
+                }
+                package = package
+                current_file = relative_file_name
+                file_lines_total = 0
+                file_lines_covered = 0
+                file_lines.clear()
+                file_methods.clear()
+                file_branches_total = 0
+                file_branches_covered = 0
+            elif input_type == 'DA':
+                # DA:2,0
+                (line_number, line_hits) = line_parts[-1].strip().split(',')
+                line_number = int(line_number)
+                if line_number not in file_lines:
+                    file_lines[line_number] = {
+                        'branch': 'false', 'branches-total': 0,
+                        'branches-covered': 0
+                    }
+                file_lines[line_number]['hits'] = line_hits
+                # Increment lines total/covered for class and package
+                try:
+                    if int(line_hits) > 0:
+                        file_lines_covered += 1
+                except:
+                    pass
+                file_lines_total += 1
+            elif input_type == 'BRDA':
+                # BRDA:1,1,2,0
+                (line_number, block_number, branch_number, branch_hits) = line_parts[-1].strip().split(',')
+                line_number = int(line_number)
+                if line_number not in file_lines:
+                    file_lines[line_number] = {
+                        'branch': 'true', 'branches-total': 0,
+                        'branches-covered': 0, 'hits': 0
+                    }
+                file_lines[line_number]['branch'] = 'true'
+                file_lines[line_number]['branches-total'] += 1
+                file_branches_total += 1
+                if branch_hits != '-' and int(branch_hits) > 0:
+                    file_lines[line_number]['branches-covered'] += 1
+                    file_branches_covered += 1
+            elif input_type == 'BRF':
+                file_branches_total = int(line_parts[1])
+            elif input_type == 'BRH':
+                file_branches_covered = int(line_parts[1])
+            elif input_type == 'FN':
+                # FN:5,(anonymous_1)
+                function_line, function_name = line_parts[-1].strip().split(',')
+                file_methods[function_name] = [function_line, '0']
+            elif input_type == 'FNDA':
+                # FNDA:0,(anonymous_1)
+                (function_hits, function_name) = line_parts[-1].strip().split(',')
+                if function_name not in file_methods:
+                    file_methods[function_name] = ['0', '0']
+                file_methods[function_name][-1] = function_hits
+
+        # Exclude packages
+        excluded = [x for x in coverage_data['packages'] for e in self.excludes
+                    if re.match(e, x)]
+        for package in excluded:
+            del coverage_data['packages'][package]
+
+        # Compute line coverage rates
+        for package_data in list(coverage_data['packages'].values()):
+            package_data['line-rate'] = self._percent(
+                package_data['lines-total'],
+                package_data['lines-covered'])
+            package_data['branch-rate'] = self._percent(
+                package_data['branches-total'],
+                package_data['branches-covered'])
+
+        return coverage_data
+
+    def generate_cobertura_xml(self, coverage_data):
+        """
+        Given parsed coverage data, return a String cobertura XML representation.
+
+        :param coverage_data: Nested dict representing coverage information.
+        :type coverage_data: dict
+        """
+
+        dom_impl = minidom.getDOMImplementation()
+        doctype = dom_impl.createDocumentType("coverage", None,
+                                              "http://cobertura.sourceforge.net/xml/coverage-04.dtd")
+        document = dom_impl.createDocument(None, "coverage", doctype)
+        root = document.documentElement
+        summary = coverage_data['summary']
+        self._attrs(root, {
+            'branch-rate': self._percent(summary['branches-total'],
+                                         summary['branches-covered']),
+            'branches-covered': str(summary['branches-covered']),
+            'branches-valid': str(summary['branches-total']),
+            'complexity': '0',
+            'line-rate': self._percent(summary['lines-total'],
+                                       summary['lines-covered']),
+            'lines-covered': str(summary['lines-covered']),
+            'lines-valid': str(summary['lines-total']),
+            'timestamp': coverage_data['timestamp'],
+            'version': '2.0.3'
+        })
+
+        sources = self._el(document, 'sources', {})
+        source = self._el(document, 'source', {})
+        source.appendChild(document.createTextNode(self.base_dir))
+        sources.appendChild(source)
+
+        root.appendChild(sources)
+
+        packages_el = self._el(document, 'packages', {})
+
+        packages = coverage_data['packages']
+        for package_name, package_data in list(packages.items()):
+            package_el = self._el(document, 'package', {
+                'line-rate': package_data['line-rate'],
+                'branch-rate': package_data['branch-rate'],
+                'name': package_name,
+                'complexity': '0',
+            })
+            classes_el = self._el(document, 'classes', {})
+            for class_name, class_data in list(package_data['classes'].items()):
+                class_el = self._el(document, 'class', {
+                    'branch-rate': self._percent(class_data['branches-total'],
+                                                 class_data['branches-covered']),
+                    'complexity': '0',
+                    'filename': class_name,
+                    'line-rate': self._percent(class_data['lines-total'],
+                                               class_data['lines-covered']),
+                    'name': class_data['name']
+                })
+
+                # Process methods
+                methods_el = self._el(document, 'methods', {})
+                for method_name, (line, hits) in list(class_data['methods'].items()):
+                    method_el = self._el(document, 'method', {
+                        'name': self.format(method_name),
+                        'signature': '',
+                        'line-rate': '1.0' if int(hits) > 0 else '0.0',
+                        'branch-rate': '1.0' if int(hits) > 0 else '0.0',
+                    })
+                    method_lines_el = self._el(document, 'lines', {})
+                    method_line_el = self._el(document, 'line', {
+                        'hits': hits,
+                        'number': line,
+                        'branch': 'false',
+                    })
+                    method_lines_el.appendChild(method_line_el)
+                    method_el.appendChild(method_lines_el)
+                    methods_el.appendChild(method_el)
+
+                # Process lines
+                lines_el = self._el(document, 'lines', {})
+                lines = list(class_data['lines'].keys())
+                lines.sort()
+                for line_number in lines:
+                    line_el = self._el(document, 'line', {
+                        'branch': class_data['lines'][line_number]['branch'],
+                        'hits': str(class_data['lines'][line_number]['hits']),
+                        'number': str(line_number)
+                    })
+                    if class_data['lines'][line_number]['branch'] == 'true':
+                        total = int(class_data['lines'][line_number]['branches-total'])
+                        covered = int(class_data['lines'][line_number]['branches-covered'])
+                        percentage = int((covered * 100.0) / total)
+                        line_el.setAttribute('condition-coverage',
+                                             '{0}% ({1}/{2})'.format(
+                                                 percentage, covered, total))
+                    lines_el.appendChild(line_el)
+
+                class_el.appendChild(methods_el)
+                class_el.appendChild(lines_el)
+                classes_el.appendChild(class_el)
+            package_el.appendChild(classes_el)
+            packages_el.appendChild(package_el)
+        root.appendChild(packages_el)
+
+        return document.toprettyxml()
+
+    def _el(self, document, name, attrs):
+        """
+        Create an element within document with given name and attributes.
+
+        :param document: Document element
+        :type document: Document
+        :param name: Element name
+        :type name: string
+        :param attrs: Attributes for element
+        :type attrs: dict
+        """
+        return self._attrs(document.createElement(name), attrs)
+
+    def _attrs(self, element, attrs):
+        """
+        Set attributes on given element.
+
+        :param element: DOM Element
+        :type element: Element
+        :param attrs: Attributes for element
+        :type attrs: dict
+        """
+        for attr, val in list(attrs.items()):
+            element.setAttribute(attr, val)
+        return element
+
+    def _percent(self, lines_total, lines_covered):
+        """
+        Get the percentage of lines covered in the total, with formatting.
+
+        :param lines_total: Total number of lines in given module
+        :type lines_total: number
+        :param lines_covered: Number of lines covered by tests in module
+        :type lines_covered: number
+        """
+
+        if lines_total == 0:
+            return '0.0'
+        return str(float(float(lines_covered) / float(lines_total)))
+
+
+def main(argv=None):
+    """
+    Converts LCOV coverage data to Cobertura-compatible XML for reporting.
+
+    Usage:
+        lcov_cobertura.py lcov-file.dat
+        lcov_cobertura.py lcov-file.dat -b src/dir -e test.lib -o path/out.xml
+
+    By default, XML output will be written to ./coverage.xml
+    """
+    if argv is None:
+        argv = sys.argv
+    parser = OptionParser()
+    parser.usage = ('lcov_cobertura.py lcov-file.dat [-b source/dir] '
+                    '[-e <exclude packages regex>] [-o output.xml] [-d]')
+    parser.description = 'Converts lcov output to cobertura-compatible XML'
+    parser.add_option('-b', '--base-dir', action='store',
+                      help='Directory where source files are located',
+                      dest='base_dir', default='.')
+    parser.add_option('-e', '--excludes',
+                      help='Comma-separated list of regexes of packages to exclude',
+                      action='append', dest='excludes', default=[])
+    parser.add_option('-o', '--output',
+                      help='Path to store cobertura xml file',
+                      action='store', dest='output', default='coverage.xml')
+    parser.add_option('-d', '--demangle',
+                      help='Demangle C++ function names using %s' % CPPFILT,
+                      action='store_true', dest='demangle', default=False)
+    (options, args) = parser.parse_args(args=argv)
+
+    if options.demangle and not HAVE_CPPFILT:
+        raise RuntimeError("C++ filter executable (%s) not found!" % CPPFILT)
+
+    if len(args) != 2:
+        print(main.__doc__)
+        sys.exit(1)
+
+    try:
+        with open(args[1], 'r') as lcov_file:
+            lcov_data = lcov_file.read()
+            lcov_cobertura = LcovCobertura(lcov_data, options.base_dir, options.excludes, options.demangle)
+            cobertura_xml = lcov_cobertura.convert()
+        with open(options.output, mode='wt') as output_file:
+            output_file.write(cobertura_xml)
+    except IOError:
+        sys.stderr.write("Unable to convert %s to Cobertura XML" % args[1])
+
+if __name__ == '__main__':
+    main()