--- /dev/null
+#!/usr/bin/python3
+
+import argparse
+import datetime
+import filecmp
+import hashlib
+import logging
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+
+COMMON_BIN_PATH = "../../common/bin/"
+DELTA_PARTIAL = "DELTA_PARTIAL"
+LOGFILE = "Delta.log"
+PARENT_DIR = ".."
+RM_DIR = "whiteout"
+
+
+def ensure_dir_exists(path):
+ if not os.path.exists(path):
+ os.makedirs(path)
+ elif os.path.isfile(path):
+        raise FileExistsError(path)
+
+
+def path_leaf(path):
+ head, tail = os.path.split(path)
+ return tail
+
+
+def ishardlink(path):
+    return os.stat(path).st_nlink > 1
+
+
+def get_inode(path):
+ return os.stat(path).st_ino
+
+
+class DeltaPartialGenerator:
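+    """Compares two extracted partition trees (BASE_OLD vs. BASE_NEW) and builds a
+    DELTA_PARTIAL payload for PART_NAME in OUT_DIR: changed and added entries are
+    copied in, while removed entries are marked with character device nodes that
+    the update applier is expected to treat as deletion markers (whiteouts)."""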
+ class OperationsCount:
+ def __init__(self):
+ self.sym_diff_cnt = 0
+ self.sym_new_cnt = 0
+ self.hard_diff_cnt = 0
+ self.hard_new_cnt = 0
+ self.del_cnt = 0
+ self.new_cnt = 0
+ self.diff_cnt = 0
+ self.move_cnt = 0
+
+ class ConstantStrings:
+ def __init__(self, PART_NAME, BASE_OLD, BASE_NEW, OUT_DIR):
+ self.PART_NAME = PART_NAME
+ self.BASE_OLD = BASE_OLD
+ self.BASE_NEW = BASE_NEW
+ self.OUT_DIR = OUT_DIR
+
+ class OldNewEntriesData:
+ def __init__(self, BASE_OLD, BASE_NEW):
+ self.old_files, self.old_dirs = self.get_entries_data(BASE_OLD)
+ self.new_files, self.new_dirs = self.get_entries_data(BASE_NEW)
+ self.new_hardlinks = self.get_hardlinks(BASE_NEW)
+ self.old_hardlinks = self.get_hardlinks(BASE_OLD)
+
+ def get_entries_data(self, path):
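+            """Walk 'path' and return sorted lists of relative file and directory
+            paths; symlinks pointing to directories are recorded as files."""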
+ all_files = []
+ all_dirs = []
+            for root, directories, filenames in os.walk(path, topdown=False, followlinks=False):
+                for directory in directories:
+                    dir_name = os.path.join(root, directory)
+                    if os.path.islink(dir_name):
+                        # A symlink pointing to a directory is treated as a file entry
+                        logging.debug("This is a symlink pointing to a dir - %s" % dir_name)
+                        all_files.append(os.path.relpath(dir_name, path))
+                    else:
+                        all_dirs.append(os.path.relpath(dir_name, path))
+                for filename in filenames:
+                    file_name = os.path.join(root, filename)
+                    all_files.append(os.path.relpath(file_name, path))
+
+ all_files.sort()
+ all_dirs.sort()
+ return all_files, all_dirs
+
+ def get_hardlinks(self, base):
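+            """Return a dict mapping each additional hard-link path (relative to
+            'base') to the first path seen for the same inode; that first path is
+            treated as the regular file the other entries link to."""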
+ hardlinks_dict = {}
+ inodes_dict = {}
+
+            for root, directories, files in os.walk(base, topdown=True, followlinks=False):
+ for file in sorted(files):
+ file_name = os.path.join(root, file)
+ if not os.path.islink(file_name) and ishardlink(file_name):
+ inode = get_inode(file_name)
+ rel_path = os.path.relpath(file_name, base)
+ if inode not in inodes_dict:
+ inodes_dict[inode] = rel_path
+ else:
+ hardlinks_dict[rel_path] = inodes_dict[inode]
+
+ return hardlinks_dict
+
+ class EntriesLists:
+ def __init__(self):
+            self.added_files = []
+ self.removed_files = []
+ self.added_dirs = []
+ self.removed_dirs = []
+ self.changed_files = []
+ self.unchanged_files = []
+
+ def __init__(self, PART_NAME, BASE_OLD, BASE_NEW, OUT_DIR):
+ self.operations_count = self.OperationsCount()
+ self.constant_strings = self.ConstantStrings(PART_NAME, BASE_OLD, BASE_NEW, OUT_DIR)
+ self.old_new_entries_data = self.OldNewEntriesData(BASE_OLD, BASE_NEW)
+ self.entries_lists = self.EntriesLists()
+
+ def add_file(self, src, dst, linkto=None):
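+        """Copy src to dst, creating the parent directory if needed; if linkto is
+        given, create a symlink at dst pointing to linkto instead of copying."""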
+ dstfolder = os.path.dirname(dst)
+ ensure_dir_exists(dstfolder)
+ if linkto is None:
+ shutil.copy(src, dst)
+ else:
+ os.symlink(linkto, dst)
+
+ def generate_entries_lists(self):
+ for elt in self.old_new_entries_data.new_files:
+ if elt not in self.old_new_entries_data.old_files:
+ self.entries_lists.added_files.append(elt)
+ logging.info("Added files %s" % elt)
+
+ for elt in self.old_new_entries_data.old_files:
+ if elt not in self.old_new_entries_data.new_files:
+ self.entries_lists.removed_files.append(elt)
+ logging.info("Removed files %s" % elt)
+
+ for elt in self.old_new_entries_data.old_dirs:
+ if elt not in self.old_new_entries_data.new_dirs:
+ self.entries_lists.removed_dirs.append(elt)
+ logging.info("Removed dirs %s" % elt + "/")
+
+ for elt in self.old_new_entries_data.new_dirs:
+ if elt not in self.old_new_entries_data.old_dirs:
+ self.entries_lists.added_dirs.append(elt)
+ logging.info("Added dirs %s" % elt + "/")
+
+ def generate_changed_files_lists(self):
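+        """Split files present in both trees into changed_files / unchanged_files by
+        comparing symlink targets, hard-link status and regular-file contents."""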
+ for elt in self.old_new_entries_data.new_files:
+ if elt in self.old_new_entries_data.old_files:
+ src_file = os.path.join(self.constant_strings.BASE_OLD, elt)
+ dst_file = os.path.join(self.constant_strings.BASE_NEW, elt)
+ # Both are symbolic links and they differ
+ if os.path.islink(src_file) and os.path.islink(dst_file):
+ if not (os.readlink(src_file) == os.readlink(dst_file)):
+ self.entries_lists.changed_files.append(elt)
+ logging.info("Symlinks changed - %s" % elt)
+ else:
+ self.entries_lists.unchanged_files.append(elt)
+                # Both are hardlinks - we add them because we cannot be sure whether the file they point to changed
+ elif elt in self.old_new_entries_data.new_hardlinks and elt in self.old_new_entries_data.old_hardlinks:
+ self.entries_lists.changed_files.append(elt)
+                # Both are normal files and they differ. (isfile() returns True for symlinks and hardlinks as well,
+                # so additional checks are needed to make sure both entries are regular files)
+ elif (not (os.path.islink(src_file) or os.path.islink(dst_file))) \
+ and (not (elt in self.old_new_entries_data.new_hardlinks or elt in self.old_new_entries_data.old_hardlinks)) \
+ and os.path.isfile(src_file) and os.path.isfile(dst_file):
+ if not filecmp.cmp(src_file, dst_file):
+ self.entries_lists.changed_files.append(elt)
+ else:
+ self.entries_lists.unchanged_files.append(elt)
+ # Both are files of different types
+ else:
+ logging.info("Files are of diff types but same names; Src - %s Des - %s" % (src_file, dst_file))
+ self.entries_lists.changed_files.append(elt)
+
+ def process_changed_entries(self):
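+        """Copy every changed entry from BASE_NEW into OUT_DIR, recreating symlinks
+        with their new targets and counting each operation type."""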
+ for elt in self.entries_lists.changed_files:
+ out_file = os.path.join(self.constant_strings.OUT_DIR, elt)
+ dst_file = os.path.join(self.constant_strings.BASE_NEW, elt)
+ src_file = os.path.join(self.constant_strings.BASE_OLD, elt)
+ # Both files are symbolic links and they differ
+ if os.path.islink(dst_file) and os.path.islink(src_file):
+ logging.debug("File changed is a symlink %s " % dst_file)
+ patch = os.readlink(dst_file)
+ self.add_file(dst_file, out_file, patch)
+ self.operations_count.sym_diff_cnt += 1
+ # Both are hardlinks and they differ (pointing to something different, new/changed file)
+ elif elt in self.old_new_entries_data.old_hardlinks and elt in self.old_new_entries_data.new_hardlinks:
+ if self.old_new_entries_data.old_hardlinks[elt] != self.old_new_entries_data.new_hardlinks[elt] \
+ or self.old_new_entries_data.new_hardlinks[elt] in self.entries_lists.changed_files \
+ or self.old_new_entries_data.new_hardlinks[elt] in self.entries_lists.added_files:
+ logging.debug("File changed is a hardlink %s %s" % (src_file, dst_file))
+ self.add_file(dst_file, out_file)
+ self.operations_count.hard_diff_cnt += 1
+ # Both are NORMAL files and they differ
+ elif not os.path.islink(src_file) and not os.path.islink(dst_file) \
+ and elt not in self.old_new_entries_data.old_hardlinks and elt not in self.old_new_entries_data.new_hardlinks \
+ and os.path.isfile(dst_file) and os.path.isfile(src_file):
+ logging.debug("Regular file change %s %s" % (src_file, dst_file))
+ self.add_file(dst_file, out_file)
+ self.operations_count.diff_cnt += 1
+ # Both differ but they are of different types
+ else:
+                # Processing and updating the partition txt file is handled by the REMOVED and NEW files cases; we just make an entry here
+ self.add_file(dst_file, out_file)
+ self.operations_count.diff_cnt += 1
+
+ def process_removed_entries(self):
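+        """Mark removed files and directories in OUT_DIR with character device nodes
+        created via mknod; these are assumed to act as whiteout-style deletion
+        markers for the component that applies the delta."""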
+ for elt in self.entries_lists.removed_files:
+ src_file = os.path.join(self.constant_strings.BASE_OLD, elt)
+ dst_file = os.path.join(self.constant_strings.OUT_DIR, elt)
+ if not os.path.exists(src_file):
+                if os.path.islink(src_file):
+                    logging.info(f"{src_file} is a symlink and does not exist")
+                else:
+                    logging.info(f"{src_file} is a file and does not exist")
+ continue
+
+ dstfolder = os.path.dirname(dst_file)
+ ensure_dir_exists(dstfolder)
+ subprocess.call(["mknod", dst_file, "c", "0", "1"])
+ self.operations_count.del_cnt += 1
+ logging.debug("File deleted %s" % src_file)
+
+ self.entries_lists.removed_dirs.sort(reverse=True)
+ for elt in self.entries_lists.removed_dirs:
+ src_file = os.path.join(self.constant_strings.BASE_OLD, elt)
+ dst_file = os.path.join(self.constant_strings.OUT_DIR, elt)
+ dstfolder = os.path.dirname(dst_file)
+ ensure_dir_exists(dstfolder)
+ subprocess.call(["mknod", dst_file, "c", "0", "1"])
+ self.operations_count.del_cnt += 1
+ logging.debug("Dir deleted- %s" % src_file)
+
+ def process_added_entries(self):
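+        """Copy added symlinks, hardlinks, directories and regular files from
+        BASE_NEW into OUT_DIR (FIFOs are skipped when copying regular entries)."""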
+ for elt in self.entries_lists.added_files:
+ src_file = os.path.join(self.constant_strings.BASE_NEW, elt)
+ dst_file = os.path.join(self.constant_strings.OUT_DIR, elt)
+
+ if os.path.islink(src_file):
+ logging.debug("File is an added symlink %s" % elt)
+ self.add_file(src_file, dst_file)
+ self.operations_count.sym_new_cnt += 1
+ elif elt in self.old_new_entries_data.new_hardlinks:
+ logging.debug("File is an added hardlink %s" % elt)
+ self.add_file(src_file, dst_file)
+ self.operations_count.hard_new_cnt += 1
+            elif os.path.isdir(src_file):  # We just create an empty directory here
+ if not os.path.exists(dst_file):
+ os.makedirs(dst_file)
+ logging.debug("New dir created %s" % dst_file)
+ self.operations_count.new_cnt += 1
+ else:
+ self.operations_count.new_cnt += 1
+ destdir = os.path.dirname(dst_file)
+ logging.debug("New files - %s ==> %s" % (src_file, destdir))
+
+ if not os.path.isdir(destdir):
+ try:
+ os.makedirs(destdir)
+ except Exception as e:
+ logging.critical("Error in NEW files DIR entry -%s" % destdir)
+ raise e
+
+ try:
+ if not stat.S_ISFIFO(os.stat(src_file).st_mode):
+ shutil.copy2(src_file, dst_file)
+ logging.debug("New files copied from- %s to- %s" % (src_file, dst_file))
+ except Exception as e:
+ logging.critical("Error in NEW files entry -%s -%s" % (src_file, dst_file))
+ raise e
+
+ for elt in self.entries_lists.added_dirs:
+ dst_dir = os.path.join(self.constant_strings.OUT_DIR, elt)
+ if not os.path.exists(dst_dir):
+ os.makedirs(dst_dir)
+ logging.debug("New dir created %s" % dst_dir)
+ self.operations_count.new_cnt += 1
+
+ def generate_delta_partial(self):
+ tmp_str = "Going from %d files to %d files" % (len(self.old_new_entries_data.old_files), len(self.old_new_entries_data.new_files))
+ print(tmp_str)
+ logging.info(tmp_str)
+
+ # Generate lists of entries
+ self.generate_entries_lists()
+ # What files have changed contents but not name/path?
+ self.generate_changed_files_lists()
+
+ """
+ Patch Section
+ """
+
+ self.process_changed_entries()
+
+ self.process_removed_entries()
+
+ self.process_added_entries()
+
+ logging.info("%d files unchanged" % len(self.entries_lists.unchanged_files))
+ logging.info("%d files added" % len(self.entries_lists.added_files))
+
+ # <PaTcHCoUnT> is not a typo, it is parsed like this in other programs that use deltas
+ tmp_str = "PaTcHCoUnT:Diffs-%d Moves-%d News-%d Delets-%d SymDiffs-%d SymNews-%d HardDiffs-%d HardNews-%d\n" % \
+ (self.operations_count.diff_cnt, self.operations_count.move_cnt, self.operations_count.new_cnt, \
+ self.operations_count.del_cnt, self.operations_count.sym_diff_cnt, self.operations_count.sym_new_cnt, \
+ self.operations_count.hard_diff_cnt, self.operations_count.hard_new_cnt)
+ logging.info(tmp_str)
+ print(tmp_str)
+
+ if self.operations_count.diff_cnt + self.operations_count.move_cnt + self.operations_count.new_cnt + \
+ self.operations_count.del_cnt + self.operations_count.sym_diff_cnt + self.operations_count.sym_new_cnt + \
+ self.operations_count.hard_diff_cnt + self.operations_count.hard_new_cnt == 0:
+ print("No delta generated for %s - %s" % (self.constant_strings.PART_NAME, self.constant_strings.OUT_DIR))
+ logging.info("No delta generated for %s" % self.constant_strings.PART_NAME)
+ shutil.rmtree(self.constant_strings.OUT_DIR)
+
+
+def generate_delta_partial(args):
+ delta_partial_generator = DeltaPartialGenerator(args.PART_NAME, args.BASE_OLD, args.BASE_NEW, args.OUT_DIR)
+ delta_partial_generator.generate_delta_partial()
+
+
+def create_parser():
+ parser = argparse.ArgumentParser(description="Generate a delta of type: DELTA_PARTIAL")
+
+ parser.add_argument("UPDATE_TYPE", type=str, help="TYPE_OF_UPDATE")
+ parser.add_argument("PART_NAME", metavar="PARTITION_NAME", type=str, help="name of partition for which we generate the delta")
+ parser.add_argument("BASE_OLD", type=str, help="older image path / mount point of the older version of the partition")
+ parser.add_argument("BASE_NEW", type=str, help="newer image path / mount point of the newer version of the partition")
+ parser.add_argument("OUT_DIR", type=str, help="directory where generated delta and logs will be placed")
+ parser.add_argument("UPDATE_CFG_PATH", type=str, help="path to config file which will be updated")
+
+ return parser
+
+
+def main():
+ logging.basicConfig(filename=LOGFILE, level=logging.DEBUG)
+
+ try:
+ parser = create_parser()
+ args = parser.parse_args()
+
+ if args.UPDATE_TYPE != DELTA_PARTIAL:
+ parser.error(f"Unknown update type {args.UPDATE_TYPE}!")
+
+ if not (os.path.exists(args.BASE_OLD) and os.path.exists(args.BASE_NEW)):
+ parser.error("Paths to images/mountpoints do not exist!")
+
+ args.UPDATE_CFG_PATH = os.path.join(PARENT_DIR, args.UPDATE_CFG_PATH)
+
+ try:
+ ensure_dir_exists(args.OUT_DIR)
+ except FileExistsError as e:
+ logging.error("Argument passed as OUT_DIR - %s is already an existing file" % args.OUT_DIR)
+ raise e
+
+ start = datetime.datetime.now().time()
+ text = f"Started CreatePartial.py at {start}"
+ logging.info(f"{text:^70}")
+ print(f"{text:^70}")
+ logging.info("Arguments passed: [UpdateType - %s][Part Name - %s] [BaseOld - %s] [BaseNew - %s] \n [OUTPUTDir - %s]" \
+ % (args.UPDATE_TYPE, args.PART_NAME, args.BASE_OLD, args.BASE_NEW, args.OUT_DIR))
+
+ generate_delta_partial(args)
+
+ end = datetime.datetime.now().time()
+ text = f"Done with CreatePartial.py at {end}"
+ logging.info(f"{text:^70}")
+ print(f"{text:^70}")
+ logging.info("Time start [%s] - Time end [%s]" % (start, end))
+ print("Time start [%s] - Time end [%s]" % (start, end))
+
+ except Exception as e:
+ logging.error(f"Usage: {os.path.basename(sys.argv[0])} <Update_Type> <Part_Name> <OLD_Base> <NEW_Base> <OUT_DIR>")
+ raise e
+
+
+if __name__ == "__main__":
+ main()
--- /dev/null
+#!/bin/bash
+
+# Enabling this option is needed to remove patterns from the argument list if
+# there are no matching files. Without this, "touch" would create the file
+# "*.ini" if there was no file that matched.
+shopt -s nullglob
+
+#==================
+# functions
+#==================
+
+#------------------------------------------------------------------------------
+# Function :
+# fn_set_default_params
+#
+# Description :
+# set default params used globally in this script
+#
+
+fn_set_default_params()
+{
+ CUR_DIR=${PWD}
+ DATA_DIR=./data
+ CFG_DIR=./cfg
+ COMMON_BINDIR=${CUR_DIR}/../common/bin
+ UPDATE_CFG_PATH=./cfg/update.cfg
+ LOG_PATH=./data/Delta.log
+ CHECKSUM_PATH=./data/checksum.md5
+ OLD_TAR_DIR="old_tar"
+ NEW_TAR_DIR="new_tar"
+
+ MONDATE=$(date +%m%d)
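+    # Pick the first unused result/<MMDD>_<n> suffix so repeated runs on the same day do not collide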
+ i=1
+ while [ -d "result/${MONDATE}_${I}" ]
+ do
+ i=$(($i + 1))
+ done
+ MONDATE="${MONDATE}_${i}"
+ echo "MONDATE = ${MONDATE}"
+}
+
+#------------------------------------------------------------------------------
+# Function :
+# fn_mk_delta_partial_core
+#
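+# Description :
+#       extract the old/new rootfs images from the release tars, mount them,
+#       strip excluded paths, checksum the new tree and run CreatePartial.py
+#       to build the delta tree in ${OUTPUT_DIR}
+#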
+fn_mk_delta_partial_core()
+{
+ START_TIMESTAMP="generation of ${DELTA} [START] $(date +%T)"
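+    # UPDATE_TYPE is "TYPE:MODE" (e.g. DELTA_PARTIAL:AT_BOOT_FOTA); split on ':' so that
+    # only the first element (the delta type) is passed to CreatePartial.py below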
+ UPDATE_TYPE_S=(${UPDATE_TYPE//:/ })
+
+ echo "untar ${OLD_TAR_FILE}"
+ tar xvf ${OLD_TAR_DIR}/${OLD_TAR_FILE} ${PART_IMG_ORG}
+ if [ "$?" != "0" ]; then
+ return 1;
+ fi
+ mv ${PART_IMG_ORG} ${PART_IMG_OLD}
+
+ echo "untar ${NEW_TAR_FILE}"
+ tar xvf ${NEW_TAR_DIR}/${NEW_TAR_FILE} ${PART_IMG_ORG}
+ if [ "$?" != "0" ]; then
+ return 1;
+ fi
+ mv ${PART_IMG_ORG} ${PART_IMG_NEW}
+
+ #---- check whether the binaries are same ----
+ diff ${PART_IMG_OLD} ${PART_IMG_NEW}
+ if [ "$?" = "0" ]; then
+ echo "[${PART_NAME}] two binaries are same"
+ rm -rf ${PART_IMG_OLD}
+ rm -rf ${PART_IMG_NEW}
+ rm -rf ${OUTPUT_DIR}
+ return 0
+ fi
+
+ #---- make mount point ----
+ mkdir -p ${MNT_PNT_OLD}
+ mkdir -p ${MNT_PNT_NEW}
+
+ #---- mount partition image to mount point ----
+ echo "mount ${PART_IMG_OLD} & ${PART_IMG_NEW}"
+ mount -o loop ${PART_IMG_OLD} ${MNT_PNT_OLD}
+ if [ "$?" != "0" ]; then
+ exit 1;
+ fi
+ mount -o loop ${PART_IMG_NEW} ${MNT_PNT_NEW}
+ if [ "$?" != "0" ]; then
+ exit 1;
+ fi
+
+ # after mounting
+ #---- remove unnecessary files & directories ----
+ for ff in ${EXCLUDE_FILES}
+ do
+ rm -rf ${MNT_PNT_OLD}/${ff}
+ rm -rf ${MNT_PNT_NEW}/${ff}
+ done
+
+ echo "calc checksum for ${MNT_PNT_NEW}"
+ find ${BASE_NEW} -type f -exec md5sum {} \; >> checksum.md5
+ sed -i -e "s/${BASE_NEW}//g" checksum.md5
+
+ sudo python3 ${COMMON_BINDIR}/CreatePartial.py ${UPDATE_TYPE_S} ${PART_NAME} ${BASE_OLD} ${BASE_NEW} ${OUTPUT_DIR} ${UPDATE_CFG_PATH}
+ PythonRet=$?
+
+ #---- unmount partition image ----
+ echo "umount ${MNT_PNT_OLD} & ${MNT_PNT_NEW}"
+ umount ${MNT_PNT_OLD}
+ umount ${MNT_PNT_NEW}
+
+ #---- remove files & directories ----
+ echo "remove temp files/directory"
+ rm -rf ${PART_IMG_OLD}
+ rm -rf ${PART_IMG_NEW}
+ rm -rf ${BASE_OLD} ${BASE_NEW}
+
+    if [ "${PythonRet}" != "0" ]; then
+ echo "[${PART_NAME}] Failed to generate DELTA"
+ exit 1
+ fi
+ END_TIMESTAMP="generation of ${DELTA} [END] $(date +%T)"
+ echo "${START_TIMESTAMP}"
+ echo "${END_TIMESTAMP}"
+
+ return 0
+}
+
+#------------------------------------------------------------------------------
+# Function :
+# fn_mk_delta_partial
+#
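+# Description :
+#       set per-partition parameters (image names, mount points, excluded paths)
+#       and call fn_mk_delta_partial_core
+#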
+fn_mk_delta_partial()
+{
+ PART_IMG_ORG=${PART_BIN}
+ PART_IMG_OLD=${PART_IMG_ORG}.old
+ PART_IMG_NEW=${PART_IMG_ORG}.new
+
+ DELTA=${PART_BIN}
+
+ BASE_OLD=${PART_NAME}_OLD
+ BASE_NEW=${PART_NAME}_NEW
+
+ EXCLUDE_FILES="lost+found dev proc tmp var sys csa run"
+
+ MNT_PNT_OLD=${BASE_OLD}/
+ MNT_PNT_NEW=${BASE_NEW}/
+
+ fn_mk_delta_partial_core
+ if [ "$?" != "0" ]; then
+ return 1
+ fi
+
+ return 0
+}
+
+#------------------------------------------------------------------------------
+# Function :
+# fn_mk_rootfs_delta
+#
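+# Description :
+#       drive the whole rootfs delta generation: build the delta tree, pack it
+#       with mksquashfs, collect update.cfg/logs/checksums and archive the result
+#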
+fn_mk_rootfs_delta()
+{
+ FW_BW=FW_DELTA
+ RESULT_DIR=result/$MONDATE/${FW_BW}
+ DELTA_DIR=${RESULT_DIR}/DELTA
+ mkdir -p ${DELTA_DIR}
+
+ #--- generate delta binary ---
+ cd ${DATA_DIR} || exit 1
+
+ OUTPUT_DIR=${PART_NAME}_OUT
+ if [ ! -d ${OUTPUT_DIR} ]; then
+ if [ -f ${OUTPUT_DIR} ]; then
+ rm -f ${OUTPUT_DIR}
+ fi
+ mkdir ${OUTPUT_DIR}
+ fi
+
+    fn_mk_delta_partial
+    DeltaRet=$?
+
+    cd -
+
+    if [ "${DeltaRet}" != "0" ]; then
+ rm -rf ${RESULT_DIR}/${PART_BIN}
+ echo "Error: Abort Delta Generation"
+ exit 1
+ fi
+
+ mksquashfs ${DATA_DIR}/${OUTPUT_DIR} ${RESULT_DIR}/${PART_BIN}
+ mv ${RESULT_DIR}/${PART_BIN} ${DELTA_DIR}
+
+ rm -rf ${RESULT_DIR}/${PART_BIN}
+
+ #--- move update.cfg to delta directory
+ if [ -r ${UPDATE_CFG_PATH} ]; then
+ cp ${UPDATE_CFG_PATH} ${DELTA_DIR}/update.cfg
+ rm ${UPDATE_CFG_PATH}
+ fi
+
+ if [ -r ${LOG_PATH} ]; then
+ cp ${LOG_PATH} ${RESULT_DIR}/Delta.log
+ rm ${LOG_PATH}
+ fi
+
+ if [ -r ${CHECKSUM_PATH} ]; then
+ cp ${CHECKSUM_PATH} ${DELTA_DIR}/checksum.md5
+ rm ${CHECKSUM_PATH}
+ fi
+
+ #--- archive result directory
+ cd result/$MONDATE
+ echo "tar result directory"
+ tar cf ${MONDATE}_${FW_BW}.tar $FW_BW
+ cd ../..
+
+ echo "Making delta binary is end!"
+
+ cd ${DELTA_DIR}
+ cp ${COMMON_BINDIR}/upgrade-partial.sh ./
+ cp ${COMMON_BINDIR}/verify-partial.sh ./
+
+ # Ensure essential files are at beginning of archive (metadata, scripts and binaries to perform upgrade)
+ touch *.txt *.cfg *.ini *.sh *.inc
+ tar --overwrite -cpf ../delta.tar $(ls -1td *)
+
+ cd -
+
+ return 0
+}
+
+
+###############################################################################
+#==================
+# Main Start
+#==================
+
+# Fixed params for rootfs
+PART_NAME=rootfs
+UPDATE_TYPE=DELTA_PARTIAL:AT_BOOT_FOTA
+PART_BIN=rootfs.img
+
+fn_set_default_params
+
+#--- get old/new tar file which includes rootfs.img ---
+OLD_TAR_FILE=tizen-unified_20241022.045018_tizen-headless-aarch64.tar.gz
+NEW_TAR_FILE=tizen-unified_20241023.121718_tizen-headless-aarch64.tar.gz
+#OLD_TAR_FILE=tizen-unified-x_20241124.213502_tizen-headless-riscv64.tar.gz
+#NEW_TAR_FILE=tizen-unified-x_20241206.234549_tizen-headless-riscv64.tar.gz
+
+#--- check if the current working directory is the parent directory of bin, data, cfg, xml ---
+if [ ! -d ${DATA_DIR} ]; then
+ echo "Invalid working directory. ${DATA_DIR} does not exist"
+ exit 1
+fi
+
+if [ ! -d ${CFG_DIR} ]; then
+ echo "Invalid working directory. ${CFG_DIR} does not exist"
+ exit 1
+fi
+
+
+START_TIME="$(date +%T)"
+
+fn_mk_rootfs_delta
+
+END_TIME="$(date +%T)"
+
+echo "OVERALL TIMESTAMP : [START] ${START_TIME} ~ [END] ${END_TIME}"