Remove UPG directory 66/138866/2
author    Sunmin Lee <sunm.lee@samsung.com>
Wed, 24 May 2017 02:40:22 +0000 (11:40 +0900)
committer Sunmin Lee <sunm.lee@samsung.com>
Fri, 14 Jul 2017 07:04:10 +0000 (16:04 +0900)
The UPG directory contains the scripts used to make deltas.
Because these scripts already live in the "tota-upg" repository,
they do not need to be duplicated in both repositories.

Change-Id: I059c3f149dbb441a322c41978407a66a41cf440e
Signed-off-by: Sunmin Lee <sunm.lee@samsung.com>
UPG/CreatePatch.py [deleted file]
UPG/dzImagescript.sh [deleted file]
UPG/ss_bsdiff [deleted file]
UPG/ss_bspatch [deleted file]
UPG/unpack.sh [deleted file]
packaging/libtota.spec

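For reference, the removed CreatePatch.py (which remains available in tota-upg) was driven by positional arguments. The invocations below are a sketch reconstructed from the script's own usage string and argument handling; the partition names and paths are placeholders, not values from this repository:

    # DELTA_FS mode: old/new directory trees plus old/new attribute files and an update config
    ./CreatePatch.py DELTA_FS rootfs ./old_rootfs ./new_rootfs ./out \
        old_attr.txt new_attr.txt update.cfg

    # DELTA_IMG / FULL_IMG mode: whole-image delta, update config only
    ./CreatePatch.py DELTA_IMG boot ./old/boot.img ./new/boot.img ./out update.cfg
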
diff --git a/UPG/CreatePatch.py b/UPG/CreatePatch.py
deleted file mode 100755 (executable)
index f45f3e3..0000000
+++ /dev/null
@@ -1,1137 +0,0 @@
-#!/usr/bin/python
-
-import sys
-
-
-if sys.hexversion < 0x02040000:
-  print >> sys.stderr, "Python 2.4 or newer is required."
-  sys.exit(1)
-
-import sys
-import os
-import filecmp
-import shutil
-import subprocess
-import re
-import ntpath
-import zipfile
-import datetime
-import hashlib
-import operator
-import locale
-import errno
-import logging
-import glob
-import apt
-
-'''
-Diff two folders and create delta using SS_BSDIFF
-Will maintain same format of script that will be generated when we use diffutil
-
-1. Create a list of files in each Base folders,
-2. These files will fall into one these below categories:
-       1) Only in OLD - Should be deleted
-       2) Only in NEW - Should be added or renamed accordingly
-       3) File exists in both directories but contents are different - Create Diff.
-       4) File name is same but TYPE can change (File to Folder, Folder to Link etc.)
-       5) Duplicates in the list of Deletes and News
-       6) Close matching diffs even though name changes across directories. (for matching extension)
-       7) Clearing empty directories after Moves or diffs under Rename.
-
-Current Case
-1. Given two folders, from list of REMOVED and NEW files find if there
-is version change and create diff between them
-
-TODO
-Want to extend the same script for entire DIFF generation and replace TOTAlib.sh file
-Catching errors at all stages. SHOULD exit & return error in case of failure
-'''
-
-def global_paths():
-       global DIFF_UTIL
-       global ZIPUTIL
-       global NEW_FILES_PATH
-       global NEW_FILES_FOLDER
-       global NEW_FILES_ZIP_NAME
-       global SYMLINK_TYPE
-       global ATTR_DOC_EXT
-       global SYMLINK_DOC_NAME
-       global DIFF_PREFIX
-       global DIFF_SUFFIX
-       global SUPPORT_RENAME
-       global NEW_PREFIX
-       global DIFFPATCH_UTIL
-       global SUPPORT_CONTAINERS
-       global FULL_IMG
-       global DELTA_IMG
-       global DELTA_FS
-       global EXTRA
-       global COMMON_BIN_PATH
-       global MEM_REQ
-       global EMPTY
-
-COMMON_BIN_PATH = "../../common/bin/"
-DIFF_UTIL = "./ss_bsdiff"
-DIFFPATCH_UTIL = "./ss_bspatch"
-ZIPUTIL = "p7zip "
-#ZIPUTIL = "7z a system.7z "
-NEW_FILES_PATH = "system"
-NEW_FILES_FOLDER  = "system"
-NEW_FILES_ZIP_NAME = "system.7z"
-SYMLINK_TYPE = "SYM"
-ATTR_DOC_EXT = "_attr.txt"
-SYMLINK_DOC_NAME = "_sym.txt"
-PART_DOC_EXT = ".txt"
-DIFF_PREFIX = "diff"
-DIFF_SUFFIX = ".delta"
-NEW_PREFIX = 'new'
-FULL_IMG = "FULL_IMG"
-DELTA_IMG = "DELTA_IMG"
-DELTA_FS = "DELTA_FS"
-EXTRA = "EXTRA"
-LOGFILE = "Delta.log"
-EMPTY = ""
-MEM_REQ = 0
-
-SUPPORT_RENAME = "TRUE" #Use appropriate name
-SUPPORT_CONTAINERS = "FALSE"
-SUPPORT_DZIMAGE = "TRUE"
-
-TEST_MODE = "FALSE"
-
-def main():
-       logging.basicConfig(filename=LOGFILE, level=logging.DEBUG)
-       global AttributeFile
-       global GenerateDiffAttr
-       try:
-
-               if len(sys.argv) < 5:
-                       sys.exit('Usage: CreatePatch.py UPDATE_TYPE PARTNAME OLDBASE NEWBASE OUTFOLDER')
-               UPDATE_TYPE = sys.argv[1]
-               PART_NAME = sys.argv[2]  # lets make this also optional
-
-               BASE_OLD = sys.argv[3]
-               BASE_NEW = sys.argv[4]
-               OUT_DIR = sys.argv[5]
-               ATTR_OLD = EMPTY
-               ATTR_NEW = EMPTY
-               UPDATE_CFG_PATH = EMPTY
-               GenerateDiffAttr = "FALSE"
-               if UPDATE_TYPE == DELTA_FS:
-                       #instead of arguments check it in outdirectory ?
-                       if len(sys.argv) == 9:
-                               ATTR_OLD = sys.argv[6]
-                               ATTR_NEW = sys.argv[7]
-                               UPDATE_CFG_PATH = '../'+sys.argv[8]
-                               GenerateDiffAttr = "TRUE"
-
-               elif UPDATE_TYPE == DELTA_IMG or UPDATE_TYPE == FULL_IMG:
-                       if len(sys.argv) == 7:
-                               #Use path in better way
-                               UPDATE_CFG_PATH = '../'+sys.argv[6]
-
-               global DIFF_UTIL
-               global DIFFPATCH_UTIL
-               if not (os.path.isfile(DIFF_UTIL) and os.access(DIFF_UTIL, os.X_OK)):
-                       DIFF_UTIL = COMMON_BIN_PATH+DIFF_UTIL
-                       DIFFPATCH_UTIL = COMMON_BIN_PATH+DIFFPATCH_UTIL
-                       if not (os.path.isfile(DIFF_UTIL) and os.access(DIFF_UTIL, os.X_OK)):
-                               print >> sys.stderr, "Diff Util Does NOT exist -- ABORT"
-                               logging.info ('Diff Util Does NOT exist -- ABORT')
-                               sys.exit(1)
-
-               start = datetime.datetime.now().time()
-               logging.info('*************** ENTERED PYTHON SCRIPT *****************')
-               logging.info('Arguments Passed: [UpdateType - %s][Part Name - %s] [BaseOld - %s]  [BaseNew - %s] \n [OUTPUTDir - %s] [BASE ATTR - %s] [TARGET ATTR - %s]'% (UPDATE_TYPE, PART_NAME, BASE_OLD, BASE_NEW, OUT_DIR, ATTR_OLD, ATTR_NEW))
-
-               ensure_dir_exists(OUT_DIR)
-               if GenerateDiffAttr == "TRUE":
-                       if not (os.path.isfile(ATTR_OLD) and os.path.isfile(ATTR_NEW)):
-                               print >> sys.stderr, "Attributes missing -- ABORT"
-                               sys.exit(1)
-
-
-               # Should check if APT is supported on other linux flavours
-               cache = apt.Cache()
-               if cache['p7zip'].is_installed and cache['attr'].is_installed and cache['tar'].is_installed:
-                       logging.info ('Basic utils installed')
-
-               if UPDATE_TYPE == FULL_IMG:
-                       SS_mk_full_img(BASE_OLD, BASE_NEW, OUT_DIR, PART_NAME, UPDATE_CFG_PATH)
-               elif UPDATE_TYPE == DELTA_IMG:
-                       SS_mk_delta_img(BASE_OLD, BASE_NEW, OUT_DIR, PART_NAME, UPDATE_CFG_PATH)
-               elif UPDATE_TYPE == DELTA_FS:
-                       AttributeFile = ATTR_NEW
-                       ATTR_FILE = OUT_DIR+'/'+PART_NAME+ATTR_DOC_EXT
-                       Diff_AttrFiles(ATTR_OLD, ATTR_NEW, ATTR_FILE)
-                       Old_files, Old_dirs = Get_Files(BASE_OLD)
-                       New_files, New_dirs = Get_Files(BASE_NEW)
-                       SS_Generate_Delta(PART_NAME, BASE_OLD, Old_files, Old_dirs, BASE_NEW, New_files, New_dirs, OUT_DIR, ATTR_FILE)
-
-                       if not UPDATE_CFG_PATH == EMPTY:
-                               SS_update_cfg(PART_NAME, UPDATE_CFG_PATH)
-
-
-               elif UPDATE_TYPE == EXTRA:
-                       print('UPDATE_TYPE ---- EXTRA')
-               else:
-                       print('UPDATE_TYPE ---- UNKNOWN FORMAT')
-
-               if GenerateDiffAttr == "TRUE":
-                       os.remove(ATTR_OLD)
-                       os.remove(ATTR_NEW)
-               end = datetime.datetime.now().time()
-
-               logging.info('Max Memory requried to upgrade [%s] is [%d]' % (PART_NAME, MEM_REQ))
-               logging.info('*************** DONE WITH PYTHON SCRIPT ***************')
-               logging.info('Time start [%s] - Time end [%s]' % (start, end))
-               print('Done with [%s][%d] ---- Time start [%s] - Time end [%s]' % (PART_NAME, MEM_REQ, start, end))
-       except:
-               logging.error('Usage: {} <Update_Type> <Part_Name> <OLD_Base> <NEW_Base> <OUT_DIR>'.format(os.path.basename(sys.argv[0])))
-               raise
-
-
-def SS_update_cfg(DELTA_BIN, UPDATE_CFG_PATH):
-       f = open(UPDATE_CFG_PATH, 'r')
-       lines = f.readlines()
-       f.close()
-       f = open(UPDATE_CFG_PATH, 'w')
-       for line in lines:
-               ConfigItems = line.split()
-               if ConfigItems[0] == DELTA_BIN:
-                       DELTA = ConfigItems[1]
-                       logging.info ('Updating %s config' % DELTA_BIN)
-                       line = line.rstrip('\n')
-                       Value = MEM_REQ
-                       line = line.replace(line, line+'\t'+str(Value)+'\n')
-                       f.write(line)
-               else:
-                       f.write(line)
-       f.close()
-
-def SS_mk_delta_img(BASE_OLD, BASE_NEW, OUT_DIR, DELTA_BIN, UPDATE_CFG_PATH):
-       #for sizes
-
-       ZIMAGE_SCRIPT = COMMON_BIN_PATH+'./dzImagescript.sh'
-       ZIMAGE_OLD = BASE_OLD+'_unpacked'
-       ZIMAGE_NEW = BASE_NEW+'_unpacked'
-       DZIMAGE_HEADER = 'UnpackdzImage'
-       DZIMAGE_SEP = ':'
-
-       oldsize_d= os.path.getsize(BASE_OLD)
-       newsize_d= os.path.getsize(BASE_NEW)
-       SHA_BIN_DEST= hash_file(BASE_NEW)
-       SHA_BIN_BASE=hash_file(BASE_OLD)
-
-       #incase UPDATE CFG is empty
-       DELTA = DELTA_BIN
-       SS_UpdateSize(BASE_OLD, BASE_NEW)
-       #Should throw error if PART NAME NOT found??
-       if not UPDATE_CFG_PATH == EMPTY:
-               f = open(UPDATE_CFG_PATH, 'r')
-               lines = f.readlines()
-               f.close()
-               f = open(UPDATE_CFG_PATH, 'w')
-               for line in lines:
-                       ConfigItems = line.split()
-                       if ConfigItems[0] == DELTA_BIN:
-                               logging.info ('Updating %s config' % DELTA_BIN)
-                               DELTA = ConfigItems[1]
-                               line = line.rstrip('\n')
-                               line = line.replace(line, line+'\t'+str(oldsize_d)+'\t\t'+str(newsize_d)+'\t\t'+str(SHA_BIN_BASE)+'\t\t'+str(SHA_BIN_DEST)+'\n')
-                               f.write(line)
-                       else:
-                               f.write(line)
-               f.close()
-
-       #Any validation checks required?
-       if (DELTA_BIN == "zImage" or DELTA_BIN == "dzImage" or DELTA_BIN == "KERNEL" or DELTA_BIN == "BOOT") and SUPPORT_DZIMAGE == "TRUE":
-
-               #Unpack Old and New Images for creating delta
-               subprocess.call([ZIMAGE_SCRIPT, '-u', BASE_OLD])
-               subprocess.call([ZIMAGE_SCRIPT, '-u', BASE_NEW])
-
-               DeltaFiles = []
-               Old_files, Old_dirs = Get_Files(ZIMAGE_OLD)
-               New_files, New_dirs = Get_Files(ZIMAGE_NEW)
-
-               patchLoc = '%s/%s_temp' % (OUT_DIR, DELTA_BIN)
-               ensure_dir_exists(patchLoc)
-
-               for elt in New_files:
-                       if elt in Old_files:
-                               src_file = ZIMAGE_OLD+'/'+elt
-                               dst_file = ZIMAGE_NEW+'/'+elt
-                               if not filecmp.cmp(src_file, dst_file):
-                                       patch = '%s/%s' % (patchLoc,elt)
-                                       DeltaFiles.append(patch)
-                                       subprocess.call([DIFF_UTIL,src_file,dst_file,patch])
-                                       logging.info('Make dz Image %s <--> %s ==> %s %s' % (src_file, dst_file , DELTA_BIN, patch))
-
-               #Append all delta files to make image.delta
-
-               #HEADER FORMAT MAGICNAME:FILECOUNT:[FILENAME:FILESIZE:][FILECONTENT/S]
-               HeaderStr = DZIMAGE_HEADER+DZIMAGE_SEP+'%d' % len(DeltaFiles)
-               HeaderStr = HeaderStr+DZIMAGE_SEP
-
-               with open(OUT_DIR+'/'+DELTA, 'w') as DeltaFile:
-                       for fname in DeltaFiles:
-                               DeltaSize = os.path.getsize(fname)
-                               HeaderStr = HeaderStr+path_leaf(fname)+DZIMAGE_SEP+'%d' % DeltaSize
-                               HeaderStr = HeaderStr+DZIMAGE_SEP
-                       #Using 128 bytes as max Header.
-                       logging.info('zImage Header - %s' % HeaderStr.ljust(128,'0'))
-                       DeltaFile.write(HeaderStr.ljust(128,'0'))
-                       for fname in DeltaFiles:
-                               with open(fname) as infile:
-                                       DeltaFile.write(infile.read())
-                                       infile.close()
-
-               DeltaFile.close()
-               shutil.rmtree(patchLoc)
-               shutil.rmtree(ZIMAGE_OLD)
-               shutil.rmtree(ZIMAGE_NEW)
-               #Do we need to incorprate Max memory required for backup??
-
-       else:
-               patchLoc = '%s/%s' % (OUT_DIR, DELTA)
-               subprocess.call([DIFF_UTIL,BASE_OLD,BASE_NEW,patchLoc])
-               logging.info('Make Delta Image %s <--> %s ==> %s %s' % (BASE_OLD, BASE_NEW , DELTA_BIN, patchLoc))
-
-
-
-def    SS_mk_full_img(BASE_OLD, BASE_NEW, OUT_DIR, DELTA_BIN ,UPDATE_CFG_PATH):
-       logging.info('Make Full Image %s <--> %s ==> %s' % (BASE_OLD, BASE_NEW ,DELTA_BIN))
-       oldsize_d= os.path.getsize(BASE_OLD)
-       newsize_d= os.path.getsize(BASE_NEW)
-       SHA_BIN_DEST= hash_file(BASE_NEW)
-       SHA_BIN_BASE=hash_file(BASE_OLD)
-       #echo -e "\t${oldsize_d}\t\t${newsize_d}\t\t${SHA_BIN_BASE}\t\t${SHA_BIN_DEST}" >> ${DATA_DIR}/update_new.cfg
-       SS_UpdateSize(BASE_OLD, BASE_NEW)
-
-       if not UPDATE_CFG_PATH == EMPTY:
-               f = open(UPDATE_CFG_PATH, 'r')
-               lines = f.readlines()
-               f.close()
-               f = open(UPDATE_CFG_PATH, 'w')
-               for line in lines:
-                       ConfigItems = line.split()
-                       if ConfigItems[0] == DELTA_BIN:
-                               logging.info ('Updating %s config' % DELTA_BIN)
-                               DELTA = ConfigItems[1]
-                               line = line.rstrip('\n')
-                               line = line.replace(line, line+'\t'+str(oldsize_d)+'\t\t'+str(newsize_d)+'\t\t'+str(SHA_BIN_BASE)+'\t\t'+str(SHA_BIN_DEST)+'\n')
-                               f.write(line)
-                       else:
-                               f.write(line)
-               f.close()
-
-def zipdir(path, zip):
-    for root, dirs, files in os.walk(path):
-        for file in files:
-            zip.write(os.path.join(root, file))
-
-def ensure_dir_exists(path):
-       if not os.path.exists(path):
-               os.makedirs(path)
-               #shutil.rmtree(path)
-       #os.makedirs(path)
-
-
-def path_leaf(path):
-    head, tail = ntpath.split(path) #This is for windows?? Recheck
-    return tail
-
-def path_head(path):
-    head, tail = ntpath.split(path)
-    return head
-
-def difflines(list1, list2):
-    c = set(list1).union(set(list2))
-    d = set(list1).intersection(set(list2))
-    return list(c-d)
-
-#Creating Diff between OLD and NEW attribute files v12
-def Diff_AttrFiles(ATTR_OLD, ATTR_NEW, ATTR_FILE):
-       if GenerateDiffAttr == "FALSE":
-               return
-       with open(ATTR_OLD, 'r') as f_old:
-               lines1 = set(f_old.read().splitlines())
-
-       with open(ATTR_NEW, 'r') as f_new:
-               lines2 = set(f_new.read().splitlines())
-
-       lines = difflines(lines2, lines1)
-       with open(ATTR_FILE, 'w+') as file_out:
-               for line in lines:
-                       if line not in lines1:
-                               logging.info('Diff_AttrFiles - %s' % line)
-                               file_out.write(line+'\n')
-
-       f_new.close()
-       f_old.close()
-       file_out.close()
-
-
-
-def Update_Attr(RequestedPath, Type, File_Attibutes, Sym_Attibutes):
-       #Full File Path should MATCH
-       if GenerateDiffAttr == "FALSE":
-               return
-       FilePath = '"/'+RequestedPath+'"'
-       #print ('FilePath - %s'% (FilePath))
-       with open(AttributeFile) as f:
-               for line in f:
-                       if FilePath in line:
-                               if Type == SYMLINK_TYPE:
-                                       Sym_Attibutes.append(line)
-                               else:
-                                       File_Attibutes.append(line)
-
-
-'''This function returns the SHA-1 hash of the file passed into it'''
-def hash_file(filename):
-
-   # make a hash object
-   h = hashlib.sha1()
-
-   # open file for reading in binary mode
-   with open(filename,'rb') as file:
-       # loop till the end of the file
-       chunk = 0
-       while chunk != b'':
-           # read only 1024 bytes at a time
-           chunk = file.read(1024*1024)
-           h.update(chunk)
-
-   # return the hex representation of digest
-   return h.hexdigest()
-
-def find_dupes_dir(BASE_OLD, BASE_NEW):
-       dups = {}
-       fdupes = {}
-       print('Finding Duplicates in - %s %s' % (BASE_OLD, BASE_NEW))
-       logging.info('Finding Duplicates in - %s %s' % (BASE_OLD, BASE_NEW))
-       for rootbase, subdirsB, fileListB in os.walk(BASE_OLD):
-               #print('Scanning %s...' % rootbase)
-               for filename in fileListB:
-                       path = os.path.join(rootbase, filename)
-                       if os.path.islink(path):
-                               continue
-                       # Calculate hash
-                       file_hash = hash_file(path)
-                       dups[file_hash] = path
-
-       for roottarget, subdirsT, fileListT in os.walk(BASE_NEW):
-               #print('Scanning %s...' % roottarget)
-               for filename in fileListT:
-                       # Get the path to the file
-                       path = os.path.join(roottarget, filename)
-                       if os.path.islink(path):
-                               continue
-                       # Calculate hash
-                       file_hash = hash_file(path)
-                       # Add or append the file path
-                       if file_hash in dups:
-                               BaseStr = dups.get(file_hash)
-                               Baseloc = path.find('/')
-                               TarLoc = BaseStr.find('/')
-                               if not path[Baseloc:] == BaseStr[TarLoc:]:
-                                       logging.info('Dupes - %s ==> %s' % (path[Baseloc:], BaseStr[TarLoc:]))
-                                       fdupes[path] = BaseStr
-       logging.info('Total Duplicate files %d' % (len(fdupes)))
-       return fdupes
-
-
-def find_dupes_list(BASE_OLD, BASE_NEW, fileListB, fileListT):
-       dups = {}
-       fdupes = {}
-       print('Finding Duplicates in - %s %s' % (BASE_OLD, BASE_NEW))
-
-       for filename in fileListB:
-               Src_File = BASE_OLD+'/'+filename
-               if os.path.islink(Src_File) or os.path.isdir(Src_File):
-                       continue
-               # Calculate hash
-               file_hash = hash_file(Src_File)
-               dups[file_hash] = Src_File
-
-
-       for filename in fileListT:
-               Dest_File = BASE_NEW+'/'+filename
-               if os.path.islink(Dest_File) or os.path.isdir(Dest_File):
-                       continue
-               # Calculate hash
-               file_hash = hash_file(Dest_File)
-               if file_hash in dups:
-                       BaseStr = dups.get(file_hash)
-                       Baseloc = BaseStr.find('/')
-                       if not BaseStr[Baseloc:] == filename:
-                               #print('Dupes - %s ==> %s' % (BaseStr[Baseloc:], filename))
-                               fdupes[BaseStr] = filename
-
-       logging.info('Total Duplicate files %d' % (len(fdupes)))
-       return fdupes
-
-def SS_UpdateSize(src_file, dst_file):
-       global MEM_REQ
-       oldsize_d= os.path.getsize(src_file)
-       newsize_d= os.path.getsize(dst_file)
-       if oldsize_d >= newsize_d:
-               Max = newsize_d
-       else:
-               Max = oldsize_d
-       if MEM_REQ < Max:
-               MEM_REQ = Max
-
-
-
-def SS_Generate_Delta(PART_NAME, BASE_OLD, Old_files, Old_dirs, BASE_NEW, New_files, New_dirs, OUT_DIR, ATTR_FILE):
-       print('Going from %d files to %d files' % (len(Old_files), len(New_files)))
-       logging.info('Going from %d files to %d files' % (len(Old_files), len(New_files)))
-
-       # First let's fill up these categories
-       files_new = []
-       files_removed = []
-       Dir_removed = []
-       Dir_Added = []
-       files_changed = []
-       files_unchanged = []
-       files_renamed = []
-       File_Attibutes = []
-       Sym_Attibutes = []
-
-       files_Del_List = {}
-       files_New_List = {}
-       MyDict_Patches = {}
-
-
-
-       PWD = os.getcwd()
-
-       # Generate NEW List
-       for elt in New_files:
-               if elt not in Old_files:
-                       files_new.append(elt)
-                       logging.info('New files %s' % elt)
-
-       # Generate Delete List
-       for elt in Old_files:
-               if elt not in New_files:
-                       # Cant we just append it here only if this is NOT a directory???? so that we have list of removed files ONLY. including directories
-                       files_removed.append(elt)
-                       logging.info('Old files %s' % elt)
-
-
-       for elt in Old_dirs:
-               #print('List of Old Dirs %s' % elt)
-               if elt not in New_dirs:
-                       Dir_removed.append(elt)
-                       #print('Old Dirs %s' % elt+'/')
-
-       for elt in New_dirs:
-               if elt not in Old_dirs:
-                       Dir_Added.append(elt)
-               #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-
-       # What files have changed contents but not name/path?
-       for elt in New_files:
-               if elt in Old_files:
-                       #Both are symbolic linkes and they differ
-                       src_file = BASE_OLD+'/'+elt
-                       dst_file = BASE_NEW+'/'+elt
-                       #print('Files Changed - %s -%s' % (src_file,dst_file))
-                       if os.path.islink(src_file) and os.path.islink(dst_file):
-                               if not os.readlink(src_file) == os.readlink(dst_file):
-                                       files_changed.append(elt)
-                                       #print('%d Sym link files changed' % len(files_changed))
-                                       logging.info('Sym links Changed - %s' % elt)
-                               else:
-                                       files_unchanged.append(elt)
-                       #Both are Normal files and they differ. (Is file returns true in case of symlink also, so additional check to find either of the file is symlink)
-                       elif (not os.path.islink(src_file) or os.path.islink(dst_file)) and os.path.isfile(src_file) and os.path.isfile(dst_file):
-                               if not filecmp.cmp(src_file, dst_file):
-                                       files_changed.append(elt)
-                                       #print('%d Normal files changed' % len(files_changed))
-                                       #print('Files Changed - %s' % elt)
-                               else:
-                                       files_unchanged.append(elt)
-                       #File types differ between BASE and TARGET
-                       else:
-                               logging.info('Files are of diff types but same names  Src- %s Des- %s' % (src_file, dst_file))
-                               #Both file types have changed and they differ
-                               #Case 1: First Delete the OLD entry file type (Be it anything)
-                               #Processing and updating partition txt file will be done under REMOVED case and NEW files case accordingly, we just make an entry here
-                               files_removed.append(elt)
-                               files_new.append(elt)
-
-
-
-       #Currently if Version or number is the first character of the file, then we are NOT making any diffs.
-       if SUPPORT_RENAME == "TRUE":
-               for elt in files_removed:
-                       if os.path.isfile(BASE_OLD+'/'+elt):
-                               FileName = path_leaf(elt)
-                               entries = re.split('[0-9]' , FileName)
-                               #Gives the STRING part of NAME. if name starts with version then later part wil b string
-                               #print('Entires under removed list after split - %s %s - %s' % (FileName, entries[0], elt))
-                               #If version is starting at the begining of the string?? shd we hav additional check for such cases??
-                               if len(entries[0]) > 0:
-                                       files_Del_List.update({entries[0]: elt})
-
-               for elt in files_new:
-                       if os.path.isfile(BASE_NEW+'/'+elt):
-                               FileName = path_leaf(elt)
-                               entries = re.split('[0-9]' , FileName)
-                               #print('Entires under NEWfiles list after split  - %s %s - %s' % (FileName, entries[0], elt))
-                               if len(entries[0]) > 0:
-                                       files_New_List.update({entries[0]: elt})
-
-               for key, value in files_Del_List.iteritems():
-                       #print('Key value pair -%s -%s' % (key, value))
-                       if key in files_New_List:
-                               # this file is the same name in both!
-                               src_file = BASE_OLD+'/'+value
-                               dst_file = BASE_NEW+'/'+files_New_List[key]
-                               olddirpath = path_head(files_New_List[key])
-                               newdirpath = path_head(value)
-                               if os.path.islink(src_file) or os.path.islink(dst_file):
-                                       logging.debug('Cannot diff as one of them is Symlink')
-                               else:
-                                       #Pick the best diff of same type and diff names
-                                       files_renamed.append([files_New_List[key], value])
-                                       files_removed.remove(value)
-                                       files_new.remove(files_New_List[key])
-
-       '''
-       Patch Section
-               Partition.txt contains Protocol for UPI
-               Types Supported: DIFFS, MOVES, NEWS, DELETES, SYMDIFFS, SYMNEWS.
-       '''
-       Sym_Diff_Cnt = 0
-       Sym_New_Cnt = 0;
-       Del_Cnt = 0
-       New_Cnt = 0
-       Diff_Cnt = 0
-       Move_Cnt = 0
-
-       SymLinkDoc = OUT_DIR+'/'+PART_NAME+SYMLINK_DOC_NAME
-       Partition_Doc = open(OUT_DIR+'/'+PART_NAME+'.txt','w')
-       Partition_Doc_SymLinks = open(SymLinkDoc,'w')
-
-       print("writing diff'ed changed files...")
-       for elt in files_changed:
-               dst_file = BASE_NEW+'/'+elt
-               src_file = BASE_OLD+'/'+elt
-               #Both files are symbolic links and they differ
-               if os.path.islink(dst_file) and os.path.islink(src_file):
-               #Both are symlinks and they differ
-                       logging.debug(' File Changed is Link %s ' % dst_file)
-                       patch = os.readlink(dst_file)
-                       Sym_Diff_Cnt = Sym_Diff_Cnt + 1
-                       Partition_Doc_SymLinks.write('SYM:DIFF:%s:%s:%s\n' % (elt, elt, patch))
-                       Update_Attr(elt, "SYM", File_Attibutes, Sym_Attibutes)
-               #Both are NORMAL files and they differ
-               elif (not os.path.islink(src_file) or os.path.islink(dst_file)) and os.path.isfile(dst_file) and os.path.isfile(src_file):
-                       #Both are files and they differ
-                       Diff_Cnt = Diff_Cnt + 1
-                       patchName = (DIFF_PREFIX+'%d_%s'+DIFF_SUFFIX) % (Diff_Cnt, path_leaf(elt))
-                       patchLoc = '%s/%s' % (OUT_DIR, patchName)
-                       logging.debug(' File Differ %s %s' % (src_file, dst_file))
-                       SS_UpdateSize(src_file, dst_file)
-                       if SUPPORT_CONTAINERS == "TRUE":
-                               if src_file.endswith('.zip') and dst_file.endswith('.zip'):
-                                       FORMAT = "ZIP"
-                                       Partition_Doc.write('DIFF:ZIP:%s:%s:%s:%s:%s/\n' % (elt, elt, hash_file(src_file), hash_file(dst_file), patchName))
-                                       compute_containerdelta(src_file, dst_file, FORMAT, OUT_DIR+'/'+patchName, Partition_Doc)
-                               elif src_file.endswith('.tpk') and dst_file.endswith('.tpk'):
-                                       FORMAT = "TPK"
-                                       Partition_Doc.write('DIFF:TPK:%s:%s:%s:%s:%s/\n' % (elt, elt, hash_file(src_file), hash_file(dst_file), patchName))
-                                       compute_containerdelta(src_file, dst_file, FORMAT, OUT_DIR+'/'+patchName, Partition_Doc)
-                               else:
-                                       FORMAT = "REG"
-                                       Partition_Doc.write('DIFF:REG:%s:%s:%s:%s:%s\n' % (elt, elt, hash_file(src_file), hash_file(dst_file), patchName))
-                                       subprocess.call([DIFF_UTIL,src_file,dst_file,patchLoc])
-                       else:
-                               FORMAT = "REG"
-                               Partition_Doc.write('DIFF:REG:%s:%s:%s:%s:%s\n' % (elt, elt, hash_file(src_file), hash_file(dst_file), patchName))
-                               subprocess.call([DIFF_UTIL,src_file,dst_file,patchLoc])
-                       Update_Attr(elt, "FILE", File_Attibutes, Sym_Attibutes)
-               #Both differ but they are of diff types
-               else:
-                       #Processing and updating partition txt file will be done under REMOVED case and NEW files case accordingly, we just make an entry here
-                       files_removed.append(elt)
-                       files_new.append(elt)
-
-       fdupes = find_dupes_list(BASE_OLD, BASE_NEW, files_removed, files_new)
-       for oldpath, newpath in fdupes.iteritems():
-               logging.info('Dupes %s -> %s' % (oldpath, newpath))
-
-       for elt in files_removed:
-               src_file = BASE_OLD+'/'+elt
-               if src_file in fdupes.keys():
-                       dst_file = BASE_NEW+'/'+ fdupes[src_file]
-                       logging.debug(' File Moved %s ==> %s' % (src_file, dst_file))
-                       Move_Cnt = Move_Cnt + 1
-                       Partition_Doc.write('MOVE:REG:%s:%s:%s\n' % (elt, fdupes[src_file], hash_file(src_file)))
-                       #directories should b taken care?? +++++++++++++++++++++++++++++++++++++ PARENT DIREC if not present etc
-                       files_removed.remove(elt)
-                       files_new.remove(fdupes[src_file])
-
-       #Should be placed after removing duplicates, else they will be filtered here.
-       # loop shd b for all NEW files, rather than for all delete files ??
-       DelList = files_removed[:]
-       NewList = files_new[:]
-       for new_file in NewList:
-               if os.path.islink(BASE_NEW+'/'+new_file):
-                       continue
-               elif os.path.isdir(BASE_NEW+'/'+new_file):
-                       continue
-               else:# os.path.isfile(BASE_NEW+'/'+new_file):
-                       DirPathNew = path_head(new_file)
-                       FileNameNew = path_leaf(new_file)
-                       DiffSize = 0
-                       winning_patch_sz = os.path.getsize(BASE_NEW+'/'+new_file)
-                       winning_file = ''
-               for del_file in DelList:
-                       #print '+++++'
-                       if os.path.islink(BASE_OLD+'/'+del_file):
-                               continue
-                       elif os.path.isdir(BASE_OLD+'/'+del_file):
-                               continue
-                       else: #if os.path.isfile(BASE_OLD+'/'+del_file):
-                               FileNameOld = path_leaf(del_file)
-                               if (FileNameOld.startswith(FileNameNew[:len(FileNameNew)/2]) and (os.path.splitext(FileNameNew)[1] == os.path.splitext(del_file)[1])):
-                                       #winning_patch_sz = 0.9 * os.path.getsize(BASE_NEW+'/'+new_file)
-                                       #logging.debug('I can compute diff between %s %s' % (del_file, new_file))
-                                       DiffSize = measure_two_filediffs(BASE_OLD+'/'+del_file, BASE_NEW+'/'+new_file)
-                                       if (DiffSize < 0.8 * winning_patch_sz):
-                                               winning_patch_sz = DiffSize
-                                               winning_file = del_file
-               if len(winning_file) > 0:
-                       logging.debug('Best Pick -%s ==> %s [%d]' % (winning_file, new_file, DiffSize))
-                       files_renamed.append([new_file, winning_file])
-                       DelList.remove(winning_file)
-                       files_removed.remove(winning_file)
-                       files_new.remove(new_file)
-
-       #********************** SHOULD CHECK THIS LOGIC ***********************
-
-       if SUPPORT_RENAME == "TRUE":
-               for elt in files_renamed:
-                       src_file = BASE_OLD+'/'+elt[1]
-                       dst_file = BASE_NEW+'/'+elt[0]
-                       Diff_Cnt = Diff_Cnt + 1
-                       patchName = (DIFF_PREFIX+'%d_%s'+DIFF_SUFFIX) % (Diff_Cnt, path_leaf(elt[1]))
-                       #patchName = (DIFF_PREFIX+'_%s'+DIFF_SUFFIX) % (path_leaf(elt[0]))
-                       patchLoc = '%s/%s' % (OUT_DIR, patchName)
-                       logging.debug(' File Renamed %s ==> %s' % (src_file, dst_file))
-                       # Should be careful of renaming files??
-                       # Should we consider measure_two_filediffs ?? so that patch size is NOT greater than actual file?
-                       # What if folder path has numerics??
-
-                       if  os.path.isdir(src_file) or os.path.isdir(dst_file):
-                               #This case never occurs??
-                               Partition_Doc.write('"%s" and "%s" renamed 0 0\n' % (elt[0], elt[1]))
-                               Update_Attr(elt[0], "FILE", File_Attibutes, Sym_Attibutes)
-                       else: #Make sure these files are PROPER and they shd NOT be symlinks
-                               if filecmp.cmp(src_file, dst_file):
-                                       Move_Cnt = Move_Cnt + 1
-                                       Diff_Cnt = Diff_Cnt - 1
-                                       Partition_Doc.write('MOVE:REG:%s:%s:%s\n' % (elt[1], elt[0], hash_file(src_file)))
-                               elif SUPPORT_CONTAINERS == "TRUE":
-                                       if src_file.endswith('.zip') and dst_file.endswith('.zip'):
-                                               FORMAT = "ZIP"
-                                               Partition_Doc.write('DIFF:ZIP:%s:%s:%s:%s:%s/\n' % (elt[1], elt[0], hash_file(src_file), hash_file(dst_file), patchName))
-                                               compute_containerdelta(src_file, dst_file, FORMAT, OUT_DIR+'/'+patchName, Partition_Doc)
-                                       elif src_file.endswith('.tpk') and dst_file.endswith('.tpk'):
-                                               FORMAT = "TPK"
-                                               Partition_Doc.write('DIFF:TPK:%s:%s:%s:%s:%s/\n' % (elt[1], elt[0], hash_file(src_file), hash_file(dst_file), patchName))
-                                               compute_containerdelta(src_file, dst_file, FORMAT, OUT_DIR+'/'+patchName, Partition_Doc)
-                                       else:
-                                               FORMAT = "REG"
-                                               Partition_Doc.write('DIFF:REG:%s:%s:%s:%s:%s\n' % (elt[1], elt[0], hash_file(src_file), hash_file(dst_file), patchName))
-                                               subprocess.call([DIFF_UTIL,src_file,dst_file,patchLoc])
-                               else:
-                                       FORMAT = "REG"
-                                       Partition_Doc.write('DIFF:REG:%s:%s:%s:%s:%s\n' % (elt[1], elt[0], hash_file(src_file), hash_file(dst_file), patchName))
-                                       subprocess.call([DIFF_UTIL,src_file,dst_file,patchLoc])
-
-                               SS_UpdateSize(src_file, dst_file)
-                               Update_Attr(elt[0], "FILE", File_Attibutes, Sym_Attibutes)
-
-
-       for elt in files_removed:
-               #if files are part of patches after renaming, we shd remove them as part of removed.
-               src_file = BASE_OLD+'/'+elt
-               if os.path.islink(src_file):
-                       Partition_Doc.write('DEL:SYM:%s\n' % (elt))
-               elif os.path.isdir(src_file):
-                       Partition_Doc.write('DEL:DIR:%s\n' % (elt))
-               else:
-                       Partition_Doc.write('DEL:REG:%s:%s\n' % (elt, hash_file(src_file)))
-               logging.debug(' File Deleted %s' % src_file)
-               Del_Cnt = Del_Cnt + 1
-
-       for elt in Dir_removed:
-               #if Dir is empty, add it to the removed list.
-               src_file = BASE_OLD+'/'+elt
-               #Irrespective of weather files are MOVED or DIFF'ed, we can delete the folders. This action can be performed at the end.
-               #It covers symlinks also, as NEW symlinks cannot point to NON existant folders of TARGET (NEW binary)
-               if os.path.isdir(src_file):
-                       Partition_Doc.write('DEL:END:%s\n' % (elt))
-                       Del_Cnt = Del_Cnt + 1
-                       logging.debug(' Dir Deleted- %s' % src_file)
-
-
-       for elt in files_new:
-               dst_file = BASE_NEW+'/'+elt
-               newfiles_dest_path = 'system/'
-               ensure_dir_exists(newfiles_dest_path)
-               if os.path.islink(dst_file):
-                       patch = os.readlink(dst_file)
-                       logging.debug(' File New Links %s' % elt)
-                       Partition_Doc_SymLinks.write('SYM:NEW:%s:%s\n' % (elt, patch))
-                       #What if this is only a new sym link and folder already exists??? Should recheck
-                       destpath = newfiles_dest_path + elt
-                       if not os.path.exists(path_head(destpath)):
-                               os.makedirs(path_head(destpath))
-                               logging.info('New SymLink - Adding missing Dir')
-                       #Update_Attr(elt, "SYM", File_Attibutes, Sym_Attibutes)
-                       Sym_New_Cnt = Sym_New_Cnt + 1
-               elif os.path.isdir(dst_file): # We create just empty directory here
-                       destpath = newfiles_dest_path + elt
-                       if not os.path.exists(destpath):
-                               os.makedirs(destpath)
-                               logging.debug(' File New Dir %s' % destpath)
-                               New_Cnt = New_Cnt + 1
-               else:
-                       New_Cnt = New_Cnt + 1
-                       #newfiles_dest_path = OUT_DIR + '/system/'
-                       destpath = newfiles_dest_path + elt
-                       destdir = os.path.dirname(destpath)
-                       logging.debug('New files - %s ==> %s' % (dst_file, destdir))
-
-                       if not os.path.isdir(destdir):
-                               try:
-                                       os.makedirs(destdir)
-                               except:
-                                       logging.critical('Error in NEW files DIR entry -%s' % destdir)
-                                       raise
-
-                       try:
-                               shutil.copy2(dst_file, destpath)
-                               logging.debug('New files copied from- %s to- %s' % (dst_file, destpath))
-                       except:
-                               logging.critical('Error in NEW files entry -%s -%s' % (dst_file, destpath))
-                               raise
-
-       for elt in Dir_Added:
-               newfiles_dest_path = 'system/'
-               ensure_dir_exists(newfiles_dest_path)
-               destpath = newfiles_dest_path + elt
-               if not os.path.exists(destpath):
-                       os.makedirs(destpath)
-                       logging.debug(' DirList New Dir %s' % destpath)
-                       New_Cnt = New_Cnt + 1
-
-       #Base directory should be system
-       print 'Compressing New files'
-       if (New_Cnt > 0):
-               WorkingDir = os.getcwd()
-               os.chdir(os.getcwd()+"/"+NEW_FILES_PATH)
-               logging.info('Curr Working Dir - %s' % os.getcwd())
-               os.system(ZIPUTIL+NEW_FILES_PATH+" >> " + LOGFILE)
-               shutil.move(NEW_FILES_ZIP_NAME, WorkingDir+"/"+OUT_DIR)
-               #New file size?? cos, we extract system.7z from delta.tar and then proceed with decompression
-               SS_UpdateSize(WorkingDir+"/"+OUT_DIR+"/"+NEW_FILES_ZIP_NAME, WorkingDir+"/"+OUT_DIR+"/"+NEW_FILES_ZIP_NAME)
-               os.chdir(WorkingDir)
-               shutil.rmtree(NEW_FILES_PATH)
-               # use 7z a system.7z ./*
-
-       #logging.info('%d Dir to be removed' % len(Dir_removed))
-       logging.info('%d files unchanged' % len(files_unchanged))
-       logging.info('%d files files_renamed' % len(files_renamed))
-       logging.info('%d files NEW' % len(files_new))
-       logging.info('%d File attr' % len(File_Attibutes))
-       logging.info('%d Sym attr' % len(Sym_Attibutes))
-       logging.info('PaTcHCoUnT:Diffs-%d Moves-%d News-%d Delets-%d SymDiffs-%d SymNews-%d\n' % (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt))
-       print('PaTcHCoUnT:Diffs-%d Moves-%d News-%d Delets-%d SymDiffs-%d SymNews-%d\n' % (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt))
-
-       #There could be duplicates, TODO, can check before adding..
-       ATTR_FILE = open(ATTR_FILE,'a+')
-       for elt in File_Attibutes:
-               ATTR_FILE.write(elt)
-       for elt in Sym_Attibutes:
-               ATTR_FILE.write(elt)
-
-       ATTR_FILE.close()
-
-       Partition_Doc_SymLinks.close()
-       Partition_Read_SymLinks = open(SymLinkDoc,'r+')
-       Partition_Doc.write(Partition_Read_SymLinks.read())
-       Partition_Doc.write('PaTcHCoUnT:%d %d %d %d %d %d\n' % (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt))
-       Partition_Doc_SymLinks.close()
-       Partition_Doc.close()
-       os.remove(SymLinkDoc)
-
-
-def Apply_Container_Delta(a_apk, b_apk, new_apk, a_folder, g_output_dir):
-
-       #CONTROL NAMES, AND PRINTS AND ERROR CASES... SHOULD NOT PROCEED.
-       print 'ApplyContainerDelta - ', b_apk, a_folder, g_output_dir
-       shutil.copy2(g_output_dir+'/'+b_apk, g_output_dir+'/temp')
-       temp_apk = '../'+g_output_dir+'/'+b_apk
-       Patch = 'Patch_'+b_apk
-       ensure_dir_exists(Patch)
-       shutil.copy2(g_output_dir+'/'+b_apk, Patch+'/'+b_apk)
-
-       #Size issue on Device side?? shd check this
-       subprocess.call(['unzip','-q', Patch+'/'+b_apk, '-d', Patch])
-       with open(g_output_dir+'/PATCH.txt', 'r') as f_new:
-               lines = set(f_new.read().splitlines())
-               for line in lines:
-                       #print('Action ==> %s' % line)
-                       #Action, Path, Patch = line.split('|')
-                       Items = line.split('|')
-                       Action = Items[0]
-                       Path = Items[1]
-                       ActualPath = a_folder+'/'+Path
-                       PatchPath = Patch+'/'+Path
-                       SrcPath = g_output_dir+'/'+path_leaf(Path)
-                       #print('Action ==> %s Path ==> %s ' % (Action, Path))
-                       if line[0] == 'c':
-                               patchName = g_output_dir+'/'+Items[2]
-                               #print('Apply Patch: ActualPath %s SrcPath %s PatchLoc %s ' % (PatchPath, ActualPath, patchName))
-                               subprocess.call([DIFFPATCH_UTIL,ActualPath,ActualPath,patchName])
-                               WorkingDir = os.getcwd()
-                               os.chdir(WorkingDir+"/"+"temp_a")
-                               subprocess.call(['cp', '--parents', Path, '../'+Patch])
-                               os.chdir(WorkingDir)
-                       elif line[0] == 's':
-                               WorkingDir = os.getcwd()
-                               os.chdir(WorkingDir+"/"+"temp_a")
-                               subprocess.call(['cp', '--parents', Path, '../'+Patch])
-                               os.chdir(WorkingDir)
-                       else:
-                               print('Unknown Error')
-       #print('Touch all files and set common attributes for DIFF generation')
-       WorkingDir = os.getcwd()
-       os.chdir(WorkingDir+"/"+Patch)
-
-       CONTAINER_DATE = '200011111111.11'
-       CONTAINER_MODE = '0755'
-       subprocess.call(['find', '.', '-type', 'l', '-exec', 'rm', '-rf', '{}', ';'])
-       subprocess.call(['find', '.', '-exec', 'touch', '-t', CONTAINER_DATE, '{}', ';'])
-       subprocess.call(['chmod', '-R', CONTAINER_MODE, '../'+Patch])
-
-       print 'Update Intermediate Archive'
-       #subprocess.call(['zip','-ryX', b_apk, '*'])
-       subprocess.call(['zip','-ryX', b_apk] + glob.glob('*'))
-       os.chdir(WorkingDir)
-       #print('Apply Path completed - Now create diff for this and place in patch folder')
-       #print os.getcwd()
-       print('Patch Applied, Create Final Diff - %s %s' % (g_output_dir+'/'+b_apk,new_apk))
-       patchName = ('New'+'_%s'+DIFF_SUFFIX) % (b_apk)
-       patchLoc = '%s/%s' % (g_output_dir, patchName)
-
-       subprocess.call([DIFF_UTIL, Patch+'/'+b_apk ,new_apk,patchLoc])
-
-       #Only on HOST... for testing
-       if TEST_MODE == 'TRUE':
-               UpgradedName = '%s_Upgraded' % (b_apk)
-               subprocess.call([DIFFPATCH_UTIL,Patch+'/'+b_apk,UpgradedName,patchLoc])
-
-       #This is file only with NEWS and empty diffs and same files.
-       if TEST_MODE == 'FALSE':
-               os.remove(g_output_dir+'/'+b_apk)
-               os.rename(g_output_dir+'/temp', g_output_dir+'/'+b_apk)
-               shutil.rmtree(Patch)
-
-def IsSymlink(info):
-  return (info.external_attr >> 16) == 0120777
-
-
-def compute_containerdelta(src_file, dst_file, FORMAT, patchName, Partition_Doc):
-
-       a_apk = src_file
-       b_apk = dst_file
-       a_folder = 'temp_a'
-       b_folder = 'temp_b'
-
-       g_output_dir = patchName
-
-       logging.info('Uncompressing Containers... [%s][%s]' % (src_file, dst_file))
-       logging.info('Out Dir -%s' %(g_output_dir))
-       ensure_dir_exists(a_folder)
-       zipf = zipfile.ZipFile(a_apk, 'r');
-       zipf.extractall(a_folder)
-       zipf.close()
-
-       ensure_dir_exists(b_folder)
-       zipf = zipfile.ZipFile(b_apk, 'r');
-       zipf.extractall(b_folder)
-
-
-       symlinks = []
-       for info in zipf.infolist():
-               basefilename = info.filename[7:]
-               if IsSymlink(info):
-                       symlinks.append(info.filename)
-                       os.remove(b_folder+'/'+info.filename)
-       zipf.close()
-
-       a_files, a_dirs = Get_Files(a_folder)
-       b_files, b_dirs = Get_Files(b_folder)
-
-       logging.info('Going from %d files %d files' % (len(a_files), len(b_files)))
-
-       # First let's fill up these categories
-       C_files_new = []
-       C_files_removed = []
-       C_files_changed = []
-       C_files_unchanged = []
-
-       # What files appear in B but not in A?
-       for elt in b_files:
-               if elt not in a_files:
-                       #if not elt.endswith('.so'):
-                       C_files_new.append(elt)
-
-       # What files appear in A but not in B?
-       for elt in a_files:
-               if elt not in b_files:
-                       C_files_removed.append(elt)
-
-       # What files have changed contents but not name/path?
-       for elt in b_files:
-               if elt in a_files:
-                       if os.path.islink(a_folder+'/'+elt) or os.path.islink(b_folder+'/'+elt):
-                               print 'links - skip'
-                       elif not filecmp.cmp(a_folder+'/'+elt, b_folder+'/'+elt):
-                               C_files_changed.append(elt)
-                       else:
-                               C_files_unchanged.append(elt)
-
-
-       print('%d new files' % len(C_files_new))
-       print('%d removed files' % len(C_files_removed))
-       print('%d files changed' % len(C_files_changed))
-       print('%d files unchanged' % len(C_files_unchanged))
-
-       # temp dir where we're assembling the patch
-       ensure_dir_exists(g_output_dir)
-
-       unique_fileid = 0
-       toc = open(g_output_dir+'/PATCH.txt','w')
-       print("writing diff'ed changed files...")
-
-       for elt in C_files_changed:
-               dst_file = b_folder+'/'+elt
-               src_file = a_folder+'/'+elt
-               patchName = (DIFF_PREFIX+'%d_%s'+DIFF_SUFFIX) % (unique_fileid, path_leaf(elt))
-               patchLoc = '%s/%s' % (g_output_dir, patchName)
-               #print('src - %s dest -%s patch -%s' % (src_file ,dst_file,patchLoc))
-               subprocess.call([DIFF_UTIL,src_file ,dst_file,patchLoc])
-               toc.write('c%d|%s|%s\n' % (unique_fileid, elt, patchName))
-               unique_fileid = unique_fileid + 1
-
-       for elt in C_files_unchanged:
-               dst_file = b_folder+'/'+elt
-               src_file = a_folder+'/'+elt
-               #print('Same Files src - %s dest -%s' % (src_file ,dst_file))
-               toc.write('s%d|%s\n' % (unique_fileid, elt))
-               unique_fileid = unique_fileid + 1
-
-       #Create NEW TPK with empty data for below files and NEW files
-       shutil.copy2(b_apk, g_output_dir)
-
-       #May b for host??
-       #temp_apk = '../'+g_output_dir+'/'+b_apk
-       temp_apk = '../'+g_output_dir+'/'+path_leaf(b_apk)
-
-       for elt in C_files_changed:
-               dst_file = b_folder+'/'+elt
-               #print dst_file
-               open(dst_file, 'w').close()
-
-       for elt in C_files_unchanged:
-               dst_file = b_folder+'/'+elt
-               open(dst_file, 'w').close()
-
-       WorkingDir = os.getcwd()
-       os.chdir(WorkingDir+"/"+b_folder)
-
-       #for elt in files_changed:
-       #       subprocess.call(['zip', temp_apk, elt]) # confirm ZIP options, extra fields etc.. jus zip it, shd do all at once.. else time taking
-
-       #for elt in files_unchanged:
-       #       subprocess.call(['zip', temp_apk, elt])
-
-       subprocess.call(['zip','-ryq', temp_apk, '*'])
-       os.chdir(WorkingDir)
-       toc.close()
-
-       Apply_Container_Delta(path_leaf(a_apk), path_leaf(b_apk), b_apk, a_folder, g_output_dir)
-       shutil.rmtree(a_folder)
-       shutil.rmtree(b_folder)
-
-def NewFiles(src, dest):
-       print src,dest
-       subprocess.call(['cp','-rp', src,dest])
-    #try:
-               #shutil.copytree(src, dest)
-    #except OSError as e:
-        # If the error was caused because the source wasn't a directory
-        #if e.errno == errno.ENOTDIR:
-            #shutil.copy2(src, dest)
-        #else:
-            #print('Directory not copied. Error: %s' % e)
-
-def measure_two_filediffs(src, dst):
-       patchLoc = 'temp.patch'
-       subprocess.call([DIFF_UTIL,src,dst,patchLoc])
-       result_size = os.path.getsize(patchLoc)
-       os.remove(patchLoc)
-       return result_size
-
-def Get_Files(path):
-       all_files = []
-       all_dirs = []
-
-       for root, directories, filenames in os.walk(path, topdown=False, followlinks=False):
-               for directory in directories:
-                       #DirName = os.path.join(root+'/',directory)
-                       DirName = os.path.join(root,directory)
-                       if os.path.islink(DirName):
-                               logging.debug('This is symlink pointing to dir -%s' % DirName)
-                               all_files.append(os.path.relpath(DirName, path))
-                       elif not os.listdir(DirName):
-                               #print('*****Empty Directory******* -%s', DirName)
-                               #This should NOT be appended ??? Empty dir shd b considered
-                               all_dirs.append(os.path.relpath(DirName, path))
-                       else:
-                               all_dirs.append(os.path.relpath(DirName, path))
-               for filename in filenames:
-                       FileName = os.path.join(root,filename)
-                       all_files.append(os.path.relpath(FileName, path))
-
-       all_files.sort()
-       all_dirs.sort()
-       return all_files, all_dirs
-
-
-USAGE_DOCSTRING = """
-      Generate Delta using BASEOLD AND BASE NEW
-         Attributes is optional
-"""
-
-def Usage(docstring):
-  print docstring.rstrip("\n")
-  print COMMON_DOCSTRING
-
-
-
-if __name__ == '__main__':
-       main()
-
diff --git a/UPG/dzImagescript.sh b/UPG/dzImagescript.sh
deleted file mode 100755 (executable)
index f96bc4f..0000000
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/bin/bash
-
-pname="${0##*/}"
-args=("$@")
-cur_dir="$(pwd)"
-
-# file names:
-decompression_code="decompression_code"
-piggy_gz_piggy_trailer="piggy.gz+piggy_trailer"
-piggy="piggy"
-piggy_gz="piggy.gz"
-padding_piggy="padding_piggy"
-piggy_trailer="piggy_trailer"
-ramfs_gz_part3="initramfs.cpio+part3"
-ramfs_cpio_gz="initramfs.cpio.gz"
-padding3="padding3"
-part3="part3"
-kernel_img="kernel.img"
-ramfs_cpio="initramfs.cpio"
-ramfs_dir="initramfs"
-sizes="sizes"
-ramfs_part3="ramfs+part3"
-ramfs_list="initramfs_list"
-cpio_t="cpio-t"
-
-
-cpio="cpio_set0"
-
-# We dup2 stderr to 3 so an error path is always available (even
-# during commands where stderr is redirected to /dev/null).  If option
-# -v is set, we dup2 sterr to 9 also so commands (and some of their
-# results if redirected to &9) are printed also.
-exec 9>/dev/null                # kill diagnostic ouput (will be >&2 if -v)
-exec 3>&2                       # an always open error channel
-
-#
-########### Start of functions
-#
-
-# Emit an error message and abort
-fatal(){
-    # Syntax: fatal <string ...>
-    # Output error message, then abort
-    echo >&3
-    echo >&3 "$pname: $*"
-    kill $$
-    exit 1
-}
-
-# Execute a command, displaying the command if -v:
-cmd(){
-    # Syntax: cmd <command> <args...>
-    # Execute <command>, echo command line if -v
-    echo >&9 "$*"
-    "$@"
-}
-
-# Execute a required command, displaying the command if -v, abort on
-# error:
-rqd(){
-    # Syntax: cmd <command> <args...>
-    # Execute <command>, echo commandline if -v, abort on error
-    cmd "$@" || fatal "$* failed."
-}
-
-findByteSequence(){
-    # Syntax: findByteSequence <fname> [<string, default: gzip header>]
-    # Returns: position (offset) on stdout, empty string if nothing found
-    file="$1"
-    local opt
-    if [ "$2" = "lzma" ]; then
-        srch=$'\x5d....\xff\xff\xff\xff\xff'
-        opt=
-    else
-        srch="${2:-$'\x1f\x8b\x08'}" # Default: search for gzip header
-        opt="-F"
-    fi
-    pos=$(LC_ALL=C grep $opt -a --byte-offset -m 1 --only-matching -e "$srch" -- "$file")
-    echo ${pos%%:*}
-}
-
-getFileSize(){
-    # Syntax: getFileSize <file>
-    # Returns size of the file on stdout.
-    # Aborts if file doesn't exist.
-    rqd stat -c %s "$1"
-}
-checkNUL(){
-    # Syntax: checkNUL file offset
-    # Returns true (0) if byte there is 0x0.
-    [ "$(rqd 2>/dev/null dd if="$1" skip=$2 bs=1 count=1)" = $'\0' ]
-}
-
-gunzipWithTrailer(){
-    # Syntax gunzipWithTrailer <file> <gzip name, sans .gz> <padding> <trailer>
-    #
-    # <file>: the input file
-    # <gzip name, sans .gz>, <padding>, <trailer>:
-    #   The output files.  For the gzipped part, both the
-    #   compressed and the uncompressed output is generated, so we have
-    #   4 output files.
-    local file="$1"
-    local gz_result="$2.gz"
-    local result="$2"
-    local padding="$3"
-    local trailer="$4"
-    local tmpfile="/tmp/gunzipWithTrailer.$$.gz"
-    local original_size=$(getFileSize "$file")
-    local d=$(( (original_size+1) / 2))
-    local direction fini at_min=0
-    local results_at_min=()
-    local size=$d
-    local at_min=
-    echo "Separating gzipped part from trailer in  "$file""
-    echo -n "Trying size: $size"
-    while :; do
-        rqd dd if="$file" of="$tmpfile" bs=$size count=1 2>/dev/null
-        cmd gunzip >/dev/null 2>&1 -c "$tmpfile"
-        res=$?
-        if [ "$d" -eq 1 ]; then
-            : $((at_min++))
-            results_at_min[$size]=1
-            [ "$at_min" -gt 3 ] && break
-        fi
-        d=$(((d+1)/2))
-        case $res in
-                # 1: too small
-            1) size=$((size+d)); direction="↑";;
-                # 2: trailing garbage
-            2) size=$((size-d)); direction="↓";;
-                # OK
-            0) break;;
-            *) fatal "gunzip returned $res while checking "$file"";;
-        esac
-        echo -n "  $size"
-    done
-    if [ "$at_min" -gt 3 ]; then
-        echo -e "\ngunzip result is oscillating between 'too small' and 'too large' at size: ${!results_at_min[*]}"
-        echo -n "Trying lower nearby values:  "
-        fini=
-        for ((d=1; d < 30; d++)); do
-            : $((size--))
-            echo -n "  $size"
-            rqd dd if="$file" of="$tmpfile" bs=$size count=1 2>/dev/null
-            if cmd gunzip >/dev/null 2>&1 -c "$tmpfile"; then
-                echo -n " - OK"
-                fini=1
-                break
-            fi
-        done
-        [ -z "$fini" ] && fatal 'oscillating gunzip result, giving up.'
-    fi
-    # We've found the end of the gzipped part.  This is not the real
-    # end since gzip allows for some trailing padding to be appended
-    # before it barfs.  First, go back until we find a non-null
-    # character:
-    echo -ne "\npadding check (may take some time): "
-    real_end=$((size-1))
-    while checkNUL "$file" $real_end; do
-        : $((real_end--))
-    done
-    # Second, try if gunzip still succeeds.  If not, add trailing
-    # null(s) until it succeeds:
-    while :; do
-        rqd dd if="$file" of="$tmpfile" bs=$real_end count=1 2>/dev/null
-        gunzip >/dev/null 2>&1 -c "$tmpfile"
-        case $? in
-            # 1: too small
-            1) : $((real_end++));;
-            *) break;;
-        esac
-    done
-    real_next_start=$size
-    # Now, skip NULs forward until we reach a non-null byte.  This is
-    # considered as being the start of the next part.
-    while checkNUL "$file" $real_next_start; do
-        : $((real_next_start++))
-    done
-    echo $((real_next_start - real_end))
-    echo
-    rm "$tmpfile"
-    # Using the numbers we got so far, create the output files which
-    # reflect the parts we've found so far:
-    rqd dd 2>&9 if="$file" of="$gz_result" bs=$real_end count=1
-    rqd dd 2>&9 if="$file" of="$padding" skip=$real_end bs=1 count=$((real_next_start - real_end))
-    rqd dd 2>&9 if="$file" of="$trailer" bs=$real_next_start skip=1
-    rqd gunzip -c "$gz_result" > "$result"
-}
-
-
-unpack()(
-    [ -d "$unpacked" ] && echo "\
-Warning: there is already an unpacking directory.  If you have files added on
-your own there, the repacking result may not reflect the result of the
-current unpacking process."
-    rqd mkdir -p "$unpacked"
-    rqd cd "$unpacked"
-    sizes="$unpacked/sizes"
-    echo "# Unpacking sizes" > "$sizes"
-
-    piggy_start=$(findByteSequence "$cur_dir/$zImage")
-    if [ -z "$piggy_start" ]; then
-        fatal "Can't find a gzip header in file '$zImage'"
-    fi
-
-    rqd dd 2>&9 if="$cur_dir/$zImage" bs="$piggy_start" count=1 of="$decompression_code"
-    rqd dd 2>&9 if="$cur_dir/$zImage" bs="$piggy_start" skip=1 of="$piggy_gz_piggy_trailer"
-
-    gunzipWithTrailer  "$piggy_gz_piggy_trailer" \
-        "$piggy" "$padding_piggy" "$piggy_trailer"
-
-    echo
-       sudo rm -rf "piggy.gz" "piggy.gz+piggy_trailer" "sizes"
-    echo "Success."
-    echo "The unpacked files and the initramfs directory are in "$unpacked""
-)
-
-#### start of main program
-while getopts xv12345sgrpuhtz-: argv; do
-    case $argv in
-        p|u|z|1|2|3|4|5|t|r|g) eval opt_$argv=1;;
-        v) exec 9>&2; opt_v=1;;
-        s) cpio="cpio";;
-        x) set -x;;
-        -) if [ "$OPTARG" = "version" ]; then
-              echo "$pname $version"
-              exit 0
-           else
-              echo "Wrong Usage, use -u to unpack"
-           fi;;
-        h|-) echo "Wrong Usage, use -u to unpack";;
-        *) fatal "Illegal option";;
-    esac
-done
-shift $((OPTIND-1))
-zImage="${1:-zImage}"
-unpacked="$cur_dir/${zImage}_unpacked"
-packing="$cur_dir/${zImage}_packing"
-shift
-if [ -n "$*" ]; then
-    fatal "Excess arguments: '$*'"
-fi
-
-if [ -n "$opt_u" ]; then
-    [ -f "$zImage" ] || fatal "file '$zImage': not found"
-    unpack
-fi
-if [ -z "$opt_u" ]; then
-    echo >&2 "$pname: Need at least -u option."
-    echo >&2 "$pname: Type '$pname --help' for usage info."
-    exit 1
-fi
-
-exit
-
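
dzImagescript.sh locates the compressed kernel payload by grepping the zImage for the gzip magic bytes (findByteSequence), then gunzipWithTrailer bisects on gunzip's exit codes (1 = input too small, 2 = trailing garbage, 0 = OK) and walks the surrounding NUL padding to split the image into decompression code, piggy.gz, padding and trailer. As a hedged illustration only (split_zimage and GZIP_MAGIC are names made up here, not the project's tooling), Python's zlib exposes the end of a gzip member directly, so the same split needs no trial-and-error loop:

    import zlib

    GZIP_MAGIC = b"\x1f\x8b\x08"

    def split_zimage(blob):
        # Find the gzip header, decompress the member, and use zlib's
        # bookkeeping to learn exactly where the compressed stream ends.
        start = blob.find(GZIP_MAGIC)
        if start < 0:
            raise ValueError("no gzip header found")
        d = zlib.decompressobj(wbits=16 + zlib.MAX_WBITS)   # gzip framing
        piggy = d.decompress(blob[start:])
        if not d.eof:
            raise ValueError("gzip stream is truncated")
        end = len(blob) - len(d.unused_data)        # first byte after piggy.gz
        decompression_code = blob[:start]
        piggy_gz = blob[start:end]
        trailer = blob[end:].lstrip(b"\x00")        # skip the NUL padding
        return decompression_code, piggy_gz, piggy, trailer

The exit-code bisection in the shell version exists because gunzip only reports "too small" or "trailing garbage"; a library that exposes the member boundary turns the search into a single pass.
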
diff --git a/UPG/ss_bsdiff b/UPG/ss_bsdiff
deleted file mode 100755 (executable)
index d1293cd..0000000
Binary files a/UPG/ss_bsdiff and /dev/null differ
diff --git a/UPG/ss_bspatch b/UPG/ss_bspatch
deleted file mode 100755 (executable)
index 0e539ab..0000000
Binary files a/UPG/ss_bspatch and /dev/null differ
diff --git a/UPG/unpack.sh b/UPG/unpack.sh
deleted file mode 100755 (executable)
index 869881d..0000000
+++ /dev/null
@@ -1,236 +0,0 @@
-#!/bin/bash
-
-pname="${0##*/}"
-args=("$@")
-cur_dir="$(pwd)"
-
-# file names:
-decompression_code="decompression_code"
-piggy_gz_piggy_trailer="piggy.gz+piggy_trailer"
-piggy="piggy"
-piggy_gz="piggy.gz"
-padding_piggy="padding_piggy"
-piggy_trailer="piggy_trailer"
-padding3="padding3"
-sizes="sizes"
-
-# We dup2 stderr to 3 so an error path is always available (even
-# during commands where stderr is redirected to /dev/null).  If option
-# -v is set, we dup2 stderr to 9 also so commands (and some of their
-# results if redirected to &9) are also printed.
-exec 9>/dev/null                # kill diagnostic output (will be >&2 if -v)
-exec 3>&2                       # an always open error channel
-
-#
-########### Start of functions
-#
-
-# Emit an error message and abort
-fatal(){
-    # Syntax: fatal <string ...>
-    # Output error message, then abort
-    echo >&3
-    echo >&3 "$pname: $*"
-    kill $$
-    exit 1
-}
-
-# Execute a command, displaying the command if -v:
-cmd(){
-    # Syntax: cmd <command> <args...>
-    # Execute <command>, echo command line if -v
-    echo >>"$workspace/log_file" "$*"
-    "$@"
-}
-
-# Execute a required command, displaying the command if -v, abort on
-# error:
-rqd(){
-    # Syntax: rqd <command> <args...>
-    # Execute <command>, echo command line if -v, abort on error
-    cmd "$@" || fatal "$* failed."
-}
-
-checkNUL(){
-    # Syntax: checkNUL file offset
-    # Returns true (0) if byte there is 0x0.
-    [ "$(rqd 2>>"$workspace/log_file" "$workspace/dd" if="$1" skip=$2 bs=1 count=1)" = $'\0' ]
-}
-
-gunzipWithTrailer(){
-    # Syntax gunzipWithTrailer <file> <gzip name, sans .gz> <padding> <trailer>
-    #
-    # <file>: the input file
-    # <gzip name, sans .gz>, <padding>, <trailer>:
-    #   The output files.  For the gzipped part, both the
-    #   compressed and the uncompressed output is generated, so we have
-    #   4 output files.
-    local file="$1"
-    local gz_result="$2.gz"
-    local result="$2"
-    local padding="$3"
-    local trailer="$4"
-    local tmpfile="/tmp/gunzipWithTrailer.$$.gz"
-       local original_size=$("$workspace/stat" -c %s "$unpacked/$file") 2>>"$workspace/log_file"
-       echo "Original size is $original_size" >> "$workspace/log_file"
-    local d=$(( (original_size+1) / 2))
-    local direction fini at_min=0
-    local results_at_min=()
-    local size=$d
-    local at_min=
-       rm -rf /tmp/test_file
-    echo "Separating gzipped part from trailer in "$unpacked/$file"" >> "$workspace/log_file"
-    echo -n "Trying size: $size"       >> "$workspace/log_file"
-    while :; do
-        rqd "$workspace/dd" if="$unpacked/$file" of="$tmpfile" bs=$size count=1 2>>"$workspace/log_file"
-        cmd "$workspace/gzip" >/tmp/test_file 2>>"$workspace/log_file" -d -c "$tmpfile"
-        res=$?
-               echo "result for gunzip is $res" >>"$workspace/log_file"
-        if [ "$d" -eq 1 ]; then
-            : $((at_min++))
-            results_at_min[$size]=1
-            [ "$at_min" -gt 3 ] && break
-        fi
-        d=$(((d+1)/2))
-        case $res in
-                # 1: too small
-            1)  echo "In case 1" >> "$workspace/log_file"
-                               size=$((size+d)); direction="↑";;
-                # 2: trailing garbage
-            2)         echo "In case 2" >> "$workspace/log_file"
-                               size=$((size-d)); direction="↓";;
-                # OK
-            0)         echo "Breaking" >> "$workspace/log_file"
-                               break;;
-            *)         echo "In case *" >> "$workspace/log_file"
-                               fatal "gunzip returned $res while checking "$unpacked/$file"";;
-        esac
-        echo -n "  $size" >> "$workspace/log_file"
-    done
-    if [ "$at_min" -gt 3 ]; then
-        echo -e "\ngunzip result is oscillating between 'too small' and 'too large' at size: ${!results_at_min[*]}"    >> "$workspace/log_file"
-        echo -n "Trying lower nearby values:  "        >> "$workspace/log_file"
-        fini=
-        for ((d=1; d < 30; d++)); do
-            : $((size--))
-            echo -n "  $size" >> "$workspace/log_file"
-            rqd "$workspace/dd" if="$unpacked/$file" of="$tmpfile" bs=$size count=1 2>/dev/null
-            if cmd "$workspace/gzip" >/dev/null 2>&1 -d -c  "$tmpfile"; then
-                echo -n " - OK"        >> "$workspace/log_file"
-                fini=1
-                break
-            fi
-        done
-        [ -z "$fini" ] && fatal 'oscillating gunzip result, giving up.'
-    fi
-    # We've found the end of the gzipped part.  This is not the real
-    # end since gzip allows for some trailing padding to be appended
-    # before it barfs.  First, go back until we find a non-null
-    # character:
-    echo -ne "\npadding check (may take some time): " >> "$workspace/log_file"
-    real_end=$((size-1))
-    while checkNUL "$unpacked/$file" $real_end; do
-        : $((real_end--))
-    done
-       echo "Found real end at $real_end" >> "$workspace/log_file"
-    # Second, try if gunzip still succeeds.  If not, add trailing
-    # null(s) until it succeeds:
-    while :; do
-        rqd "$workspace/dd" if="$unpacked/$file" of="$tmpfile" bs=$real_end count=1 2>>"$workspace/log_file"
-        "$workspace/gzip" >/tmp/test_file2 2>>"$workspace/log_file" -d -c "$tmpfile"
-        case $? in
-            # 1: too small
-            1)         echo "In case 1" >> "$workspace/log_file"
-                               : $((real_end++));;
-            *)         echo "Case other $?" >> "$workspace/log_file"
-                               break;;
-        esac
-    done
-       echo "Done adding trailing null(s) until gunzip succeeds" >> "$workspace/log_file"
-    real_next_start=$size
-    # Now, skip NULs forward until we reach a non-null byte.  This is
-    # considered as being the start of the next part.
-    while checkNUL "$unpacked/$file" $real_next_start; do
-        : $((real_next_start++))
-    done
-    echo $((real_next_start - real_end))       >> "$workspace/log_file"
-    echo >> "$workspace/log_file"
-    rm "$tmpfile"
-    # Using the numbers we got so far, create the output files which
-    # reflect the parts we've found so far:
-    rqd "$workspace/dd" if="$unpacked/$file" of="$unpacked/$gz_result" bs=$real_end count=1
-    rqd "$workspace/dd" if="$unpacked/$file" of="$unpacked/$padding" skip=$real_end bs=1 count=$((real_next_start - real_end))
-    rqd "$workspace/dd" if="$unpacked/$file" of="$unpacked/$trailer" bs=$real_next_start skip=1
-    rqd "$workspace/gzip" -c -d "$unpacked/$gz_result" > "$unpacked/$result"
-}
-
-unpack()(
-    [ -d "$unpacked" ] && echo "\
-Warning: there is already an unpacking directory.  If you have files added on
-your own there, the repacking result may not reflect the result of the
-current unpacking process."
-    rqd mkdir -p "$unpacked"
-    rqd cd "$unpacked"
-    sizes="$unpacked/sizes"
-    echo "# Unpacking sizes" > "$sizes"
-    log_file="$unpacked/log_file"
-    #piggy_start=$1
-    if [ -z "$piggy_start" ]; then
-               fatal "Can't find a gzip header in file '$zImage'" >> "$workspace/log_file"
-        fatal "Can't find a gzip header in file '$zImage'"
-       else
-               echo "start is $piggy_start" >> "$sizes"
-    fi
-
-    rqd "$workspace/dd" if="$zImage" bs="$piggy_start" count=1 of="$unpacked/$decompression_code"
-    rqd "$workspace/dd" if="$zImage" bs="$piggy_start" skip=1 of="$piggy_gz_piggy_trailer"
-
-    gunzipWithTrailer  "$piggy_gz_piggy_trailer" \
-        "$piggy" "$padding_piggy" "$piggy_trailer"
-
-    echo
-    echo "Success."
-)
-
-#### start of main program
-while getopts xv12345sgrpuhtz-: argv; do
-    case $argv in
-        p|z|1|2|3|4|5|t|r|g) eval opt_$argv=1;;
-        u)  opt_u=1
-                       workspace=$2
-                   zImage="$2/$3"
-                       piggy_start=$4
-                       unpacked="${zImage}_unpacked"
-                       packing="${zImage}_packing";;
-        -) if [ "$OPTARG" = "version" ]; then
-              echo "$pname $version"
-              exit 0
-           else
-              echo "Wrong Usage, use -u to unpack"
-           fi;;
-        h|-) echo "Wrong Usage, use -u to unpack";;
-        *) fatal "Illegal option";;
-    esac
-done
-if [ -n "$opt_u" ]; then
-    [ -f "$zImage" ] || fatal "file '$zImage': not found"
-    unpack
-fi
-if [ -n "$opt_p" ]; then
-       work_dir=$2
-       tgt_file=$3
-       cmd_dir=$4
-       rqd cd "$work_dir"
-       #remove all links before proceeding with zip processing
-       "$cmd_dir/find" . -type l  -exec rm {} \;
-       "$cmd_dir/find" . -exec touch -t 200011111111.11 {} \;
-       "$cmd_dir/find" . -exec chmod 0755 {} \;
-       "$cmd_dir/zip" -ryX "$tgt_file" *
-fi
-if [ -z "$opt_u$opt_p" ]; then
-    echo >&2 "$pname: Need -u or -p option to work"
-    echo >&2 "$pname: Type '$pname --help' for usage info."
-    exit 1
-fi
-
-exit
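
unpack.sh is a variant of the same unpacker meant to be driven with explicit paths: it receives the workspace, zImage name and piggy offset as arguments, runs dd/gzip/stat out of that workspace, and logs to $workspace/log_file instead of fd 9. Its -p branch repacks a tree deterministically by deleting symlinks, forcing a fixed timestamp (touch -t 200011111111.11) and mode 0755 on every entry, and zipping the result. A rough Python equivalent of that normalization step (repack_tree and FIXED_TIME are illustrative names, not part of the deleted tooling):

    import os
    import stat
    import zipfile

    FIXED_TIME = (2000, 11, 11, 11, 11, 11)   # mirrors 'touch -t 200011111111.11'

    def repack_tree(work_dir, target_zip):
        # Zip work_dir reproducibly: skip symlinks and pin every entry's
        # timestamp and mode, like the -p branch above.
        with zipfile.ZipFile(target_zip, "w") as zf:
            for root, _, files in os.walk(work_dir):
                for name in sorted(files):
                    full = os.path.join(root, name)
                    if os.path.islink(full):
                        continue                       # 'find . -type l -exec rm {} \;'
                    info = zipfile.ZipInfo(os.path.relpath(full, work_dir),
                                           date_time=FIXED_TIME)
                    info.compress_type = zipfile.ZIP_DEFLATED
                    info.external_attr = (stat.S_IFREG | 0o755) << 16   # chmod 0755
                    with open(full, "rb") as fh:
                        zf.writestr(info, fh.read())

Pinning timestamps and modes keeps the archive stable across runs, which is presumably why the script normalizes the tree before zipping.
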
diff --git a/packaging/libtota.spec b/packaging/libtota.spec
index b9e7a58..a873d50 100755 (executable)
@@ -1,8 +1,8 @@
 Name:          libtota
 Summary:       fota update library
 ExclusiveArch:         %{arm}
-Version:       0.2.0
-Release:       1
+Version:       0.2.1
+Release:       2
 Group:         System
 License:       Apache-2.0 and BSD-2-Clause and BSD-3-Clause and PD
 Source0:       %{name}-%{version}.tar.gz