import shutil
import subprocess
import re
-import ntpath
import zipfile
import datetime
import hashlib
Catching errors at all stages. SHOULD exit & return error in case of failure
'''
+
def global_paths():
global DIFF_UTIL
global ZIPUTIL
global NEW_FILES_PATH
- global NEW_FILES_FOLDER
global NEW_FILES_ZIP_NAME
global SYMLINK_TYPE
global ATTR_DOC_EXT
global NEW_PREFIX
global DIFFPATCH_UTIL
global SUPPORT_CONTAINERS
- global FULL_IMG
- global DELTA_IMG
+ global FULL_IMAGE
+ global DELTA_IMAGE
global DELTA_FS
global EXTRA
global COMMON_BIN_PATH
global VERBATIM_LIST
global MEM_FILE
+
COMMON_BIN_PATH = "../../common/bin/"
DIFF_UTIL = "/usr/local/bin/ss_bsdiff"
DIFFPATCH_UTIL = "/usr/local/bin/ss_bspatch"
-ZIPUTIL = "p7zip "
-#ZIPUTIL = "7z a system.7z "
-NEW_FILES_PATH = "system"
-NEW_FILES_FOLDER = "system"
+#ZIPUTIL = "p7zip "
+ZIPUTIL = "7z -mf=off a "
+NEW_FILES_PATH = "run/upgrade-sysroot"
NEW_FILES_ZIP_NAME = "system.7z"
SYMLINK_TYPE = "SYM"
ATTR_DOC_EXT = "_attr.txt"
SYMLINK_DOC_NAME = "_sym.txt"
+HARDLINK_DOC_NAME = "_hard.txt"
PART_DOC_EXT = ".txt"
DIFF_PREFIX = "diff"
DIFF_SUFFIX = ".delta"
NEW_PREFIX = 'new'
-FULL_IMG = "FULL_IMG"
-DELTA_IMG = "DELTA_IMG"
+FULL_IMAGE = "FULL_IMAGE"
+DELTA_IMAGE = "DELTA_IMAGE"
DELTA_FS = "DELTA_FS"
EXTRA = "EXTRA"
-PRE_UA = "PRE_UA"
LOGFILE = "Delta.log"
VERBATIM_LIST = "Verbatim_List.txt"
EMPTY = ""
MEM_REQ = 0
MEM_FILE = "NULL"
+COMPRESSION_LZMA = "lzma"
+COMPRESSION_BROTLI = "brotli"
-SUPPORT_RENAME = "TRUE" #Use appropriate name
+SUPPORT_RENAME = "TRUE" # Use appropriate name
SUPPORT_CONTAINERS = "FALSE"
-SUPPORT_DZIMAGE = "TRUE"
SUPPORT_VERBATIM = "TRUE"
TEST_MODE = "FALSE"
+
def main():
logging.basicConfig(filename=LOGFILE, level=logging.DEBUG)
global AttributeFile
if len(sys.argv) < 5:
sys.exit('Usage: CreatePatch.py UPDATE_TYPE PARTNAME OLDBASE NEWBASE OUTFOLDER')
UPDATE_TYPE = sys.argv[1]
+ UPDATE_TYPE_S = UPDATE_TYPE.split(":")
PART_NAME = sys.argv[2] # lets make this also optional
BASE_OLD = sys.argv[3]
ATTR_NEW = EMPTY
UPDATE_CFG_PATH = EMPTY
GenerateDiffAttr = "FALSE"
- if UPDATE_TYPE == DELTA_FS:
+ if UPDATE_TYPE_S[0] == DELTA_FS:
#instead of arguments check it in outdirectory ?
if len(sys.argv) == 9:
ATTR_OLD = sys.argv[6]
ATTR_NEW = sys.argv[7]
- UPDATE_CFG_PATH = '../'+sys.argv[8]
+ UPDATE_CFG_PATH = '../' + sys.argv[8]
GenerateDiffAttr = "TRUE"
- elif UPDATE_TYPE == DELTA_IMG or UPDATE_TYPE == FULL_IMG:
+ elif UPDATE_TYPE_S[0] in [DELTA_IMAGE, FULL_IMAGE]:
if len(sys.argv) == 7:
#Use path in better way
- UPDATE_CFG_PATH = '../'+sys.argv[6]
+ UPDATE_CFG_PATH = '../' + sys.argv[6]
global DIFF_UTIL
global DIFFPATCH_UTIL
if not (os.path.isfile(DIFF_UTIL) and os.access(DIFF_UTIL, os.X_OK)):
- DIFF_UTIL = COMMON_BIN_PATH+DIFF_UTIL
- DIFFPATCH_UTIL = COMMON_BIN_PATH+DIFFPATCH_UTIL
+ DIFF_UTIL = COMMON_BIN_PATH + DIFF_UTIL
+ DIFFPATCH_UTIL = COMMON_BIN_PATH + DIFFPATCH_UTIL
if not (os.path.isfile(DIFF_UTIL) and os.access(DIFF_UTIL, os.X_OK)):
print >> sys.stderr, "Diff Util Does NOT exist -- ABORT"
- logging.info ('Diff Util Does NOT exist -- ABORT')
+ logging.info('Diff Util Does NOT exist -- ABORT')
sys.exit(1)
start = datetime.datetime.now().time()
logging.info('*************** ENTERED PYTHON SCRIPT *****************')
- logging.info('Arguments Passed: [UpdateType - %s][Part Name - %s] [BaseOld - %s] [BaseNew - %s] \n [OUTPUTDir - %s] [BASE ATTR - %s] [TARGET ATTR - %s]'% (UPDATE_TYPE, PART_NAME, BASE_OLD, BASE_NEW, OUT_DIR, ATTR_OLD, ATTR_NEW))
+ logging.info('Arguments Passed: [UpdateType - %s][Part Name - %s] [BaseOld - %s] [BaseNew - %s] \n [OUTPUTDir - %s] [BASE ATTR - %s] [TARGET ATTR - %s]' % (UPDATE_TYPE, PART_NAME, BASE_OLD, BASE_NEW, OUT_DIR, ATTR_OLD, ATTR_NEW))
- ensure_dir_exists(OUT_DIR)
+ try:
+ ensure_dir_exists(OUT_DIR)
+ except FileExistsError as exc:
+ logging.error('Argument passed as OUT_DIR - %s is already an existing file' % OUT_DIR)
+ raise exc
if GenerateDiffAttr == "TRUE":
if not (os.path.isfile(ATTR_OLD) and os.path.isfile(ATTR_NEW)):
print >> sys.stderr, "Attributes missing -- ABORT"
sys.exit(1)
-
# Should check if APT is supported on other linux flavours
cache = apt.Cache()
if cache['p7zip'].is_installed and cache['attr'].is_installed and cache['tar'].is_installed:
- logging.info ('Basic utils installed')
+ logging.info('Basic utils installed')
else:
print >> sys.stderr, "Basic utils missing -- ABORT"
sys.exit(1)
- if UPDATE_TYPE == FULL_IMG:
+ if UPDATE_TYPE_S[0] == FULL_IMAGE:
SS_mk_full_img(BASE_OLD, BASE_NEW, OUT_DIR, PART_NAME, UPDATE_CFG_PATH)
- elif UPDATE_TYPE == DELTA_IMG:
- SS_mk_delta_img(BASE_OLD, BASE_NEW, OUT_DIR, PART_NAME, UPDATE_CFG_PATH)
+ # #### currently does not support LZMA ####
+ # elif UPDATE_TYPE == DELTA_IMAGE:
+ # SS_mk_delta_img(BASE_OLD, BASE_NEW, OUT_DIR, PART_NAME, UPDATE_CFG_PATH, COMPRESSION_LZMA)
+ elif UPDATE_TYPE_S[0] == DELTA_IMAGE:
+ SS_mk_delta_img(BASE_OLD, BASE_NEW, OUT_DIR, PART_NAME, UPDATE_CFG_PATH, COMPRESSION_BROTLI)
elif UPDATE_TYPE == DELTA_FS:
AttributeFile = ATTR_NEW
- ATTR_FILE = OUT_DIR+'/'+PART_NAME+ATTR_DOC_EXT
+ ATTR_FILE = OUT_DIR + '/' + PART_NAME + ATTR_DOC_EXT
Diff_AttrFiles(ATTR_OLD, ATTR_NEW, ATTR_FILE)
Old_files, Old_dirs = Get_Files(BASE_OLD)
New_files, New_dirs = Get_Files(BASE_NEW)
if not UPDATE_CFG_PATH == EMPTY:
SS_update_cfg(PART_NAME, UPDATE_CFG_PATH)
-
elif UPDATE_TYPE == EXTRA:
print('UPDATE_TYPE ---- EXTRA')
- elif UPDATE_TYPE == PRE_UA:
- print('UPDATE_TYPE ---- PRE_UA')
else:
print('UPDATE_TYPE ---- UNKNOWN FORMAT')
logging.info('Time start [%s] - Time end [%s]' % (start, end))
print('Done with [%s][%d]---- Time start [%s] - Time end [%s]' % (PART_NAME, MEM_REQ, start, end))
- except:
+ except Exception as exc:
logging.error('Usage: {} <Update_Type> <Part_Name> <OLD_Base> <NEW_Base> <OUT_DIR>'.format(os.path.basename(sys.argv[0])))
- raise
+ raise exc
def SS_update_cfg(DELTA_BIN, UPDATE_CFG_PATH):
ConfigItems = line.split()
if ConfigItems[0] == DELTA_BIN:
DELTA = ConfigItems[1]
- logging.info ('Updating %s config' % DELTA_BIN)
+ logging.info('Updating %s config' % DELTA_BIN)
line = line.rstrip('\n')
Value = MEM_REQ
- line = line.replace(line, line+'\t'+str(Value)+'\n')
+ line = line.replace(line, line + '\t' + str(Value) + '\n')
f.write(line)
else:
f.write(line)
f.close()
-def SS_mk_delta_img(BASE_OLD, BASE_NEW, OUT_DIR, DELTA_BIN, UPDATE_CFG_PATH):
- #for sizes
- ZIMAGE_SCRIPT = COMMON_BIN_PATH+'./dzImagescript.sh'
- ZIMAGE_OLD = BASE_OLD+'_unpacked'
- ZIMAGE_NEW = BASE_NEW+'_unpacked'
- DZIMAGE_HEADER = 'UnpackdzImage'
- DZIMAGE_SEP = ':'
+def SS_mk_delta_img(BASE_OLD, BASE_NEW, OUT_DIR, DELTA_BIN, UPDATE_CFG_PATH, COMPRESSION_METHOD):
+ #for sizes
- oldsize_d= os.path.getsize(BASE_OLD)
- newsize_d= os.path.getsize(BASE_NEW)
- SHA_BIN_DEST= hash_file(BASE_NEW)
- SHA_BIN_BASE=hash_file(BASE_OLD)
+ oldsize_d = os.path.getsize(BASE_OLD)
+ newsize_d = os.path.getsize(BASE_NEW)
+ SHA_BIN_DEST = hash_file(BASE_NEW)
+ SHA_BIN_BASE = hash_file(BASE_OLD)
#incase UPDATE CFG is empty
DELTA = DELTA_BIN
for line in lines:
ConfigItems = line.split()
if ConfigItems[0] == DELTA_BIN:
- logging.info ('Updating %s config' % DELTA_BIN)
+ logging.info('Updating %s config' % DELTA_BIN)
DELTA = ConfigItems[1]
line = line.rstrip('\n')
- line = line.replace(line, line+'\t'+str(oldsize_d)+'\t\t'+str(newsize_d)+'\t\t'+str(SHA_BIN_BASE)+'\t\t'+str(SHA_BIN_DEST)+'\n')
+ line = line.replace(line, line + '\t' + str(oldsize_d) + '\t\t' + str(newsize_d) + '\t\t' + str(SHA_BIN_BASE) + '\t\t' + str(SHA_BIN_DEST) + '\n')
f.write(line)
else:
f.write(line)
f.close()
- #Any validation checks required?
- if (DELTA_BIN == "zImage" or DELTA_BIN == "dzImage" or DELTA_BIN == "KERNEL" or DELTA_BIN == "BOOT") and SUPPORT_DZIMAGE == "TRUE":
-
- #Unpack Old and New Images for creating delta
- subprocess.call([ZIMAGE_SCRIPT, '-u', BASE_OLD])
- subprocess.call([ZIMAGE_SCRIPT, '-u', BASE_NEW])
-
- DeltaFiles = []
- Old_files, Old_dirs = Get_Files(ZIMAGE_OLD)
- New_files, New_dirs = Get_Files(ZIMAGE_NEW)
+ patchLoc = '%s/%s' % (OUT_DIR, DELTA)
+ logging.info('Make Delta Image %s <--> %s ==> %s %s' % (BASE_OLD, BASE_NEW, DELTA_BIN, patchLoc))
+ subprocess.call([DIFF_UTIL, "-c", COMPRESSION_METHOD, BASE_OLD, BASE_NEW, patchLoc])
- patchLoc = '%s/%s_temp' % (OUT_DIR, DELTA_BIN)
- ensure_dir_exists(patchLoc)
- for elt in New_files:
- if elt in Old_files:
- src_file = ZIMAGE_OLD+'/'+elt
- dst_file = ZIMAGE_NEW+'/'+elt
- if not filecmp.cmp(src_file, dst_file):
- patch = '%s/%s' % (patchLoc,elt)
- DeltaFiles.append(patch)
- subprocess.call([DIFF_UTIL,src_file,dst_file,patch])
- logging.info('Make dz Image %s <--> %s ==> %s %s' % (src_file, dst_file , DELTA_BIN, patch))
-
- #Append all delta files to make image.delta
-
- #HEADER FORMAT MAGICNAME:FILECOUNT:[FILENAME:FILESIZE:][FILECONTENT/S]
- HeaderStr = DZIMAGE_HEADER+DZIMAGE_SEP+'%d' % len(DeltaFiles)
- HeaderStr = HeaderStr+DZIMAGE_SEP
-
- with open(OUT_DIR+'/'+DELTA, 'w') as DeltaFile:
- for fname in DeltaFiles:
- DeltaSize = os.path.getsize(fname)
- HeaderStr = HeaderStr+path_leaf(fname)+DZIMAGE_SEP+'%d' % DeltaSize
- HeaderStr = HeaderStr+DZIMAGE_SEP
- #Using 128 bytes as max Header.
- logging.info('zImage Header - %s' % HeaderStr.ljust(128,'0'))
- DeltaFile.write(HeaderStr.ljust(128,'0'))
- for fname in DeltaFiles:
- with open(fname) as infile:
- DeltaFile.write(infile.read())
- infile.close()
-
- DeltaFile.close()
- shutil.rmtree(patchLoc)
- shutil.rmtree(ZIMAGE_OLD)
- shutil.rmtree(ZIMAGE_NEW)
- #Do we need to incorprate Max memory required for backup??
-
- else:
- patchLoc = '%s/%s' % (OUT_DIR, DELTA)
- logging.info('Make Delta Image %s <--> %s ==> %s %s' % (BASE_OLD, BASE_NEW , DELTA_BIN, patchLoc))
- subprocess.call([DIFF_UTIL,BASE_OLD,BASE_NEW,patchLoc])
-
-
-
-def SS_mk_full_img(BASE_OLD, BASE_NEW, OUT_DIR, DELTA_BIN ,UPDATE_CFG_PATH):
- logging.info('Make Full Image %s <--> %s ==> %s' % (BASE_OLD, BASE_NEW ,DELTA_BIN))
- oldsize_d= os.path.getsize(BASE_OLD)
- newsize_d= os.path.getsize(BASE_NEW)
- SHA_BIN_DEST= hash_file(BASE_NEW)
- SHA_BIN_BASE=hash_file(BASE_OLD)
+def SS_mk_full_img(BASE_OLD, BASE_NEW, OUT_DIR, DELTA_BIN, UPDATE_CFG_PATH):
+ logging.info('Make Full Image %s <--> %s ==> %s' % (BASE_OLD, BASE_NEW, DELTA_BIN))
+ oldsize_d = os.path.getsize(BASE_OLD)
+ newsize_d = os.path.getsize(BASE_NEW)
+ SHA_BIN_DEST = hash_file(BASE_NEW)
+ SHA_BIN_BASE = hash_file(BASE_OLD)
#echo -e "\t${oldsize_d}\t\t${newsize_d}\t\t${SHA_BIN_BASE}\t\t${SHA_BIN_DEST}" >> ${DATA_DIR}/update_new.cfg
SS_UpdateSize(BASE_OLD, BASE_NEW)
for line in lines:
ConfigItems = line.split()
if ConfigItems[0] == DELTA_BIN:
- logging.info ('Updating %s config' % DELTA_BIN)
+ logging.info('Updating %s config' % DELTA_BIN)
DELTA = ConfigItems[1]
line = line.rstrip('\n')
- line = line.replace(line, line+'\t'+str(oldsize_d)+'\t\t'+str(newsize_d)+'\t\t'+str(SHA_BIN_BASE)+'\t\t'+str(SHA_BIN_DEST)+'\n')
+ line = line.replace(line, line + '\t' + str(oldsize_d) + '\t\t' + str(newsize_d) + '\t\t' + str(SHA_BIN_BASE) + '\t\t' + str(SHA_BIN_DEST) + '\n')
f.write(line)
else:
f.write(line)
f.close()
+
def zipdir(path, zip):
- for root, dirs, files in os.walk(path):
- for file in files:
- zip.write(os.path.join(root, file))
+ for root, dirs, files in os.walk(path):
+ for file in files:
+ zip.write(os.path.join(root, file))
+
def ensure_dir_exists(path):
if not os.path.exists(path):
os.makedirs(path)
+ elif os.path.isfile(path):
+ raise FileExistsError
#shutil.rmtree(path)
#os.makedirs(path)
def path_leaf(path):
- head, tail = ntpath.split(path) #This is for windows?? Recheck
- return tail
+ head, tail = os.path.split(path)
+ return tail
-def path_head(path):
- head, tail = ntpath.split(path)
- return head
-def difflines(list1, list2):
- c = set(list1).union(set(list2))
- d = set(list1).intersection(set(list2))
- return list(c-d)
-
-#Creating Diff between OLD and NEW attribute files v12
+# Creating Diff between OLD and NEW attribute files v12
def Diff_AttrFiles(ATTR_OLD, ATTR_NEW, ATTR_FILE):
if GenerateDiffAttr == "FALSE":
return
with open(ATTR_NEW, 'r') as f_new:
lines2 = set(f_new.read().splitlines())
- lines = difflines(lines2, lines1)
+ lines = set.difference(lines2, lines1)
with open(ATTR_FILE, 'w+') as file_out:
for line in lines:
- if line not in lines1:
- logging.info('Diff_AttrFiles - %s' % line)
- file_out.write(line+'\n')
-
- f_new.close()
- f_old.close()
- file_out.close()
-
+ logging.info('Diff_AttrFiles - %s' % line)
+ file_out.write(line + '\n')
-def Update_Attr(RequestedPath, Type, File_Attibutes, Sym_Attibutes):
- #Full File Path should MATCH
+def Update_Attr(RequestedPath, Type, File_Attributes, Sym_Attributes):
+ # Full File Path should MATCH
if GenerateDiffAttr == "FALSE":
return
- FilePath = '"/'+RequestedPath+'"'
+ FilePath = '"' + RequestedPath + '"'
#print ('FilePath - %s'% (FilePath))
with open(AttributeFile) as f:
for line in f:
if FilePath in line:
if Type == SYMLINK_TYPE:
- Sym_Attibutes.append(line)
+ Sym_Attributes.append(line)
else:
- File_Attibutes.append(line)
+ File_Attributes.append(line)
-'''This function returns the SHA-1 hash of the file passed into it'''
def hash_file(filename):
+ '''This function returns the SHA-1 hash of the file passed into it'''
- # make a hash object
- h = hashlib.sha1()
+ # make a hash object
+ h = hashlib.sha1()
- # open file for reading in binary mode
- with open(filename,'rb') as file:
- # loop till the end of the file
- chunk = 0
- while chunk != b'':
- # read only 1024 bytes at a time
- chunk = file.read(1024*1024)
- h.update(chunk)
+ # open file for reading in binary mode
+ with open(filename, 'rb') as file:
+ # loop till the end of the file
+ chunk = 0
+ while chunk != b'':
+ # read only 1024 bytes at a time
+ chunk = file.read(1024 * 1024)
+ h.update(chunk)
- # return the hex representation of digest
- return h.hexdigest()
-
-def find_dupes_dir(BASE_OLD, BASE_NEW):
- dups = {}
- fdupes = {}
- print('Finding Duplicates in - %s %s' % (BASE_OLD, BASE_NEW))
- logging.info('Finding Duplicates in - %s %s' % (BASE_OLD, BASE_NEW))
- for rootbase, subdirsB, fileListB in os.walk(BASE_OLD):
- #print('Scanning %s...' % rootbase)
- for filename in fileListB:
- path = os.path.join(rootbase, filename)
- if os.path.islink(path):
- continue
- # Calculate hash
- file_hash = hash_file(path)
- dups[file_hash] = path
-
- for roottarget, subdirsT, fileListT in os.walk(BASE_NEW):
- #print('Scanning %s...' % roottarget)
- for filename in fileListT:
- # Get the path to the file
- path = os.path.join(roottarget, filename)
- if os.path.islink(path):
- continue
- # Calculate hash
- file_hash = hash_file(path)
- # Add or append the file path
- if file_hash in dups:
- BaseStr = dups.get(file_hash)
- Baseloc = path.find('/')
- TarLoc = BaseStr.find('/')
- if not path[Baseloc:] == BaseStr[TarLoc:]:
- logging.info('Dupes - %s ==> %s' % (path[Baseloc:], BaseStr[TarLoc:]))
- fdupes[path] = BaseStr
- logging.info('Total Duplicate files %d' % (len(fdupes)))
- return fdupes
+ # return the hex representation of digest
+ return h.hexdigest()
-def find_dupes_list(BASE_OLD, BASE_NEW, fileListB, fileListT):
+def find_dupes_list(BASE_OLD, BASE_NEW, fileListB, fileListT, Old_hardlinks, New_hardlinks):
dups = {}
fdupes = {}
print('Finding Duplicates in - %s %s' % (BASE_OLD, BASE_NEW))
for filename in fileListB:
- Src_File = BASE_OLD+'/'+filename
- if os.path.islink(Src_File) or os.path.isdir(Src_File):
+ Src_File = BASE_OLD + '/' + filename
+ if os.path.islink(Src_File) or os.path.isdir(Src_File) or ishardlink(Src_File):
continue
# Calculate hash
file_hash = hash_file(Src_File)
dups[file_hash] = Src_File
-
for filename in fileListT:
- Dest_File = BASE_NEW+'/'+filename
- if os.path.islink(Dest_File) or os.path.isdir(Dest_File):
+ Dest_File = BASE_NEW + '/' + filename
+ if os.path.islink(Dest_File) or os.path.isdir(Dest_File) or ishardlink(Dest_File):
continue
# Calculate hash
file_hash = hash_file(Dest_File)
if not BaseStr[Baseloc:] == filename:
#print('Dupes - %s ==> %s' % (BaseStr[Baseloc:], filename))
fdupes[BaseStr] = filename
-
logging.info('Total Duplicate files %d' % (len(fdupes)))
return fdupes
+
def SS_UpdateSize(src_file, dst_file):
global MEM_REQ
global MEM_FILE
- oldsize_d= os.path.getsize(src_file)
- newsize_d= os.path.getsize(dst_file)
+ oldsize_d = os.path.getsize(src_file)
+ newsize_d = os.path.getsize(dst_file)
if oldsize_d >= newsize_d:
Max = newsize_d
else:
MEM_FILE = dst_file
-
def SS_Generate_Delta(PART_NAME, BASE_OLD, Old_files, Old_dirs, BASE_NEW, New_files, New_dirs, OUT_DIR, ATTR_FILE):
print('Going from %d files to %d files' % (len(Old_files), len(New_files)))
logging.info('Going from %d files to %d files' % (len(Old_files), len(New_files)))
files_changed = []
files_unchanged = []
files_renamed = []
- File_Attibutes = []
- Sym_Attibutes = []
+ File_Attributes = []
+ Sym_Attributes = []
files_Del_List = {}
files_New_List = {}
- MyDict_Patches = {}
-
-
- PWD = os.getcwd()
+ # Get dictionaries used for hardlinks form both directories
+ New_hardlinks = get_hardlinks(BASE_NEW)
+ Old_hardlinks = get_hardlinks(BASE_OLD)
# Generate NEW List
for elt in New_files:
files_removed.append(elt)
logging.info('Old files %s' % elt)
-
for elt in Old_dirs:
#print('List of Old Dirs %s' % elt)
# Delete END logic goes in hand with UPG, After Diffs and moves, DEL END should be done.
if elt not in New_dirs:
Dir_removed.append(elt)
- logging.info('Old Dirs %s' % elt+'/')
+ logging.info('Old Dirs %s' % elt + '/')
for elt in New_dirs:
if elt not in Old_dirs:
# What files have changed contents but not name/path?
for elt in New_files:
if elt in Old_files:
- #Both are symbolic linkes and they differ
- src_file = BASE_OLD+'/'+elt
- dst_file = BASE_NEW+'/'+elt
+ # Both are symbolic linkes and they differ
+ src_file = BASE_OLD + '/' + elt
+ dst_file = BASE_NEW + '/' + elt
#print('Files Changed - %s -%s' % (src_file,dst_file))
if os.path.islink(src_file) and os.path.islink(dst_file):
- if not os.readlink(src_file) == os.readlink(dst_file):
+ if not (os.readlink(src_file) == os.readlink(dst_file)):
files_changed.append(elt)
#print('%d Sym link files changed' % len(files_changed))
logging.info('Sym links Changed - %s' % elt)
else:
files_unchanged.append(elt)
- #Both are Normal files and they differ. (Is file returns true in case of symlink also, so additional check to find either of the file is symlink)
- elif (not (os.path.islink(src_file) or os.path.islink(dst_file))) and os.path.isfile(src_file) and os.path.isfile(dst_file):
+ # Both are hardlinks - we add them because we can't be sure if file they point to changes
+ elif elt in New_hardlinks and elt in Old_hardlinks:
+ files_changed.append(elt)
+ # Both are Normal files and they differ. (Is file returns true in case of sym/hardlink also,
+			# so an additional check is needed to find whether either of the files is a sym/hardlink)
+ elif (not (os.path.islink(src_file) or os.path.islink(dst_file))) \
+ and (not (elt in New_hardlinks or elt in Old_hardlinks)) \
+ and os.path.isfile(src_file) and os.path.isfile(dst_file):
if not filecmp.cmp(src_file, dst_file):
files_changed.append(elt)
#print('%d Normal files changed' % len(files_changed))
#print('Files Changed - %s' % elt)
else:
files_unchanged.append(elt)
- #File types differ between BASE and TARGET
+ # File types differ between BASE and TARGET
else:
logging.info('Files are of diff types but same names Src- %s Des- %s' % (src_file, dst_file))
- #Both file types have changed and they differ
- #Case 1: First Delete the OLD entry file type (Be it anything)
- #Processing and updating partition txt file will be done under REMOVED case and NEW files case accordingly, we just make an entry here
+ # Both file types have changed and they differ
+ # Case 1: First Delete the OLD entry file type (Be it anything)
+ # Processing and updating partition txt file will be done under REMOVED case and NEW files case accordingly, we just make an entry here
files_removed.append(elt)
files_new.append(elt)
-
# HANDLING VERBATIM - Remove from changed list and delete the entries on device first
- #This script is called partition wise, So, how do u want to handle it? (specialy for delete case?)
+	# This script is called partition-wise, so how do you want to handle it? (especially for the delete case?)
print("Check for any verbatim under - %s" % VERBATIM_LIST)
if SUPPORT_VERBATIM == "TRUE" and os.path.exists(VERBATIM_LIST):
if line in files_new:
files_new.remove(line)
- #Currently if Version or number is the first character of the file, then we are NOT making any diffs.
+ # Currently if Version or number is the first character of the file, then we are NOT making any diffs.
if SUPPORT_RENAME == "TRUE":
for elt in files_removed:
- if os.path.isfile(BASE_OLD+'/'+elt):
+ if os.path.isfile(BASE_OLD + '/' + elt):
FileName = path_leaf(elt)
- entries = re.split('[0-9]' , FileName)
- #Gives the STRING part of NAME. if name starts with version then later part wil b string
+ entries = re.split('[0-9]', FileName)
+				# Gives the STRING part of NAME. If the name starts with a version then the later part will be the string
#print('Entires under removed list after split - %s %s - %s' % (FileName, entries[0], elt))
- #If version is starting at the begining of the string?? shd we hav additional check for such cases??
+				# If the version is at the beginning of the string, should we have an additional check for such cases?
if len(entries[0]) > 0:
files_Del_List.update({entries[0]: elt})
for elt in files_new:
- if os.path.isfile(BASE_NEW+'/'+elt):
+ if os.path.isfile(BASE_NEW + '/' + elt):
FileName = path_leaf(elt)
- entries = re.split('[0-9]' , FileName)
+ entries = re.split('[0-9]', FileName)
#print('Entires under NEWfiles list after split - %s %s - %s' % (FileName, entries[0], elt))
if len(entries[0]) > 0:
files_New_List.update({entries[0]: elt})
#print('Key value pair -%s -%s' % (key, value))
if key in files_New_List:
# this file is the same name in both!
- src_file = BASE_OLD+'/'+value
- dst_file = BASE_NEW+'/'+files_New_List[key]
- olddirpath = path_head(files_New_List[key])
- newdirpath = path_head(value)
- if os.path.islink(src_file) or os.path.islink(dst_file):
+ src_file = BASE_OLD + '/' + value
+ dst_file = BASE_NEW + '/' + files_New_List[key]
+ # we don't want to move hardlinks
+ if ishardlink(src_file) or ishardlink(dst_file):
+ logging.debug('Cannot diff as one of them is a hardlink')
+ elif os.path.islink(src_file) or os.path.islink(dst_file):
logging.debug('Cannot diff as one of them is Symlink')
elif os.path.isdir(src_file) or os.path.isdir(dst_file):
logging.debug('Cannot diff as one of them is dir')
Types Supported: DIFFS, MOVES, NEWS, DELETES, SYMDIFFS, SYMNEWS.
'''
Sym_Diff_Cnt = 0
- Sym_New_Cnt = 0;
+ Sym_New_Cnt = 0
+ Hard_Diff_Cnt = 0
+ Hard_New_Cnt = 0
Del_Cnt = 0
New_Cnt = 0
Diff_Cnt = 0
Move_Cnt = 0
Verbatim_Cnt = 0
- SymLinkDoc = OUT_DIR+'/'+PART_NAME+SYMLINK_DOC_NAME
- Partition_Doc = open(OUT_DIR+'/'+PART_NAME+'.txt','w')
- Partition_Doc_SymLinks = open(SymLinkDoc,'w')
+ SymLinkDoc = OUT_DIR + '/' + PART_NAME + SYMLINK_DOC_NAME
+ HardLinkDoc = OUT_DIR + '/' + PART_NAME + HARDLINK_DOC_NAME
+ Partition_Doc = open(OUT_DIR + '/' + PART_NAME + '.txt', 'w')
+ Partition_Doc_SymLinks = open(SymLinkDoc, 'w')
+ Partition_Doc_HardLinks = open(HardLinkDoc, "w")
print("writing diff'ed changed files...")
for elt in files_changed:
- dst_file = BASE_NEW+'/'+elt
- src_file = BASE_OLD+'/'+elt
- #Both files are symbolic links and they differ
+ dst_file = BASE_NEW + '/' + elt
+ src_file = BASE_OLD + '/' + elt
+ # Both files are symbolic links and they differ
if os.path.islink(dst_file) and os.path.islink(src_file):
- #Both are symlinks and they differ
+ # Both are symlinks and they differ
logging.debug(' File Changed is Link %s ' % dst_file)
patch = os.readlink(dst_file)
Sym_Diff_Cnt = Sym_Diff_Cnt + 1
Partition_Doc_SymLinks.write('SYM:DIFF:%s:%s:%s\n' % (elt, elt, patch))
- Update_Attr(elt, "SYM", File_Attibutes, Sym_Attibutes)
- #Both are NORMAL files and they differ
- elif (not (os.path.islink(src_file) or os.path.islink(dst_file))) and os.path.isfile(dst_file) and os.path.isfile(src_file):
- #Both are files and they differ
+ Update_Attr(elt, "SYM", File_Attributes, Sym_Attributes)
+ # Both are hardlinks and they differ (point to something different, new/changed file)
+ if elt in Old_hardlinks and elt in New_hardlinks:
+ if Old_hardlinks[elt] != New_hardlinks[elt] or New_hardlinks[elt] in files_changed or New_hardlinks[elt] in files_new:
+ logging.debug('Hardlinks changed %s %s' % (src_file, dst_file))
+ patch = New_hardlinks[elt]
+ Hard_Diff_Cnt += 1
+ Partition_Doc_HardLinks.write('HARD:DIFF:%s:%s:%s\n' % (elt, elt, patch))
+ # Both are NORMAL files and they differ
+ elif (not (os.path.islink(src_file) or os.path.islink(dst_file))) \
+ and (not (elt in Old_hardlinks or elt in New_hardlinks)) \
+ and os.path.isfile(dst_file) and os.path.isfile(src_file):
+ # Both are files and they differ
Diff_Cnt = Diff_Cnt + 1
- patchName = (DIFF_PREFIX+'%d_%s_'+PART_NAME+DIFF_SUFFIX) % (Diff_Cnt, path_leaf(elt))
+ patchName = (DIFF_PREFIX + '%d_%s_' + PART_NAME + DIFF_SUFFIX) % (Diff_Cnt, path_leaf(elt))
patchLoc = '%s/%s' % (OUT_DIR, patchName)
logging.debug(' File Differ %s %s' % (src_file, dst_file))
SS_UpdateSize(src_file, dst_file)
FORMAT = "REG"
- ret = subprocess.call([DIFF_UTIL,src_file,dst_file,patchLoc])
+ ret = subprocess.call([DIFF_UTIL, src_file, dst_file, patchLoc])
if ret is not 0:
logging.debug('Failed to create diff %d %s %s\n' % (ret, src_file, dst_file))
files_new.append(elt)
else:
Partition_Doc.write('DIFF:REG:%s:%s:%s:%s:%s\n' % (elt, elt, hash_file(src_file), hash_file(dst_file), patchName))
- Update_Attr(elt, "FILE", File_Attibutes, Sym_Attibutes)
- #Both differ but they are of diff types
+ Update_Attr(elt, "FILE", File_Attributes, Sym_Attributes)
+ # Both differ but they are of diff types
else:
- #Processing and updating partition txt file will be done under REMOVED case and NEW files case accordingly, we just make an entry here
+ # Processing and updating partition txt file will be done under REMOVED case and NEW files case accordingly, we just make an entry here
files_removed.append(elt)
files_new.append(elt)
- fdupes = find_dupes_list(BASE_OLD, BASE_NEW, files_removed, files_new)
+ fdupes = find_dupes_list(BASE_OLD, BASE_NEW, files_removed, files_new, Old_hardlinks, New_hardlinks)
for oldpath, newpath in fdupes.iteritems():
logging.info('Dupes %s -> %s' % (oldpath, newpath))
-
for elt in files_removed:
- src_file = BASE_OLD+'/'+elt
- #If parent directory is deleted.. & del end not possible. (==> Moves should be done before deletes in ENGINE)
+ src_file = BASE_OLD + '/' + elt
+ # If parent directory is deleted.. & del end not possible. (==> Moves should be done before deletes in ENGINE)
if src_file in fdupes.keys():
- dst_file = BASE_NEW+'/'+ fdupes[src_file]
+ dst_file = BASE_NEW + '/' + fdupes[src_file]
logging.debug(' File Moved %s ==> %s' % (src_file, dst_file))
Move_Cnt = Move_Cnt + 1
Partition_Doc.write('MOVE:REG:%s:%s:%s\n' % (elt, fdupes[src_file], hash_file(src_file)))
files_removed.remove(elt)
files_new.remove(fdupes[src_file])
-
- #Should be placed after removing duplicates, else they will be filtered here.
+ # Should be placed after removing duplicates, else they will be filtered here.
# loop shd b for all NEW files, rather than for all delete files (Current understanding)
- # First Step: Sort & Filter out unwanted files
+ # First Step: Sort & Filter out unwanted files
# Minimum condition used is,
# 1. File name should match 70%
# 2. Extensions should be same
# 3. File name length shd b greater than 3 char
# 4. As we are using sorting on file names, once file name does not match and R_Flag is set to true, we nee not check remaining files. So, will execute break.
- # 5. Should consider editdistance for RENAME LOGIC ==> TBD
-
+ # 5. Should consider editdistance for RENAME LOGIC ==> TBD
Base_DelList = files_removed[:]
Base_NewList = files_new[:]
DelList = sorted(Base_DelList, key=path_leaf)
Filter1 = []
Filter2 = []
- #Remove unwanted items which we cant make diff with for rename logic
+ # Remove unwanted items which we cant make diff with for rename logic
for file in DelList:
- if os.path.islink(BASE_OLD+'/'+file):
+ if os.path.islink(BASE_OLD + '/' + file):
continue
- elif os.path.isdir(BASE_OLD+'/'+file):
+ elif ishardlink(BASE_OLD + '/' + file):
+ continue
+ elif os.path.isdir(BASE_OLD + '/' + file):
continue
else:
Filter1.append(file)
DelList = Filter1
for file in NewList:
- if os.path.islink(BASE_NEW+'/'+file):
+ if os.path.islink(BASE_NEW + '/' + file):
+ continue
+ elif ishardlink(BASE_NEW + '/' + file):
continue
- elif os.path.isdir(BASE_NEW+'/'+file):
+ elif os.path.isdir(BASE_NEW + '/' + file):
continue
elif len(path_leaf(file)) <= 3:
- logging.debug('Ignored for best picks -%s ' % (BASE_NEW+'/'+file))
+ logging.debug('Ignored for best picks -%s ' % (BASE_NEW + '/' + file))
continue
else:
Filter2.append(file)
-
NewList = Filter2
-
logging.debug('Rename Logic After filter: Delcount -%d NewCount -%d' % (len(DelList), len(NewList)))
for new_file in NewList:
- R_Flag = 'FALSE';
- DirPathNew = path_head(new_file)
+ R_Flag = 'FALSE'
FileNameNew = path_leaf(new_file)
DiffSize = 0
- winning_patch_sz = os.path.getsize(BASE_NEW+'/'+new_file)
+ winning_patch_sz = os.path.getsize(BASE_NEW + '/' + new_file)
New_fs = winning_patch_sz
winning_file = ''
for del_file in DelList:
FileNameOld = path_leaf(del_file)
- if (FileNameOld.startswith(FileNameNew[:len(FileNameNew)*7/10]) and (os.path.splitext(FileNameNew)[1] == os.path.splitext(del_file)[1])):
+ if (FileNameOld.startswith(FileNameNew[:len(FileNameNew) * 7 / 10]) and (os.path.splitext(FileNameNew)[1] == os.path.splitext(del_file)[1])):
#winning_patch_sz = 0.9 * os.path.getsize(BASE_NEW+'/'+new_file)
- #Percentage difference between two file sizes is within 30%, then we consider for diff generation
- Del_fs = os.path.getsize(BASE_OLD+'/'+del_file)
- v1 = abs(New_fs-Del_fs)
- v2 = (New_fs+Del_fs)/2
- if( v2<=0 or ((v1/v2) * 100) > 30 ):
+ # Percentage difference between two file sizes is within 30%, then we consider for diff generation
+ Del_fs = os.path.getsize(BASE_OLD + '/' + del_file)
+ v1 = abs(New_fs - Del_fs)
+ v2 = (New_fs + Del_fs) / 2
+ if(v2 <= 0 or ((v1 / v2) * 100) > 30):
logging.debug('Ignore diff generation New_fs - %d Del_Fs - %d' % (New_fs, Del_fs))
continue
logging.debug('I can compute diff between %s %s Del_Fs - %d New_Fs - %d' % (del_file, new_file, Del_fs, New_fs))
- R_Flag = 'TRUE';
- DiffSize = measure_two_filediffs(BASE_OLD+'/'+del_file, BASE_NEW+'/'+new_file)
+ R_Flag = 'TRUE'
+ DiffSize = measure_two_filediffs(BASE_OLD + '/' + del_file, BASE_NEW + '/' + new_file)
if (DiffSize < 0.8 * winning_patch_sz):
winning_patch_sz = DiffSize
winning_file = del_file
- elif (not FileNameOld.startswith(FileNameNew[:len(FileNameNew)*7/10]) and R_Flag == 'TRUE'):
- logging.debug('Becuase nex set of files will not have matching name - break @@ %s %s' % (del_file, new_file))
- break;
+ elif (not FileNameOld.startswith(FileNameNew[:len(FileNameNew) * 7 / 10]) and R_Flag == 'TRUE'):
+				logging.debug('Because next set of files will not have matching name - break @@ %s %s' % (del_file, new_file))
+ break
if len(winning_file) > 0:
logging.debug('Best Pick -%s ==> %s [%d]' % (winning_file, new_file, DiffSize))
files_renamed.append([new_file, winning_file])
if SUPPORT_RENAME == "TRUE":
for elt in files_renamed:
- src_file = BASE_OLD+'/'+elt[1]
- dst_file = BASE_NEW+'/'+elt[0]
+ src_file = BASE_OLD + '/' + elt[1]
+ dst_file = BASE_NEW + '/' + elt[0]
Diff_Cnt = Diff_Cnt + 1
- patchName = (DIFF_PREFIX+'%d_%s_'+PART_NAME+DIFF_SUFFIX) % (Diff_Cnt, path_leaf(elt[1]))
+ patchName = (DIFF_PREFIX + '%d_%s_' + PART_NAME + DIFF_SUFFIX) % (Diff_Cnt, path_leaf(elt[1]))
#patchName = (DIFF_PREFIX+'_%s'+DIFF_SUFFIX) % (path_leaf(elt[0]))
patchLoc = '%s/%s' % (OUT_DIR, patchName)
logging.debug(' File Renamed %s ==> %s' % (src_file, dst_file))
# Should we consider measure_two_filediffs ?? so that patch size is NOT greater than actual file?
# What if folder path has numerics??
- if os.path.isdir(src_file) or os.path.isdir(dst_file):
- #This case never occurs??
+ if os.path.isdir(src_file) or os.path.isdir(dst_file):
+ # This case never occurs??
Partition_Doc.write('"%s" and "%s" renamed 0 0\n' % (elt[0], elt[1]))
- Update_Attr(elt[0], "FILE", File_Attibutes, Sym_Attibutes)
- else: #Make sure these files are PROPER and they shd NOT be symlinks
+ Update_Attr(elt[0], "FILE", File_Attributes, Sym_Attributes)
+ # Make sure these files are PROPER and they shd NOT be symlinks
+ elif not (os.path.islink(src_file) or os.path.islink(dst_file)) \
+ and not (elt[0] in New_hardlinks or elt[1] in Old_hardlinks) \
+ and (os.path.isfile(src_file) and os.path.isfile(dst_file)):
if filecmp.cmp(src_file, dst_file):
Move_Cnt = Move_Cnt + 1
Diff_Cnt = Diff_Cnt - 1
Partition_Doc.write('MOVE:REG:%s:%s:%s\n' % (elt[1], elt[0], hash_file(src_file)))
else:
FORMAT = "REG"
- ret = subprocess.call([DIFF_UTIL,src_file,dst_file,patchLoc])
+ ret = subprocess.call([DIFF_UTIL, src_file, dst_file, patchLoc])
if ret is not 0:
logging.debug('Failed to create diff %d %s %s\n' % (ret, src_file, dst_file))
files_new.append(elt)
Partition_Doc.write('DIFF:REG:%s:%s:%s:%s:%s\n' % (elt[1], elt[0], hash_file(src_file), hash_file(dst_file), patchName))
SS_UpdateSize(src_file, dst_file)
- Update_Attr(elt[0], "FILE", File_Attibutes, Sym_Attibutes)
+ Update_Attr(elt[0], "FILE", File_Attributes, Sym_Attributes)
-
- #HANDLING VERBATIM - We Process NEWs and DELETEs for Verbatim list ONLY after processing duplicates & rename functionality.
- #So that, the rename functionality will NOT create PATCH instead of verbatims.
+ # HANDLING VERBATIM - We Process NEWs and DELETEs for Verbatim list ONLY after processing duplicates & rename functionality.
+ # So that, the rename functionality will NOT create PATCH instead of verbatims.
if SUPPORT_VERBATIM == "TRUE" and os.path.exists(VERBATIM_LIST):
with open(VERBATIM_LIST, 'r') as F_News:
lines = set(F_News.read().splitlines())
for line in lines:
- if not line in files_new:
- if os.path.exists(BASE_NEW+'/'+line):
+ if line not in files_new:
+ if os.path.exists(BASE_NEW + '/' + line):
files_new.append(line)
- Verbatim_Cnt = Verbatim_Cnt+1
- logging.debug("Added to list of verbatims -%s" % BASE_NEW+'/'+line)
-
-
+ Verbatim_Cnt = Verbatim_Cnt + 1
+                    logging.debug("Added to list of verbatims -%s" % (BASE_NEW + '/' + line))
for elt in files_removed:
- #if files are part of patches after renaming, we shd remove them as part of removed.
- src_file = BASE_OLD+'/'+elt
+        # if files are part of patches after renaming, we should remove them as part of removed.
+ src_file = BASE_OLD + '/' + elt
if os.path.islink(src_file):
Partition_Doc.write('DEL:SYM:%s\n' % (elt))
+ elif elt in Old_hardlinks:
+ Partition_Doc.write('DEL:HARD:%s\n' % (elt))
elif os.path.isdir(src_file):
- #If we change to DIR TYPE, then the same token should be modified on UA also and SHA should be accordingly passed.
+ # If we change to DIR TYPE, then the same token should be modified on UA also and SHA should be accordingly passed.
Partition_Doc.write('DEL:REG:%s:NA\n' % (elt))
else:
Partition_Doc.write('DEL:REG:%s:%s\n' % (elt, hash_file(src_file)))
Dir_removed.sort(reverse=True)
for elt in Dir_removed:
- #if Dir is empty, add it to the removed list.
- src_file = BASE_OLD+'/'+elt
- #Irrespective of weather files are MOVED or DIFF'ed, we can delete the folders. This action can be performed at the end.
- #It covers symlinks also, as NEW symlinks cannot point to NON existant folders of TARGET (NEW binary)
+ # if Dir is empty, add it to the removed list.
+ src_file = BASE_OLD + '/' + elt
+        # Irrespective of whether files are MOVED or DIFF'ed, we can delete the folders. This action can be performed at the end.
+        # It covers symlinks also, as NEW symlinks cannot point to non-existent folders of TARGET (NEW binary)
if os.path.isdir(src_file):
Partition_Doc.write('DEL:END:%s\n' % (elt))
Del_Cnt = Del_Cnt + 1
logging.debug(' Dir Deleted- %s' % src_file)
+ try:
+ ensure_dir_exists(NEW_FILES_PATH)
+ except FileExistsError as exc:
+ logging.error('Directory %s used by this script is already an existing file' % NEW_FILES_PATH)
+ raise exc
for elt in files_new:
- dst_file = BASE_NEW+'/'+elt
- newfiles_dest_path = 'system/'
- ensure_dir_exists(newfiles_dest_path)
+ dst_file = os.path.join(BASE_NEW, elt)
+ destpath = os.path.join(NEW_FILES_PATH, elt)
+
if os.path.islink(dst_file):
patch = os.readlink(dst_file)
logging.debug(' File New Links %s' % elt)
Partition_Doc_SymLinks.write('SYM:NEW:%s:%s\n' % (elt, patch))
- #What if this is only a new sym link and folder already exists??? Should recheck
- destpath = newfiles_dest_path + elt
- if not os.path.exists(path_head(destpath)):
- os.makedirs(path_head(destpath))
+ # What if this is only a new sym link and folder already exists??? Should recheck
+ if not os.path.exists(os.path.dirname(destpath)):
+ os.makedirs(os.path.dirname(destpath))
logging.info('New SymLink - Adding missing Dir')
- #Update_Attr(elt, "SYM", File_Attibutes, Sym_Attibutes)
+ Update_Attr(elt, "SYM", File_Attributes, Sym_Attributes)
Sym_New_Cnt = Sym_New_Cnt + 1
- elif os.path.isdir(dst_file): # We create just empty directory here
- destpath = newfiles_dest_path + elt
+ elif elt in New_hardlinks:
+ patch = New_hardlinks[elt]
+ logging.debug('File new hardlink %s' % elt)
+            Partition_Doc_HardLinks.write('HARD:NEW:%s:%s\n' % (elt, patch))
+ if not os.path.exists(os.path.dirname(destpath)):
+ os.makedirs(os.path.dirname(destpath))
+ logging.info('New hardlink - Adding missing Dir')
+ Hard_New_Cnt += 1
+ elif os.path.isdir(dst_file): # We create just empty directory here
if not os.path.exists(destpath):
os.makedirs(destpath)
logging.debug(' File New Dir %s' % destpath)
New_Cnt = New_Cnt + 1
else:
New_Cnt = New_Cnt + 1
- #newfiles_dest_path = OUT_DIR + '/system/'
- destpath = newfiles_dest_path + elt
destdir = os.path.dirname(destpath)
logging.debug('New files - %s ==> %s' % (dst_file, destdir))
if not os.path.isdir(destdir):
try:
os.makedirs(destdir)
- except:
+ except Exception as exc:
logging.critical('Error in NEW files DIR entry -%s' % destdir)
- raise
+ raise exc
try:
if not stat.S_ISFIFO(os.stat(dst_file).st_mode):
shutil.copy2(dst_file, destpath)
logging.debug('New files copied from- %s to- %s' % (dst_file, destpath))
- except:
+ except Exception as exc:
logging.critical('Error in NEW files entry -%s -%s' % (dst_file, destpath))
- raise
+ raise exc
+ Update_Attr(elt, "FILE", File_Attributes, Sym_Attributes)
for elt in Dir_Added:
- newfiles_dest_path = 'system/'
- ensure_dir_exists(newfiles_dest_path)
- destpath = newfiles_dest_path + elt
+ destpath = os.path.join(NEW_FILES_PATH, elt)
if not os.path.exists(destpath):
os.makedirs(destpath)
logging.debug(' DirList New Dir %s' % destpath)
New_Cnt = New_Cnt + 1
- #Base directory should be system
+ # Base directory should be system
print 'Compressing New files'
if (New_Cnt > 0 or Sym_New_Cnt > 0):
WorkingDir = os.getcwd()
- os.chdir(os.getcwd()+"/"+NEW_FILES_PATH)
+ os.chdir(os.path.join(os.getcwd(), NEW_FILES_PATH))
logging.info('Curr Working Dir - %s' % os.getcwd())
- os.system(ZIPUTIL+NEW_FILES_PATH+" >> " + LOGFILE)
- shutil.move(NEW_FILES_ZIP_NAME, WorkingDir+"/"+OUT_DIR)
- #New file size?? cos, we extract system.7z from delta.tar and then proceed with decompression
- SS_UpdateSize(WorkingDir+"/"+OUT_DIR+"/"+NEW_FILES_ZIP_NAME, WorkingDir+"/"+OUT_DIR+"/"+NEW_FILES_ZIP_NAME)
+ log_path = os.path.join(WorkingDir, LOGFILE)
+ os.system(ZIPUTIL + NEW_FILES_ZIP_NAME + " . " + " >> " + log_path)
+ shutil.move(NEW_FILES_ZIP_NAME, WorkingDir + "/" + OUT_DIR)
+        # New file size?? because we extract system.7z from delta.tar and then proceed with decompression
+ SS_UpdateSize(WorkingDir + "/" + OUT_DIR + "/" + NEW_FILES_ZIP_NAME, WorkingDir + "/" + OUT_DIR + "/" + NEW_FILES_ZIP_NAME)
os.chdir(WorkingDir)
shutil.rmtree(NEW_FILES_PATH)
# use 7z a system.7z ./*
logging.info('%d files unchanged' % len(files_unchanged))
logging.info('%d files files_renamed' % len(files_renamed))
logging.info('%d files NEW' % len(files_new))
- logging.info('%d File attr' % len(File_Attibutes))
- logging.info('%d Sym attr' % len(Sym_Attibutes))
- logging.info('PaTcHCoUnT:Diffs-%d Moves-%d News-%d Delets-%d SymDiffs-%d SymNews-%d Verbatim -%d\n' % (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt, Verbatim_Cnt))
- print('PaTcHCoUnT:Diffs-%d Moves-%d News-%d Delets-%d SymDiffs-%d SymNews-%d Verbatim -%d\n' % (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt, Verbatim_Cnt))
-
- #There could be duplicates, TODO, can check before adding..
- ATTR_FILE_D = open(ATTR_FILE,'a+')
- for elt in File_Attibutes:
+ logging.info('%d File attr' % len(File_Attributes))
+ logging.info('%d Sym attr' % len(Sym_Attributes))
+ logging.info('PaTcHCoUnT:Diffs-%d Moves-%d News-%d Delets-%d SymDiffs-%d SymNews-%d HardDiffs-%d HardNews-%d Verbatim -%d\n' % \
+ (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt, Hard_Diff_Cnt, Hard_New_Cnt, Verbatim_Cnt))
+ print('PaTcHCoUnT:Diffs-%d Moves-%d News-%d Delets-%d SymDiffs-%d SymNews-%d HardDiffs-%d HardNews-%d Verbatim -%d\n' % \
+ (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt, Hard_Diff_Cnt, Hard_New_Cnt, Verbatim_Cnt))
+
+ # There could be duplicates, TODO, can check before adding..
+ ATTR_FILE_D = open(ATTR_FILE, 'a+')
+ for elt in File_Attributes:
ATTR_FILE_D.write(elt)
- for elt in Sym_Attibutes:
+ for elt in Sym_Attributes:
ATTR_FILE_D.write(elt)
ATTR_FILE_D.close()
Partition_Doc_SymLinks.close()
- Partition_Read_SymLinks = open(SymLinkDoc,'r+')
+ Partition_Doc_HardLinks.close()
+ Partition_Read_SymLinks = open(SymLinkDoc, 'r+')
+ Partition_Read_HardLinks = open(HardLinkDoc, 'r+')
Partition_Doc.write(Partition_Read_SymLinks.read())
- Partition_Doc.write('PaTcHCoUnT:%d %d %d %d %d %d\n' % (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt))
- Partition_Doc_SymLinks.close()
+ for line in reversed(Partition_Read_HardLinks.readlines()):
+ Partition_Doc.write(line)
+ Partition_Doc.write('PaTcHCoUnT:%d %d %d %d %d %d %d %d\n' % \
+ (Diff_Cnt, Move_Cnt, New_Cnt, Del_Cnt, Sym_Diff_Cnt, Sym_New_Cnt, Hard_Diff_Cnt, Hard_New_Cnt))
+ Partition_Read_SymLinks.close()
+ Partition_Read_HardLinks.close()
Partition_Doc.close()
os.remove(SymLinkDoc)
+ os.remove(HardLinkDoc)
- if Diff_Cnt + Move_Cnt + New_Cnt+ Del_Cnt + Sym_Diff_Cnt + Sym_New_Cnt + Verbatim_Cnt + os.path.getsize(ATTR_FILE) == 0:
- print('No Delta Generated for %s - %s' % (PART_NAME,OUT_DIR))
+ if Diff_Cnt + Move_Cnt + New_Cnt + Del_Cnt + Sym_Diff_Cnt + Sym_New_Cnt + Verbatim_Cnt + Hard_Diff_Cnt + \
+ Hard_New_Cnt + os.path.getsize(ATTR_FILE) == 0:
+ print('No Delta Generated for %s - %s' % (PART_NAME, OUT_DIR))
logging.info('No Delta Generated for %s' % PART_NAME)
shutil.rmtree(OUT_DIR)
-def Apply_Container_Delta(a_apk, b_apk, new_apk, a_folder, g_output_dir):
-
- #CONTROL NAMES, AND PRINTS AND ERROR CASES... SHOULD NOT PROCEED.
- print 'ApplyContainerDelta - ', b_apk, a_folder, g_output_dir
- shutil.copy2(g_output_dir+'/'+b_apk, g_output_dir+'/temp')
- temp_apk = '../'+g_output_dir+'/'+b_apk
- Patch = 'Patch_'+b_apk
- ensure_dir_exists(Patch)
- shutil.copy2(g_output_dir+'/'+b_apk, Patch+'/'+b_apk)
-
- #Size issue on Device side?? shd check this
- subprocess.call(['unzip','-q', Patch+'/'+b_apk, '-d', Patch])
- with open(g_output_dir+'/PATCH.txt', 'r') as f_new:
- lines = set(f_new.read().splitlines())
- for line in lines:
- #print('Action ==> %s' % line)
- #Action, Path, Patch = line.split('|')
- Items = line.split('|')
- Action = Items[0]
- Path = Items[1]
- ActualPath = a_folder+'/'+Path
- PatchPath = Patch+'/'+Path
- SrcPath = g_output_dir+'/'+path_leaf(Path)
- #print('Action ==> %s Path ==> %s ' % (Action, Path))
- if line[0] == 'c':
- patchName = g_output_dir+'/'+Items[2]
- #print('Apply Patch: ActualPath %s SrcPath %s PatchLoc %s ' % (PatchPath, ActualPath, patchName))
- subprocess.call([DIFFPATCH_UTIL,ActualPath,ActualPath,patchName])
- WorkingDir = os.getcwd()
- os.chdir(WorkingDir+"/"+"temp_a")
- subprocess.call(['cp', '--parents', Path, '../'+Patch])
- os.chdir(WorkingDir)
- elif line[0] == 's':
- WorkingDir = os.getcwd()
- os.chdir(WorkingDir+"/"+"temp_a")
- subprocess.call(['cp', '--parents', Path, '../'+Patch])
- os.chdir(WorkingDir)
- else:
- print('Apply_Container_Delta - Unknown Error')
- #print('Touch all files and set common attributes for DIFF generation')
- WorkingDir = os.getcwd()
- os.chdir(WorkingDir+"/"+Patch)
-
- CONTAINER_DATE = '200011111111.11'
- CONTAINER_MODE = '0755'
- subprocess.call(['find', '.', '-type', 'l', '-exec', 'rm', '-rf', '{}', ';'])
- subprocess.call(['find', '.', '-exec', 'touch', '-t', CONTAINER_DATE, '{}', ';'])
- subprocess.call(['chmod', '-R', CONTAINER_MODE, '../'+Patch])
-
- print 'Update Intermediate Archive'
- #subprocess.call(['zip','-ryX', b_apk, '*'])
- subprocess.call(['zip','-ryX', b_apk] + glob.glob('*'))
- os.chdir(WorkingDir)
- #print('Apply Path completed - Now create diff for this and place in patch folder')
- #print os.getcwd()
- print('Patch Applied, Create Final Diff - %s %s' % (g_output_dir+'/'+b_apk,new_apk))
- patchName = ('New'+'_%s'+DIFF_SUFFIX) % (b_apk)
- patchLoc = '%s/%s' % (g_output_dir, patchName)
-
- subprocess.call([DIFF_UTIL, Patch+'/'+b_apk ,new_apk,patchLoc])
-
- #Only on HOST... for testing
- if TEST_MODE == 'TRUE':
- UpgradedName = '%s_Upgraded' % (b_apk)
- subprocess.call([DIFFPATCH_UTIL,Patch+'/'+b_apk,UpgradedName,patchLoc])
-
- #This is file only with NEWS and empty diffs and same files.
- if TEST_MODE == 'FALSE':
- os.remove(g_output_dir+'/'+b_apk)
- os.rename(g_output_dir+'/temp', g_output_dir+'/'+b_apk)
- shutil.rmtree(Patch)
-
def IsSymlink(info):
- return (info.external_attr >> 16) == 0120777
+ return (info.external_attr >> 16) == 0120777
+
def NewFiles(src, dest):
- print src,dest
- subprocess.call(['cp','-rp', src,dest])
- #try:
+ print src, dest
+ subprocess.call(['cp', '-rp', src, dest])
+ #try:
#shutil.copytree(src, dest)
- #except OSError as e:
- # If the error was caused because the source wasn't a directory
- #if e.errno == errno.ENOTDIR:
- #shutil.copy2(src, dest)
- #else:
- #print('Directory not copied. Error: %s' % e)
+ #except OSError as e:
+ # If the error was caused because the source wasn't a directory
+ #if e.errno == errno.ENOTDIR:
+ #shutil.copy2(src, dest)
+ #else:
+ #print('Directory not copied. Error: %s' % e)
+
def measure_two_filediffs(src, dst):
patchLoc = 'temp.patch'
- subprocess.call([DIFF_UTIL,src,dst,patchLoc])
+    # TODO: check the diff tool's return code and raise an error on failure
+ subprocess.call([DIFF_UTIL, src, dst, patchLoc])
result_size = os.path.getsize(patchLoc)
os.remove(patchLoc)
return result_size
+
+def ishardlink(path):
+ if os.stat(path).st_nlink > 1:
+ return True
+ return False
+
+
+def get_inode(path):
+ return os.stat(path).st_ino
+
+
+def get_hardlinks(base):
+ hardlinks_dict = {}
+ inodes_dict = {}
+
+    for root, directories, files in os.walk(base, topdown=True, followlinks=False):
+ for file in sorted(files):
+ file_name = os.path.join(root, file)
+ if not os.path.islink(file_name) and ishardlink(file_name):
+ inode = get_inode(file_name)
+ rel_path = os.path.relpath(file_name, base)
+ if inode not in inodes_dict:
+ inodes_dict[inode] = rel_path
+ else:
+ hardlinks_dict[rel_path] = inodes_dict[inode]
+
+ return hardlinks_dict
+
+
def Get_Files(path):
all_files = []
all_dirs = []
for root, directories, filenames in os.walk(path, topdown=False, followlinks=False):
for directory in directories:
#DirName = os.path.join(root+'/',directory)
- DirName = os.path.join(root,directory)
+ DirName = os.path.join(root, directory)
if os.path.islink(DirName):
logging.debug('This is symlink pointing to dir -%s' % DirName)
all_files.append(os.path.relpath(DirName, path))
elif not os.listdir(DirName):
#print('*****Empty Directory******* -%s', DirName)
- #This should NOT be appended ??? Empty dir shd b considered
+            # This should NOT be appended ??? Empty dir should be considered
all_dirs.append(os.path.relpath(DirName, path))
else:
all_dirs.append(os.path.relpath(DirName, path))
for filename in filenames:
- FileName = os.path.join(root,filename)
+ FileName = os.path.join(root, filename)
all_files.append(os.path.relpath(FileName, path))
all_files.sort()
USAGE_DOCSTRING = """
- Generate Delta using BASEOLD AND BASE NEW
- Attributes is optional
- Usage: CreatePatch.py UPDATE_TYPE PARTNAME OLDBASE NEWBASE OUTFOLDER
+ Generate Delta using BASEOLD AND BASE NEW
+ Attributes is optional
+ Usage: CreatePatch.py UPDATE_TYPE PARTNAME OLDBASE NEWBASE OUTFOLDER
"""
-def Usage(docstring):
- print docstring.rstrip("\n")
- print COMMON_DOCSTRING
+def Usage(docstring):
+ print docstring.rstrip("\n")
+ print COMMON_DOCSTRING
if __name__ == '__main__':
main()
-