From 87f709beb1ef003e265deb0646a055c68c4a00bc Mon Sep 17 00:00:00 2001 From: Pyry Haulos Date: Tue, 15 Sep 2015 12:10:25 -0700 Subject: [PATCH] Improve external/fetch_sources.py * Cache fetched packages, making fetch_sources.py significantly faster when package reference has not been updated. * Keep sources in a separate sub-directory * Add initial support for git repositories Change-Id: If24b18efb6d052147bfb4ed23ceeb40e2b391f3d --- external/.gitignore | 4 - external/fetch_sources.py | 186 +++++++++++++++++++++++++---------------- external/libpng/.gitignore | 2 + external/libpng/CMakeLists.txt | 4 +- external/zlib/.gitignore | 2 + external/zlib/CMakeLists.txt | 4 +- 6 files changed, 122 insertions(+), 80 deletions(-) delete mode 100644 external/.gitignore create mode 100644 external/libpng/.gitignore create mode 100644 external/zlib/.gitignore diff --git a/external/.gitignore b/external/.gitignore deleted file mode 100644 index 6838e6a..0000000 --- a/external/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -libpng/* -!libpng/CMakeLists.txt -zlib/* -!zlib/CMakeLists.txt diff --git a/external/fetch_sources.py b/external/fetch_sources.py index ff0c78e..71204f5 100644 --- a/external/fetch_sources.py +++ b/external/fetch_sources.py @@ -27,118 +27,160 @@ import tarfile import urllib2 import hashlib import argparse +import subprocess + +sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts")) + +from build.common import * EXTERNAL_DIR = os.path.realpath(os.path.normpath(os.path.dirname(__file__))) -class SourcePackage: - def __init__(self, url, filename, checksum, dstDir, postExtract=None): +def computeChecksum (data): + return hashlib.sha256(data).hexdigest() + +class Source: + def __init__(self, baseDir, extractDir): + self.baseDir = baseDir + self.extractDir = extractDir + + def clean (self): + fullDstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir) + if os.path.exists(fullDstPath): + shutil.rmtree(fullDstPath, ignore_errors=False) + +class 
SourcePackage (Source): + def __init__(self, url, filename, checksum, baseDir, extractDir = "src", postExtract=None): + Source.__init__(self, baseDir, extractDir) self.url = url self.filename = filename self.checksum = checksum - self.dstDir = dstDir + self.archiveDir = "packages" self.postExtract = postExtract -def computeChecksum (data): - return hashlib.sha256(data).hexdigest() + def clean (self): + Source.clean(self) + self.removeArchives() -def clean (pkg): - srcPath = os.path.join(EXTERNAL_DIR, pkg.dstDir) + def update (self): + if not self.isArchiveUpToDate(): + self.fetchAndVerifyArchive() - for entry in os.listdir(srcPath): - if entry == "CMakeLists.txt": - continue + # \note No way to verify that extracted contents match archive, re-extract + Source.clean(self) + self.extract() - fullPath = os.path.join(srcPath, entry) + def removeArchives (self): + archiveDir = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir) + if os.path.exists(archiveDir): + shutil.rmtree(archiveDir, ignore_errors=False) - if os.path.isfile(fullPath): - os.unlink(fullPath) - elif os.path.isdir(fullPath): - shutil.rmtree(fullPath, ignore_errors=False) + def isArchiveUpToDate (self): + archiveFile = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename) + if os.path.exists(archiveFile): + return computeChecksum(readFile(archiveFile)) == self.checksum + else: + return False -def fetch (pkg): - print "Fetching %s" % pkg.url + def fetchAndVerifyArchive (self): + print "Fetching %s" % self.url - req = urllib2.urlopen(pkg.url) - data = req.read() - checksum = computeChecksum(data) - dstPath = os.path.join(EXTERNAL_DIR, pkg.filename) + req = urllib2.urlopen(self.url) + data = req.read() + checksum = computeChecksum(data) + dstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename) - if checksum != pkg.checksum: - raise Exception("Checksum mismatch for %s, exepected %s, got %s" % (pkg.filename, pkg.checksum, checksum)) + if checksum != 
self.checksum: + raise Exception("Checksum mismatch for %s, expected %s, got %s" % (self.filename, self.checksum, checksum)) - out = open(dstPath, 'wb') - out.write(data) - out.close() + if not os.path.exists(os.path.dirname(dstPath)): + os.mkdir(os.path.dirname(dstPath)) -def extract (pkg): - print "Extracting %s to %s" % (pkg.filename, pkg.dstDir) + writeFile(dstPath, data) - srcPath = os.path.join(EXTERNAL_DIR, pkg.filename) - tmpPath = os.path.join(EXTERNAL_DIR, ".extract-tmp-%s" % pkg.dstDir) - dstPath = os.path.join(EXTERNAL_DIR, pkg.dstDir) - archive = tarfile.open(srcPath) + def extract (self): + print "Extracting %s to %s/%s" % (self.filename, self.baseDir, self.extractDir) - if os.path.exists(tmpPath): - shutil.rmtree(tmpPath, ignore_errors=False) + srcPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename) + tmpPath = os.path.join(EXTERNAL_DIR, ".extract-tmp-%s" % self.baseDir) + dstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir) + archive = tarfile.open(srcPath) - os.mkdir(tmpPath) + if os.path.exists(tmpPath): + shutil.rmtree(tmpPath, ignore_errors=False) - archive.extractall(tmpPath) - archive.close() + os.mkdir(tmpPath) - extractedEntries = os.listdir(tmpPath) - if len(extractedEntries) != 1 or not os.path.isdir(os.path.join(tmpPath, extractedEntries[0])): - raise Exception("%s doesn't contain single top-level directory" % pkg.filename) + archive.extractall(tmpPath) + archive.close() - topLevelPath = os.path.join(tmpPath, extractedEntries[0]) + extractedEntries = os.listdir(tmpPath) + if len(extractedEntries) != 1 or not os.path.isdir(os.path.join(tmpPath, extractedEntries[0])): + raise Exception("%s doesn't contain single top-level directory" % self.filename) - for entry in os.listdir(topLevelPath): - if os.path.exists(os.path.join(dstPath, entry)): - print " skipping %s" % entry - continue + topLevelPath = os.path.join(tmpPath, extractedEntries[0]) - shutil.move(os.path.join(topLevelPath, entry), dstPath) + 
if not os.path.exists(dstPath): + os.mkdir(dstPath) - shutil.rmtree(tmpPath, ignore_errors=True) + for entry in os.listdir(topLevelPath): + if os.path.exists(os.path.join(dstPath, entry)): + raise Exception("%s exists already" % entry) - if pkg.postExtract != None: - pkg.postExtract(dstPath) + shutil.move(os.path.join(topLevelPath, entry), dstPath) + + shutil.rmtree(tmpPath, ignore_errors=True) + + if self.postExtract != None: + self.postExtract(dstPath) + +class GitRepo (Source): + def __init__(self, url, revision, baseDir, extractDir = "src"): + Source.__init__(self, baseDir, extractDir) + self.url = url + self.revision = revision + + def update (self): + fullDstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir) + + if not os.path.exists(fullDstPath): + execute(["git", "clone", "--no-checkout", self.url, fullDstPath]) + + pushWorkingDir(fullDstPath) + try: + execute(["git", "fetch", self.url]) + execute(["git", "checkout", self.revision]) + finally: + popWorkingDir() def postExtractLibpng (path): shutil.copy(os.path.join(path, "scripts", "pnglibconf.h.prebuilt"), os.path.join(path, "pnglibconf.h")) PACKAGES = [ - SourcePackage("http://zlib.net/zlib-1.2.8.tar.gz", - "zlib-1.2.8.tar.gz", - "36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d", - "zlib"), - SourcePackage("http://prdownloads.sourceforge.net/libpng/libpng-1.6.17.tar.gz", - "libpng-1.6.17.tar.gz", - "a18233c99e1dc59a256180e6871d9305a42e91b3f98799b3ceb98e87e9ec5e31", - "libpng", - postExtract = postExtractLibpng), + SourcePackage( + "http://zlib.net/zlib-1.2.8.tar.gz", + "zlib-1.2.8.tar.gz", + "36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d", + "zlib"), + SourcePackage( + "http://prdownloads.sourceforge.net/libpng/libpng-1.6.17.tar.gz", + "libpng-1.6.17.tar.gz", + "a18233c99e1dc59a256180e6871d9305a42e91b3f98799b3ceb98e87e9ec5e31", + "libpng", + postExtract = postExtractLibpng), ] def parseArgs (): parser = argparse.ArgumentParser(description = "Fetch external 
sources") - parser.add_argument('--clean-only', dest='cleanOnly', action='store_true', default=False, - help='Clean only, do not fetch/extract') - parser.add_argument('--keep-archive', dest='keepArchive', action='store_true', default=False, - help='Keep archive after extracting') + parser.add_argument('--clean', dest='clean', action='store_true', default=False, + help='Remove sources instead of fetching') return parser.parse_args() if __name__ == "__main__": args = parseArgs() for pkg in PACKAGES: - clean(pkg) - - if args.cleanOnly: - continue - - fetch(pkg) - extract(pkg) - - if not args.keepArchive: - os.unlink(os.path.join(EXTERNAL_DIR, pkg.filename)) + if args.clean: + pkg.clean() + else: + pkg.update() diff --git a/external/libpng/.gitignore b/external/libpng/.gitignore new file mode 100644 index 0000000..671f540 --- /dev/null +++ b/external/libpng/.gitignore @@ -0,0 +1,2 @@ +src +packages diff --git a/external/libpng/CMakeLists.txt b/external/libpng/CMakeLists.txt index 838f892..0cbed64 100644 --- a/external/libpng/CMakeLists.txt +++ b/external/libpng/CMakeLists.txt @@ -4,8 +4,8 @@ if (NOT DE_DEFS) message(FATAL_ERROR "Include Defs.cmake") endif () -if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/png.h") - set(DEFAULT_PNG_SRC_PATH ${CMAKE_CURRENT_SOURCE_DIR}) +if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/src/png.h") + set(DEFAULT_PNG_SRC_PATH ${CMAKE_CURRENT_SOURCE_DIR}/src) else () # Assume build inside Android source tree set(DEFAULT_PNG_SRC_PATH "../libpng") diff --git a/external/zlib/.gitignore b/external/zlib/.gitignore new file mode 100644 index 0000000..671f540 --- /dev/null +++ b/external/zlib/.gitignore @@ -0,0 +1,2 @@ +src +packages diff --git a/external/zlib/CMakeLists.txt b/external/zlib/CMakeLists.txt index 53c257a..26de7ab 100644 --- a/external/zlib/CMakeLists.txt +++ b/external/zlib/CMakeLists.txt @@ -4,8 +4,8 @@ if (NOT DE_DEFS) message(FATAL_ERROR "Include Defs.cmake") endif () -if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/zlib.h") - set(DEFAULT_ZLIB_SRC_PATH 
${CMAKE_CURRENT_SOURCE_DIR}) +if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/src/zlib.h") + set(DEFAULT_ZLIB_SRC_PATH ${CMAKE_CURRENT_SOURCE_DIR}/src) else () # Assume build inside Android source tree set(DEFAULT_ZLIB_SRC_PATH "../zlib/src") -- 2.7.4