import os
import sys
import shutil
import tarfile
import urllib2
import hashlib
import argparse
import subprocess

sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts"))

from build.common import *
# Absolute path of the directory containing this script; all package and
# extraction paths below are resolved relative to it.
EXTERNAL_DIR = os.path.realpath(os.path.normpath(os.path.dirname(__file__)))
def computeChecksum (data):
    """Return the sha256 hex digest of the given byte string."""
    return hashlib.sha256(data).hexdigest()

class Source:
    """Base class for an external source living under EXTERNAL_DIR/baseDir/extractDir."""

    def __init__ (self, baseDir, extractDir):
        self.baseDir = baseDir          # directory under EXTERNAL_DIR for this source
        self.extractDir = extractDir    # sub-directory holding the extracted/checked-out tree

    def clean (self):
        # Remove the extracted tree only; downloaded archives (if any) are
        # handled by subclasses.
        fullDstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir)
        if os.path.exists(fullDstPath):
            shutil.rmtree(fullDstPath, ignore_errors=False)

class SourcePackage (Source):
    """A tarball that is downloaded, checksum-verified and extracted under baseDir.

    The archive itself is cached in baseDir/packages/ and only re-fetched when
    its sha256 no longer matches the pinned checksum.
    """

    def __init__ (self, url, filename, checksum, baseDir, extractDir = "src", postExtract=None):
        Source.__init__(self, baseDir, extractDir)
        self.url = url
        self.filename = filename
        self.checksum = checksum        # expected sha256 hex digest of the archive
        self.archiveDir = "packages"    # archive cache dir, relative to baseDir
        self.postExtract = postExtract  # optional callable invoked with the extract path

    def clean (self):
        Source.clean(self)
        self.removeArchives()

    def update (self):
        if not self.isArchiveUpToDate():
            self.fetchAndVerifyArchive()

        # \note No way to verify that extracted contents match archive, re-extract
        Source.clean(self)
        self.extract()

    def removeArchives (self):
        # Fix: original referenced the module-global 'pkg' (leaked from the old
        # free functions) instead of 'self'; that only worked by accident from
        # the __main__ loop and raised NameError elsewhere.
        archiveDir = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir)
        if os.path.exists(archiveDir):
            shutil.rmtree(archiveDir, ignore_errors=False)

    def isArchiveUpToDate (self):
        """Return True if the cached archive exists and matches the pinned checksum."""
        # Fix: same 'pkg' -> 'self' correction as removeArchives.
        archiveFile = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename)
        if os.path.exists(archiveFile):
            return computeChecksum(readFile(archiveFile)) == self.checksum
        else:
            return False

    def fetchAndVerifyArchive (self):
        """Download the archive, verify its checksum and store it in the cache dir.

        Raises Exception on checksum mismatch (nothing is written in that case).
        """
        print("Fetching %s" % self.url)

        req = urllib2.urlopen(self.url)
        data = req.read()
        checksum = computeChecksum(data)
        dstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename)

        if checksum != self.checksum:
            raise Exception("Checksum mismatch for %s, expected %s, got %s" % (self.filename, self.checksum, checksum))

        # makedirs: create any missing intermediate directories as well.
        if not os.path.exists(os.path.dirname(dstPath)):
            os.makedirs(os.path.dirname(dstPath))

        writeFile(dstPath, data)

    def extract (self):
        """Extract the cached archive into baseDir/extractDir.

        The archive must contain exactly one top-level directory; its contents
        are moved into the destination. Raises Exception if a destination entry
        already exists (caller is expected to clean first).
        """
        print("Extracting %s to %s/%s" % (self.filename, self.baseDir, self.extractDir))

        srcPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename)
        tmpPath = os.path.join(EXTERNAL_DIR, ".extract-tmp-%s" % self.baseDir)
        dstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir)
        archive = tarfile.open(srcPath)

        if os.path.exists(tmpPath):
            shutil.rmtree(tmpPath, ignore_errors=False)

        os.mkdir(tmpPath)

        archive.extractall(tmpPath)
        archive.close()

        extractedEntries = os.listdir(tmpPath)
        if len(extractedEntries) != 1 or not os.path.isdir(os.path.join(tmpPath, extractedEntries[0])):
            raise Exception("%s doesn't contain single top-level directory" % self.filename)

        topLevelPath = os.path.join(tmpPath, extractedEntries[0])

        if not os.path.exists(dstPath):
            os.mkdir(dstPath)

        for entry in os.listdir(topLevelPath):
            if os.path.exists(os.path.join(dstPath, entry)):
                raise Exception("%s exists already" % entry)

            shutil.move(os.path.join(topLevelPath, entry), dstPath)

        shutil.rmtree(tmpPath, ignore_errors=True)

        if self.postExtract != None:
            self.postExtract(dstPath)

class GitRepo (Source):
    """A git repository checked out at a pinned revision under baseDir/extractDir."""

    def __init__ (self, url, revision, baseDir, extractDir = "src"):
        Source.__init__(self, baseDir, extractDir)
        self.url = url
        self.revision = revision    # commit/tag/branch to check out

    def update (self):
        """Clone on first use, then fetch and check out the pinned revision."""
        fullDstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir)

        if not os.path.exists(fullDstPath):
            execute(["git", "clone", "--no-checkout", self.url, fullDstPath])

        # execute/pushWorkingDir/popWorkingDir come from build.common.
        pushWorkingDir(fullDstPath)
        try:
            execute(["git", "fetch", self.url])
            execute(["git", "checkout", self.revision])
        finally:
            popWorkingDir()
def postExtractLibpng (path):
    # libpng ships a prebuilt configuration header; copy it to where the
    # build system expects to find it.
    prebuiltHeader = os.path.join(path, "scripts", "pnglibconf.h.prebuilt")
    targetHeader = os.path.join(path, "pnglibconf.h")
    shutil.copy(prebuiltHeader, targetHeader)
# External packages managed by this script; sha256 checksums pin the exact
# archive contents.
PACKAGES = [
    SourcePackage(
        "http://zlib.net/zlib-1.2.8.tar.gz",
        "zlib-1.2.8.tar.gz",
        "36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d",
        "zlib"),
    SourcePackage(
        "http://prdownloads.sourceforge.net/libpng/libpng-1.6.17.tar.gz",
        "libpng-1.6.17.tar.gz",
        "a18233c99e1dc59a256180e6871d9305a42e91b3f98799b3ceb98e87e9ec5e31",
        "libpng",
        postExtract = postExtractLibpng),
]
def parseArgs (argv=None):
    """Parse command line options.

    argv: optional argument list; defaults to sys.argv[1:] as usual. Added
    (backward-compatibly) so the parser can be driven programmatically.
    """
    parser = argparse.ArgumentParser(description = "Fetch external sources")
    parser.add_argument('--clean', dest='clean', action='store_true', default=False,
                        help='Remove sources instead of fetching')
    return parser.parse_args(argv)
if __name__ == "__main__":
    args = parseArgs()
    # Either wipe or refresh every registered source depending on --clean.
    for pkg in PACKAGES:
        if args.clean:
            pkg.clean()
        else:
            pkg.update()