2015-02-13 23:38:43 +00:00
|
|
|
# Copyright (c) 2015, Google Inc.
|
|
|
|
#
|
|
|
|
# Permission to use, copy, modify, and/or distribute this software for any
|
|
|
|
# purpose with or without fee is hereby granted, provided that the above
|
|
|
|
# copyright notice and this permission notice appear in all copies.
|
|
|
|
#
|
|
|
|
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
|
|
|
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
|
|
|
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
|
|
|
|
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|
|
|
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
|
|
|
|
# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
|
|
|
|
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
|
|
|
|
|
|
"""Extracts archives."""
|
|
|
|
|
|
|
|
|
2016-12-02 04:48:00 +00:00
|
|
|
import hashlib
|
2015-02-27 22:23:16 +00:00
|
|
|
import optparse
|
2015-02-13 23:38:43 +00:00
|
|
|
import os
|
|
|
|
import os.path
|
|
|
|
import tarfile
|
|
|
|
import shutil
|
|
|
|
import sys
|
|
|
|
import zipfile
|
|
|
|
|
|
|
|
|
2015-02-27 22:23:16 +00:00
|
|
|
def CheckedJoin(output, path):
  """
  CheckedJoin returns os.path.join(output, path). It does sanity checks to
  ensure the resulting path is under output, but shouldn't be used on untrusted
  input.
  """
  normalized = os.path.normpath(path)
  # Reject absolute paths and any path that, after normalisation, still
  # starts with '.' (which catches '..' components that would escape the
  # output directory).
  if os.path.isabs(normalized) or normalized.startswith('.'):
    raise ValueError(normalized)
  return os.path.join(output, normalized)
|
2015-02-13 23:38:43 +00:00
|
|
|
|
|
|
|
|
2017-05-11 20:27:01 +01:00
|
|
|
class FileEntry(object):
  """A regular file found in an archive.

  Attributes:
    path: archive-relative path of the file.
    mode: permission bits, or None when the archive does not record them.
    fileobj: file-like object from which the file's contents can be read.
  """

  def __init__(self, path, mode, fileobj):
    # Simple record type; instances are produced while iterating archives.
    self.fileobj = fileobj
    self.mode = mode
    self.path = path
|
|
|
|
|
|
|
|
|
|
|
|
class SymlinkEntry(object):
  """A symbolic link found in an archive.

  Attributes:
    path: archive-relative path of the link itself.
    mode: permission bits, or None when the archive does not record them.
    target: the path the symlink points at.
  """

  def __init__(self, path, mode, target):
    # Simple record type; instances are produced while iterating archives.
    self.target = target
    self.mode = mode
    self.path = path
|
|
|
|
|
|
|
|
|
2015-02-13 23:38:43 +00:00
|
|
|
def IterateZip(path):
  """
  IterateZip opens the zip file at path and returns a generator of entry objects
  for each file in it.
  """
  with zipfile.ZipFile(path, 'r') as zip_file:
    for info in zip_file.infolist():
      # Directory entries end in '/'; they carry no contents, so only real
      # files are yielded. Zip files do not record POSIX modes, hence None.
      if not info.filename.endswith('/'):
        yield FileEntry(info.filename, None, zip_file.open(info))
|
2015-02-13 23:38:43 +00:00
|
|
|
|
|
|
|
|
2017-05-11 20:27:01 +01:00
|
|
|
def IterateTar(path, compression):
  """
  IterateTar opens the tar.gz or tar.bz2 file at path and returns a generator of
  entry objects for each file in it.
  """
  with tarfile.open(path, 'r:' + compression) as tar_file:
    for info in tar_file:
      # The member kinds are mutually exclusive, so the order of these
      # checks does not matter.
      if info.isfile():
        yield FileEntry(info.name, info.mode, tar_file.extractfile(info))
      elif info.issym():
        yield SymlinkEntry(info.name, None, info.linkname)
      elif info.isdir():
        # Directories are recreated implicitly when their contents are
        # extracted; nothing to emit.
        continue
      else:
        # Devices, FIFOs, hard links, etc. are not expected in these archives.
        raise ValueError('Unknown entry type "%s"' % (info.name, ))
|
|
|
|
|
|
|
|
|
|
|
|
def main(args):
  """Extracts ARCHIVE into OUTPUT, skipping the work when already up to date.

  args: command-line arguments, excluding the program name. Two positional
      arguments are expected (ARCHIVE and OUTPUT), plus the optional
      --no-prefix flag.

  Returns a process exit status: 0 on success (including the "nothing to
  do" cases), 1 on usage error. Raises ValueError on a malformed or
  unrecognized archive.
  """
  parser = optparse.OptionParser(usage='Usage: %prog ARCHIVE OUTPUT')
  parser.add_option('--no-prefix', dest='no_prefix', action='store_true',
                    help='Do not remove a prefix from paths in the archive.')
  options, args = parser.parse_args(args)

  if len(args) != 2:
    parser.print_help()
    return 1

  archive, output = args

  if not os.path.exists(archive):
    # Skip archives that weren't downloaded.
    return 0

  # Hash the archive so a repeated extraction of identical bytes can be
  # skipped. The file must be opened in binary mode: text mode would mangle
  # bytes on Windows and fail to decode arbitrary bytes on Python 3.
  with open(archive, 'rb') as f:
    sha256 = hashlib.sha256()
    while True:
      chunk = f.read(1024 * 1024)
      if not chunk:
        break
      sha256.update(chunk)
    digest = sha256.hexdigest()

  stamp_path = os.path.join(output, ".boringssl_archive_digest")
  if os.path.exists(stamp_path):
    with open(stamp_path) as f:
      if f.read().strip() == digest:
        print("Already up-to-date.")
        return 0

  if archive.endswith('.zip'):
    entries = IterateZip(archive)
  elif archive.endswith('.tar.gz'):
    entries = IterateTar(archive, 'gz')
  elif archive.endswith('.tar.bz2'):
    entries = IterateTar(archive, 'bz2')
  else:
    raise ValueError(archive)

  try:
    if os.path.exists(output):
      print("Removing %s" % (output, ))
      shutil.rmtree(output)

    print("Extracting %s to %s" % (archive, output))
    prefix = None
    num_extracted = 0
    for entry in entries:
      # Even on Windows, zip files must always use forward slashes.
      if '\\' in entry.path or entry.path.startswith('/'):
        raise ValueError(entry.path)

      if not options.no_prefix:
        new_prefix, rest = entry.path.split('/', 1)

        # Ensure the archive is consistent.
        if prefix is None:
          prefix = new_prefix
        if prefix != new_prefix:
          raise ValueError((prefix, new_prefix))
      else:
        rest = entry.path

      # Extract the file into the output directory.
      fixed_path = CheckedJoin(output, rest)
      if not os.path.isdir(os.path.dirname(fixed_path)):
        os.makedirs(os.path.dirname(fixed_path))
      if isinstance(entry, FileEntry):
        with open(fixed_path, 'wb') as out:
          shutil.copyfileobj(entry.fileobj, out)
      elif isinstance(entry, SymlinkEntry):
        os.symlink(entry.target, fixed_path)
      else:
        raise TypeError('unknown entry type')

      # Fix up permissions if needbe.
      # TODO(davidben): To be extra tidy, this should only track the execute bit
      # as in git.
      if entry.mode is not None:
        os.chmod(fixed_path, entry.mode)

      # Print every 100 files, so bots do not time out on large archives.
      num_extracted += 1
      if num_extracted % 100 == 0:
        print("Extracted %d files..." % (num_extracted,))
  finally:
    # Close the generator so the underlying archive handle is released even
    # when extraction fails partway through.
    entries.close()

  # Record the digest only after a fully successful extraction.
  with open(stamp_path, 'w') as f:
    f.write(digest)

  print("Done. Extracted %d files." % (num_extracted,))

  return 0
|
|
|
|
|
|
|
|
|
|
|
|
# Script entry point: forward the command-line arguments (minus the program
# name) and use main's return value as the process exit status.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
|