Commit
Merge branch 'feature/compression_rework' into develop
jpenney committed Nov 29, 2011
2 parents 556410b + 3016f41 commit 25b5b08
Showing 6 changed files with 158 additions and 134 deletions.
28 changes: 15 additions & 13 deletions pdar/__init__.py
@@ -14,16 +14,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.

PDAR_VERSION = '1.0'
DEFAULT_HASH_TYPE = 'sha1' #backwards compat
'''Portable Delta ARchives'''

PDAR_VERSION = '1.0.1'
DEFAULT_HASH_TYPE = 'sha1' # backwards compat

# pylint: disable=W0401
from pdar.archive import *
from pdar.entry import *
from pdar.errors import *
from pdar.patcher import *
# pylint: enable=W0401

import os
import sys


__author__ = 'Jason Penney'
__copyright__ = 'Copyright 2011, Jason Penney'
@@ -35,13 +39,11 @@
__description__ = 'Portable Delta ARchives'
__long_description__ = '''
Supports creating and applying **P**ortable **D**elta **Ar**chive
(PDAR) files. They can be used to distribute collections of patches in
the form of binary deltas wrapped in a single file.
'''

pdar_ = sys.modules[__name__]
pdar_.__doc__ = os.linesep.join(
[pdar_.__description__, '', pdar_.__long_description__])
del pdar_


__pdar__ = sys.modules[__name__]
__pdar__.__doc__ = os.linesep.join(
[__pdar__.__description__, '', __pdar__.__long_description__])
del __pdar__
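
The __pdar__ block above documents a small trick: the module fetches its own module object out of sys.modules and assembles its docstring from the description fields. A minimal, self-contained sketch of the same pattern, with illustrative module metadata:

# sketch: a module that assembles its own __doc__ at import time
import os
import sys

__description__ = 'Example module'
__long_description__ = 'Longer prose describing the example module.'

_mod = sys.modules[__name__]  # the module object itself
_mod.__doc__ = os.linesep.join(
    [_mod.__description__, '', _mod.__long_description__])
del _mod  # keep the temporary name out of the module namespace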
117 changes: 63 additions & 54 deletions pdar/archive.py
@@ -14,28 +14,28 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from bz2 import BZ2File
from datetime import datetime
from gzip import GzipFile
from pdar.entry import *
from pdar.errors import *
from pdar.patcher import DEFAULT_PATCHER_TYPE
from pdar import PDAR_VERSION, DEFAULT_HASH_TYPE
from pkg_resources import parse_version
from shutil import rmtree
from tempfile import SpooledTemporaryFile, mkstemp
import filecmp
import fnmatch
import logging
import os
import re
import tarfile

__all__ = ['PDArchive', 'PDAR_MAGIC', 'PDAR_ID']

PDAR_MAGIC = 'PDAR'
PDAR_ID = '%s%03d%c' % (
PDAR_MAGIC, int(float(PDAR_VERSION)), 0)
PDAR_MAGIC, int(parse_version(PDAR_VERSION)[0]), 0)

ARCHIVE_HEADER_VERSION = 'pdar_version'
ARCHIVE_HEADER_CREATED = 'pdar_created_datetime'
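
Note on the PDAR_ID change above: int(float(PDAR_VERSION)) worked for '1.0' but raises ValueError for the new three-component '1.0.1', so the major number is now taken from pkg_resources.parse_version instead. A sketch of the resulting on-disk ID, assuming the tuple-of-padded-strings behaviour of the setuptools parse_version of this era (newer setuptools returns a Version object instead):

from pkg_resources import parse_version

PDAR_MAGIC = 'PDAR'
PDAR_VERSION = '1.0.1'

# legacy parse_version('1.0.1') -> ('00000001', '00000000', '00000001', '*final')
major = int(parse_version(PDAR_VERSION)[0])

# magic, three-digit major version, NUL terminator
pdar_id = '%s%03d%c' % (PDAR_MAGIC, major, 0)
print repr(pdar_id)  # 'PDAR001\x00'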
@@ -59,15 +59,13 @@ def __init__(self, orig_path, dest_path, patterns=['*'], payload=None,
pattern_re = re.compile(pattern_re)

def target_gen(path):
for root, dirs, files in os.walk(path):
for root, dummy, files in os.walk(path):
for dest in (
os.path.normcase(
os.path.join(root, f)) for f in files \
if pattern_re.match(f)):
yield os.path.relpath(dest, path)

from pprint import pprint

orig_targets = set(target_gen(orig_path))
dest_targets = set(target_gen(dest_path))
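
target_gen is what feeds those two sets: it walks a tree and yields paths, relative to its root, whose basenames match the compiled pattern regex. A standalone sketch of the idea; the pattern_re construction sits mostly outside the visible hunk, so the fnmatch.translate line below is an assumption:

import fnmatch
import os
import re

def collect_targets(path, patterns):
    """Walk path, returning relative paths whose names match any pattern."""
    pattern_re = re.compile('|'.join(fnmatch.translate(p) for p in patterns))
    targets = set()
    for root, dummy, files in os.walk(path):
        for name in files:
            if pattern_re.match(name):
                full = os.path.normcase(os.path.join(root, name))
                targets.add(os.path.relpath(full, path))
    return targets

# entries are then derived from set algebra, e.g. files present only in
# the destination tree:
# added = collect_targets(dest_path, ['*']) - collect_targets(orig_path, ['*'])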

@@ -111,17 +109,17 @@ def target_gen(path):

for target in matches:
copied_targets.append((target, source, target))

if move_match:
target = move_match
moved_targets.append((target, source, target))

def add_entry(targets, cls):
args = list(targets)
args += [ orig_path, dest_path, self.hash_type ]
entry = cls.create(*args)
args += [orig_path, dest_path, self.hash_type]
entry = cls.create(*args) # pylint: disable=W0142
if entry:
logging.info("adding '%s' entry for: %s"
logging.info("adding '%s' entry for: %s"
% (entry.type_code, entry.target))
self._patches.append(entry)
else:
@@ -152,9 +150,8 @@ def add_entry(targets, cls):

else:
raise InvalidParameterError(
"You must pass either 'orig_path', 'dest_path', and 'patterns' "
" OR 'payload'")

"You must pass either 'orig_path', 'dest_path', and "
"'patterns' OR 'payload'")

@property
def hash_type(self):
@@ -176,34 +173,43 @@ def save(self, path, force=False):
if os.path.exists(path) and not force:
raise RuntimeError('File already exists: %s' % path)
with SpooledTemporaryFile() as tmpfile:
tfile = tarfile.open(
    mode='w', fileobj=tmpfile,
    format=tarfile.PAX_FORMAT,
    pax_headers={
        ARCHIVE_HEADER_VERSION: unicode(self.pdar_version),
        ARCHIVE_HEADER_CREATED: unicode(
            self.created_datetime.isoformat()),
        ARCHIVE_HEADER_HASH_TYPE: unicode(self.hash_type)})
try:
    for patch in self.patches:
        patch.pax_dump(tfile)
finally:
    tfile.close()
tmpfile.flush()

# find best compression
archive_path = None
for comp in [GzipFile, BZ2File]:
dummy, test_path = mkstemp()
os.close(dummy)
compfile = comp(test_path, mode='wb',
compresslevel=9)
tmpfile.seek(0)
compfile.writelines(tmpfile)
compfile.close()
if not archive_path or os.path.getsize(
archive_path) > os.path.getsize(test_path):
if archive_path and os.path.exists(archive_path):
os.unlink(archive_path)
archive_path = test_path

with open(path, 'wb') as patchfile:
patchfile.write(PDAR_ID)
with open(archive_path, 'rb') as archive:
    patchfile.writelines(archive)
patchfile.flush()
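
The reworked save() above replaces the old always-gzip wrapper with a write-uncompressed-then-compare step: the tar payload is compressed once per candidate codec and the smallest output wins. A simplified sketch of that selection loop, with illustrative names; unlike the loop above, it also unlinks the losing candidate instead of leaving the temporary file behind:

import os
from bz2 import BZ2File
from gzip import GzipFile
from tempfile import mkstemp

def smallest_compressed_copy(src_path):
    """Compress src_path with gzip and bz2; return the path of the smaller result."""
    best_path = None
    for codec in (GzipFile, BZ2File):
        fd, test_path = mkstemp()
        os.close(fd)
        comp = codec(test_path, mode='wb', compresslevel=9)
        with open(src_path, 'rb') as src:
            comp.writelines(src)
        comp.close()
        if best_path is None or (os.path.getsize(test_path)
                                 < os.path.getsize(best_path)):
            if best_path is not None:
                os.unlink(best_path)  # drop the previous winner
            best_path = test_path
        else:
            os.unlink(test_path)  # drop the losing candidate
    return best_path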

def patch(self, path=None, patcher=None):
if patcher is None:
@@ -213,17 +219,20 @@

@classmethod
def load(cls, path):
with SpooledTemporaryFile() as archive:
    with open(path, 'rb') as patchfile:
        file_id = patchfile.read(len(PDAR_ID))
        if not file_id.startswith(PDAR_MAGIC):
            raise PDArchiveFormatError("Not a pdar file: %s" % (path))
        if file_id != PDAR_ID:
            raise PDArchiveFormatError(
                "Unsupported pdar version ID '%s': %s"
                % (file_id[len(PDAR_MAGIC):-1], path))
        archive.writelines(patchfile)
    archive.seek(0)
    patches = []
    payload = {}
    tfile = tarfile.open(mode='r:*', fileobj=archive)
try:
payload.update(tfile.pax_headers)
if 'created_datetime' in payload:
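
On the read side, load() no longer hands tarfile the raw file object: it validates and skips the fixed-size ID header, copies the remainder into a seekable temporary file, and lets mode 'r:*' sniff whether the body is gzip, bzip2, or uncompressed. A condensed, standalone sketch of that step (the function name and signature are illustrative):

import tarfile
from tempfile import SpooledTemporaryFile

def list_archive_members(path, id_len):
    """List tar member names from a file laid out as ID header + compressed tar."""
    with SpooledTemporaryFile() as body:
        with open(path, 'rb') as patchfile:
            patchfile.read(id_len)      # skip the fixed-size PDAR ID header
            body.writelines(patchfile)  # copy the rest somewhere seekable
        body.seek(0)
        # 'r:*' asks tarfile to auto-detect the compression scheme
        tfile = tarfile.open(mode='r:*', fileobj=body)
        try:
            return [member.name for member in tfile.getmembers()]
        finally:
            tfile.close()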
48 changes: 21 additions & 27 deletions pdar/console.py
@@ -17,30 +17,32 @@
# limitations under the License.

import argparse
import locale
import logging
import os
import pdar
import pdar.errors
import shutil


def pdar_create(args):
archive = pdar.PDArchive(orig_path=args.path1,
dest_path=args.path2,
patterns=args.patterns)
if args.backup:
if os.path.exists(args.archive_name):
backup_name = '.'.join([args.archive_name, 'bak'])
logging.debug("creating backup of '%s' -> '%s'" % (
args.archive_name, backup_name))
args.force = True
shutil.copy(args.archive_name,
'.'.join([args.archive_name, 'bak']))
logging.debug("saving archive: %s" % args.archive_name)
archive.save(args.archive_name, args.force)
logging.debug("Success!")
return 0



def pdar_apply(args):
archive = pdar.PDArchive.load(args.archive_name)
if args.output_path:
@@ -54,8 +56,6 @@ def pdar_apply(args):
archive.patch(path)
return 0




def pdar_info(args):
_pdar_info_header = '''\
@@ -77,10 +77,9 @@

_pdar_entry_line_format = ' %s%ds %s%ds %ss' % (
'%(type)', max_type_str_width,
'%(size)', max_size_str_width,
'%(target)')

print _pdar_info_header % {
'archive_name': args.archive_name,
'pdar_version': archive.pdar_version,
Expand All @@ -95,23 +94,22 @@ def pdar_info(args):
print _pdar_entry_line_format % {
'type': '-' * max_type_str_width,
'size': '-' * max_size_str_width,
'target': '-' * max_target_str_width}

for entry in sorted(entry_info,
key=lambda ent: ent[1]):
print _pdar_entry_line_format % {
'size': entry[0],
'target': entry[1],
'type': entry[2]}

return 0



def pdar_cmd():

if locale.getlocale() == (None, None):
locale.setlocale(locale.LC_ALL,'')
locale.setlocale(locale.LC_ALL, '')

parser = argparse.ArgumentParser(
description='utility for manipulating portable delta archives')
@@ -143,7 +141,7 @@ def pdar_cmd():
'backup existing archive before overwriting '
'(implies force, existing backups may be lost).'),
dest='backup', action='store_true')

parser_create.add_argument(
'archive_name',
help='path to output pdar archive')
@@ -162,8 +160,7 @@
parser_apply = subparsers.add_parser(
'apply',
description='apply pdar archive as patch',
help='apply pdar archive as patch')
parser_apply.set_defaults(func=pdar_apply)
parser_apply.add_argument(
'-o', '--output-path',
@@ -176,7 +173,7 @@
parser_apply.add_argument(
'path',
help='path to which pdar will be applied')

parser_info = subparsers.add_parser(
'info',
description='show info about pdar archive',
@@ -187,12 +184,12 @@
help='path to output pdar archive')

args = parser.parse_args()

# configure logging

logging.basicConfig(format="%(message)s",
level=args.log_level)

if args.log_level == logging.DEBUG:
parser.exit(args.func(args))

@@ -205,6 +202,3 @@ def pdar_cmd():
except Exception, err:
logging.error(str(err))
parser.exit(1)
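
Command dispatch throughout this file uses argparse's set_defaults(func=...) idiom: each subparser binds its handler, and the entry point simply calls args.func(args). A stripped-down sketch (the handler and arguments are illustrative):

import argparse

def do_info(args):
    print 'info for %s' % args.archive_name
    return 0

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
parser_info = subparsers.add_parser('info')
parser_info.set_defaults(func=do_info)
parser_info.add_argument('archive_name')

args = parser.parse_args(['info', 'example.pdar'])
status = args.func(args)  # dispatches to do_info; the real tool wraps this in parser.exit()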


