Skip to content
Permalink
boar-py3-devel
Switch branches/tags

Name already in use

A tag already exists with the provided branch name. Many Git commands accept both tag and branch names, so creating this branch may cause unexpected behavior. Are you sure you want to create this branch?
Go to file
 
 
Cannot retrieve contributors at this time
executable file 1595 lines (1438 sloc) 64 KB
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
# Copyright 2010 Mats Ekberg
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
from __future__ import division
from __future__ import print_function
from past.builtins import execfile
from builtins import map
from builtins import str
from builtins import range
from builtins import object
from past.utils import old_div
import sys
import os
import time
import cProfile
import posixpath
import errno
import shutil
from optparse import OptionParser, SUPPRESS_HELP
from blobrepo import repository
from blobrepo.sessions import bloblist_fingerprint
from boar_exceptions import *
import client
import front
from front import Front, set_file_contents, verify_repo
import workdir
from common import *
import deduplication
from ordered_dict import OrderedDict
from boar_common import *
import boarserve
json = get_json_module()
def beta(f):
    """Decorator for "cmd_*" functions whose interface is not yet stable.

    Prints a beta warning before delegating to the wrapped command.
    Fixed: the wrapper now returns the wrapped command's return value
    (the original discarded it).
    """
    assert f.__name__.startswith("cmd_")
    def beta_warning(*args, **kwargs):
        print("WARNING: The '%s' command should be considered beta, meaning that there may be bugs\n and usage details will probably change in the future." % f.__name__[4:])
        return f(*args, **kwargs)
    return beta_warning
# Version string reported in the help text.
BOAR_VERSION = "boar-devel.16-Nov-2012"
def print_help():
    """Print the top-level command overview and version number to stdout."""
    print("""Boar version %s
Usage: boar <command>
Commands:
ci Commit changes in a work directory
clone Create or update a clone of a repository
co Check out files from the repository
diffrepo Check if two repositories are identical
getprop Get session properties, such as file ignore lists
info Show some information about the current workdir
import Import the contents of a folder into your repository
list Show the contents of a repository or snapshot
locate Check if some non-versioned files are already present in a repository
log Show changes and log messages
ls Show the contents of a specific sub directory of a snapshot
mkrepo Create a new repository
mksession Create a new session
setprop Set session properties, such as file ignore lists
serve Make a repository accessible over the network
status List any changes in the current work directory
update Update the current work directory from the repository
verify Verify the integrity of the repository
For most commands, you can type "boar <command> --help" to get more
information. The full command reference is available online at
http://code.google.com/p/boar/wiki/CommandReference
""" % BOAR_VERSION)
# Shared error message used by every command that requires a boar workdir.
not_a_workdir_msg = "This directory is not a boar workdir"
def list_sessions(front, show_meta = False, verbose = False):
    """Print each session name with its revision count, sorted by name.

    Sessions whose names start with "__" (meta sessions) are skipped
    unless show_meta is true. The verbose flag is accepted for interface
    compatibility but is not used here.
    """
    revision_counts = {}
    for snapshot_id in front.get_session_ids():
        info = front.get_session_info(snapshot_id)
        session_name = info.get("name", "<no name>")
        if session_name.startswith("__") and not show_meta:
            continue
        revision_counts[session_name] = revision_counts.get(session_name, 0) + 1
    for session_name in sorted(revision_counts):
        print(session_name, "(" + str(revision_counts[session_name]) + " revs)")
def list_revisions(front, session_name):
    """Print a one-line summary (id, date, file count, delta/standalone,
    log message) for every revision of the named session.

    Raises UserError if the session does not exist.
    """
    sids = front.get_session_ids(session_name)
    if not sids:
        raise UserError("There is no such session: %s" % session_name)
    for sid in sids:
        session_info = front.get_session_info(sid)
        log_message = session_info.get("log_message", "<not specified>")
        bloblist = front.get_session_bloblist(sid)
        # A snapshot with a base id is stored as a delta against that base.
        if front.get_base_id(sid):
            is_base = "(delta)"
        else:
            is_base = "(standalone)"
        print("Revision id", str(sid), "(" + session_info['date'] + "),", \
              len(bloblist), "files,", is_base, "Log: %s" % (log_message))
def dump_all_revisions(front):
    """Print one machine-readable json line per revision:
    (id, base id, session name, fingerprint, log message, deleted flag).
    """
    sids = front.get_session_ids()
    deleted_sids = front.get_deleted_snapshots()
    for sid in sids:
        session_info = front.get_session_info(sid)
        log_message = session_info.get("log_message", None)
        name = session_info.get("name", None)
        base = front.get_base_id(sid)
        deleted = sid in deleted_sids
        #bloblist = front.get_session_bloblist(sid)
        #print json.dumps((sid, base, name, front.get_session_fingerprint(sid), log_message, deleted)), front.get_session_load_stats(sid)
        print(json.dumps((sid, base, name, front.get_session_fingerprint(sid), log_message, deleted)))
def list_files(front, session_name, revision):
    """Print every file in the given snapshot with its size in kilobytes.

    revision may be given as a string; it is converted to int. Raises
    UserError if the revision is not an integer or does not belong to
    the named session.
    """
    try:
        revision = int(revision)
    except (ValueError, TypeError):
        # Was a bare "except:", which also swallowed KeyboardInterrupt etc.
        raise UserError("Illegal revision string: '%s'" % revision)
    session_info = front.get_session_info(revision)
    if session_info is None or session_info.get("name") != session_name:
        raise UserError("There is no such session/revision")
    for info in front.get_session_bloblist(revision):
        # Sizes are byte counts; floor-divide (same result as old_div for ints).
        print(info['filename'], str(info['size'] // 1024 + 1) + "k")
def verify_manifests(front, sid, verbose = False, required_manifests = []):
    """Verify all manifest files (md5sum listings) found in snapshot sid.

    Every file listed in a manifest must exist in the snapshot with a
    matching md5. Manifests whose filename embeds their own md5 are also
    checksummed. required_manifests is a list of md5 strings; the check
    fails unless each one matches a manifest present in the snapshot.

    Returns True if everything verified, False otherwise.
    """
    assert type(verbose) == bool
    bloblist = front.get_session_bloblist(sid)
    blobdict = bloblist_to_dict(bloblist)
    manifests = [f for f in blobdict.keys() if parse_manifest_name(f)[0]]
    manifests.sort()
    result = True
    remaining_required_manifests = set(required_manifests)
    for manifest_filename in manifests:
        hashname, expected_manifest_hash = parse_manifest_name(manifest_filename)
        if hashname not in ("md5",):
            warning("Cannot verify manifest %s - hash type not supported" % manifest_filename)
            continue
        manifest_result = "OK"
        manifest_blob = blobdict[manifest_filename]['md5sum']
        manifest_basedir, _ = posixpath.split(manifest_filename)
        manifest_raw = front.get_blob(manifest_blob).read()
        # Checksum the manifest itself unconditionally so that the
        # --require bookkeeping below also works for manifests without an
        # embedded hash (the original only computed it conditionally,
        # which could raise NameError).
        calculated_manifest_md5 = md5sum(manifest_raw)
        if expected_manifest_hash:
            if expected_manifest_hash != calculated_manifest_md5:
                print("%s CORRUPTED MANIFEST" % manifest_filename)
                result = False
                continue
            else:
                print("%s is valid" % manifest_filename)
        # Fixed: the original did "del <set>[key]", which is a TypeError.
        remaining_required_manifests.discard(calculated_manifest_md5)
        md5data = parse_md5sum(manifest_raw.decode("utf-8-sig"))
        for md5, filename in md5data:
            session_path = posixpath.join(manifest_basedir, filename)
            if session_path in blobdict and blobdict[session_path]['md5sum'] == md5:
                if verbose: print(manifest_filename, filename, "OK")
            else:
                result = False
                # Fixed typo: was "manifest_result" here but "manifests_result"
                # everywhere else, so the summary line always said "OK".
                manifest_result = "ERROR"
                print(manifest_filename, filename, "ERROR")
        print(manifest_filename, manifest_result)
    if remaining_required_manifests:
        result = False
        print("Missing manifests:", remaining_required_manifests)
    return result
def cmd_locate(args):
    """Check whether the given files/directories already exist (matched by
    md5 content, not by name) in a session, printing OK/Missing per file.
    """
    if len(args) == 0:
        args = ["--help"]
    parser = OptionParser(usage="usage: boar locate <session name> [[file/dir] [file/dir] ...]")
    (options, args) = parser.parse_args(args)
    if len(args) == 0:
        raise UserError("You must specify which session to look in.")
    sessionName = args[0]
    files_to_look_for = args[1:]
    if not files_to_look_for:
        # Default to the current directory when no paths are given.
        files_to_look_for = ["."]
    files_to_look_for = [tounicode(os.path.abspath(fn)) for fn in files_to_look_for]
    front = connect_to_repo(get_repo_url())
    revision = front.find_last_revision(sessionName)
    if not revision:
        raise UserError("No such session: %s" % sessionName)
    missing = []
    found = 0
    # Map md5 checksum -> blobinfos, so content lookup is a dict access.
    inverted_bloblist = invert_bloblist(front.get_session_bloblist(revision))
    for root in files_to_look_for:
        if os.path.isdir(root):
            tree = get_tree(root, absolute_paths = True)
            tree.sort()
        else:
            tree = [root]
        for f in tree:
            csum = md5sum_file(f)
            blobinfos = list(map(dict, inverted_bloblist.get(csum, [])))
            if not blobinfos:
                print("Missing:", f)
                missing.append(f)
            else:
                print("OK:", f)
                found += 1
                # Show every session path that has this file's content.
                for bi in blobinfos:
                    print(" " + bi['filename'])
    print("%s files exists in the given session, %s do not." % (found, len(missing)))
@beta
def cmd_scanblocks(args):
    """Print (as json) the deduplication recipe for a single file.

    args must contain exactly one element: the path of the file to scan.
    """
    # Removed an unused "import hashlib" that the original carried here.
    front = connect_to_repo(get_repo_url())
    filename, = args
    recipe = deduplication.recepify(front, filename)
    print(json.dumps(recipe, indent=2))
@beta
def cmd_exportrev(args):
    """This command will export revision metadata and the blobs introduced
    by each revision. This might be useful e.g. when you need to make
    incremental backups of a repository.
    """
    parser = OptionParser(usage="usage: boar exportrev [<start rev>]:[<end rev>] > <destination path>")
    (options, args) = parser.parse_args(args)
    assert len(args) == 2, "Too few arguments"
    front = connect_to_repo(get_repo_url())
    arg = args[0]
    dest = os.path.abspath(args[1])
    assert os.path.exists(dest) and os.path.isdir(dest)
    # The revision argument is either "N", or a range "N:M" where either
    # side may be empty (defaults: 1 and the highest used revision).
    if ":" in arg:
        a, b = arg.split(":")
        if a == "":
            a = 1
        if b == "":
            b = front.repo.get_highest_used_revision()
        start_index = int(a)
        last_index = int(b)
    else:
        start_index = last_index = int(arg)
    revspec = "s %d to %d" % (start_index, last_index) if start_index != last_index else " " + str(start_index)
    print("Exporting revision%s to %s" % (revspec, dest))
    assert start_index <= last_index
    # Refuse to overwrite any existing export directories.
    for i in range(start_index, last_index+1):
        if os.path.exists(os.path.join(dest, str(i))):
            raise UserError("Path already exists: %s"% os.path.join(dest, str(i)))
    all_session_ids = front.get_session_ids()
    for i in range(start_index, last_index+1):
        if i not in all_session_ids:
            raise UserError("Revision %d does not exist in the repository" % i)
    blobs_by_rev = front.repo.get_introduced_blobs()
    for i in range(start_index, last_index+1):
        print("Exporting", i)
        src = front.repo.get_session_path(i)
        shutil.copytree(src, os.path.join(dest, str(i)))
        # Copy only the blobs first introduced by this revision, which is
        # what makes the export incremental.
        for md5 in blobs_by_rev[i]:
            shutil.copy(front.repo.get_blob_path(md5), os.path.join(dest, str(i), md5))
def cmd_status(args):
    """Print an svn-style A/M/D/R (and with -v, unchanged/ignored) listing
    of the differences between the workdir and its checked-out snapshot.
    """
    parser = OptionParser(usage="usage: boar status [options]")
    parser.add_option("-v", "--verbose", dest = "verbose", action="store_true",
                      help="Show information about unchanged files")
    parser.add_option("-q", "--quiet", dest = "quiet", action="store_true", default=False,
                      help="Do not print any progress information")
    (options, args) = parser.parse_args(args)
    wd = workdir.init_workdir(ucwd)
    if not wd:
        raise UserError(not_a_workdir_msg)
    if args:
        raise UserError("Too many arguments")
    if wd.front.is_deleted(wd.revision):
        raise UserError("The current snapshot has been deleted in the repository.")
    wd.use_progress_printer(not options.quiet)
    unchanged_files, new_files, modified_files, deleted_files, renamed_files, ignored_files \
        = wd.get_changes_with_renames(wd.revision)
    filestats = {}
    # Tag each path with its change type (later tags win, as before).
    for flag, paths in (("A", new_files), ("M", modified_files), ("D", deleted_files)):
        for path in paths:
            filestats[path] = flag
    for old_name, new_name in renamed_files:
        filestats[old_name + " => " + new_name] = "R"
    if options.verbose:
        for path in unchanged_files:
            filestats[path] = " "
        for path in ignored_files:
            filestats[path] = "i"
    for path in sorted(filestats):
        print(filestats[path], path)
def cmd_info(args):
    """Print repository url, session/path, snapshot id and root directory
    of the enclosing workdir.
    """
    parser = OptionParser(usage="usage: boar info")
    (options, args) = parser.parse_args(args)
    if args:
        raise UserError("Info command does not accept any arguments.")
    wd = workdir.load_workdir_parameters(ucwd)
    if not wd:
        raise UserError(not_a_workdir_msg)
    offset = "/" + wd["offset"] if wd["offset"] else ""
    print("Repository:", wd["repoUrl"])
    print("Session / Path:", wd["sessionName"] + offset)
    print("Snapshot id:", wd["revision"])
    print("Workdir root:", wd["root"])
def cmd_mkrepo(args):
    """Create a new repository at the given (not yet existing) path."""
    if len(args) == 0:
        args = ["--help"]
    parser = OptionParser(usage="usage: boar mkrepo [-d|--enable-deduplication] <new repo path>")
    parser.add_option("-d", "--enable-deduplication", dest = "dedup", action="store_true",
                      help="Enable deduplication for this repository")
    (options, args) = parser.parse_args(args)
    if len(args) > 1:
        raise UserError("Too many arguments")
    repopath, = args
    if os.path.exists(repopath):
        raise UserError("File or directory already exists: %s" % repopath)
    repository.create_repository(repopath, enable_deduplication = options.dedup)
def cmd_list(args):
    """List sessions, the revisions of one session, or the files of one
    revision, depending on how many arguments are given.
    """
    parser = OptionParser(usage="usage: boar list [session name [snapshot id]]")
    parser.add_option("-m", "--show-meta", dest = "show_meta", action="store_true",
                      help="Show meta sessions (stores session properties, normally hidden)")
    parser.add_option("-d", "--dump", dest = "dump", action="store_true",
                      help="Dump a machine readable listing of all revisions and their properties")
    (options, args) = parser.parse_args(args)
    if len(args) > 2:
        raise UserError("Too many arguments")
    front = connect_to_repo(get_repo_url())
    if options.dump:
        if args:
            raise UserError("a dump can not be combined with other arguments")
        dump_all_revisions(front)
        return
    if not args:
        list_sessions(front, options.show_meta)
    elif len(args) == 1:
        list_revisions(front, args[0])
    else:
        list_files(front, args[0], args[1])
class _ChangePrinter(object):
    """Helper for "boar log": computes and prints the changes introduced
    by individual revisions.
    """
    def __init__(self, front):
        self.front = front
        # NOTE(review): these two caches appear unused within this class —
        # bloblists are fetched through get_cached_bloblist() instead.
        self.bloblists = {}
        self.bloblists_order = []
    def get_comparer(self, sid):
        """Return a tree comparer of snapshot sid against its predecessor
        (or against an empty bloblist for a session's first revision)."""
        front = self.front
        current_bloblist = get_cached_bloblist(front, sid)
        previous_rev = front.get_predecessor(sid)
        previous_bloblist = get_cached_bloblist(front, previous_rev) if previous_rev else []
        return treecompare_bloblists(previous_bloblist, current_bloblist)
    def print_changes(self, sid):
        """Print an svn-style A/M/D/R listing of the changes in sid."""
        comparer = self.get_comparer(sid)
        unchanged_files, added_files, modified_files, deleted_files, renamed_files = comparer.as_sets()
        filestats = {}
        for f in added_files:
            filestats[f] = "A"
        for f in modified_files:
            filestats[f] = "M"
        for f in deleted_files:
            filestats[f] = "D"
        for old_name, new_name in renamed_files:
            filestats[old_name + " => " + new_name] = "R"
        print("Changed paths:")
        for f in sorted(filestats.keys()):
            print(filestats[f], f)
    def is_affected(self, sid, path):
        """Return True if path (an exact file, or a directory prefix of a
        changed file) was touched by revision sid."""
        comparer = self.get_comparer(sid)
        all_changed_filenames = comparer.all_changed_filenames()
        affected = path in all_changed_filenames
        if not affected:
            for affected_filename in all_changed_filenames:
                if is_child_path(path, affected_filename):
                    return True
        return affected
def _parse_range(s, default_lower, default_upper):
assert default_lower <= default_upper
try:
return int(s), int(s)
except ValueError:
pass
m = re.match("^(\d*):(\d*)$", s)
if not m:
raise UserError("Ranges must be given as N:M where N and M may be an empty string or an integer")
lower = int(m.group(1)) if m.group(1) else default_lower
upper = int(m.group(2)) if m.group(2) else default_upper
return lower, upper
def cmd_log(args):
    """Show revision history for a session (or for the workdir's session),
    optionally restricted to a revision range or to a sub-path.
    """
    parser = OptionParser(usage="usage: boar log [-v|--verbose] [-r|--revision <rev>] [<session name>[/path]]")
    parser.add_option("-v", "--verbose", dest = "verbose", action="store_true",
                      help="List detailed change information about each revision")
    parser.add_option("-r", "--revision", action="store", dest = "revision_range",
                      help='Only show the specified revision(s). Accepts a single revision, or a range on the form "N:M"')
    (options, args) = parser.parse_args(args)
    if len(args) > 1:
        raise UserError("Too many arguments")
    session, offset = None, None
    # Figure out which repository to talk to. Precedence: repo given on
    # the command line, then an enclosing workdir, then the environment.
    cmdline_repo = get_repo_url_commandline()
    env_repo = get_repo_url_env()
    wd = workdir.init_workdir(ucwd)
    if cmdline_repo:
        if args:
            session, offset = parse_sessionpath(args[0])
        front = connect_to_repo(cmdline_repo)
    elif wd:
        front = wd.front
        if args and (args[0].startswith("/") or args[0].startswith("\\")):
            raise UserError("Path must not be absolute")
        # Interpret the path argument relative to the cwd inside the workdir.
        workdir_cwd = strip_path_offset(wd.root, ucwd, separator=os.sep)
        offset = u""
        if args:
            offset = wd.wd_sessionpath(os.path.join(workdir_cwd, args[0]))
        session = wd.sessionName
    elif env_repo:
        if args:
            session, offset = parse_sessionpath(args[0])
        front = connect_to_repo(env_repo)
    else:
        raise UserError("You must use this command in a workdir or specify a repository to operate on")
    if options.revision_range:
        range_start, range_end = _parse_range(options.revision_range, 1, VERY_LARGE_NUMBER)
    else:
        range_start, range_end = 1, front.get_highest_used_revision()
    anything_printed = False
    change_printer = _ChangePrinter(front)
    if session != None and not front.find_last_revision(session):
        raise UserError("No such session: %s" % session)
    for sid in reversed(front.get_session_ids(session)):
        if not (range_start <= sid <= range_end):
            continue
        # When a sub-path was given, skip revisions that did not touch it.
        if offset and not change_printer.is_affected(sid, offset):
            continue
        anything_printed = True
        session_info = front.get_session_info(sid)
        if session_info['name'] == "__deleted":
            continue
        log_message = session_info.get("log_message", "")
        linecount = len(log_message.splitlines())
        line_s = "lines" if linecount != 1 else "line"
        print("-" * 80)
        print("r%s | %s | %s | %s log %s" % (sid, session_info['name'],
              session_info.get('date', "<no date>"), linecount, line_s))
        if options.verbose:
            change_printer.print_changes(sid)
        print()
        if log_message != "":
            print(log_message)
    # Close the last entry with a separator line.
    if anything_printed:
        print("-" * 80)
def cmd_ls(args):
    """List the contents of a session sub-directory (or all sessions when
    no argument is given).
    """
    parser = OptionParser(usage="usage: boar ls <session name>[/path]")
    parser.add_option("-r", "--revision", action="store", dest = "revision", type="int",
                      help="The revision to list (defaults to latest)")
    parser.add_option("-v", "--verbose", dest = "verbose", action="store_true",
                      help="List more information about the files.")
    (options, args) = parser.parse_args(args)
    front = connect_to_repo(get_repo_url())
    if len(args) == 0 and options.revision:
        # Only a revision given: derive the session name from it.
        session_info = front.get_session_info(options.revision)
        session_name = session_info.get('name', None)
        path = u""
    elif len(args) == 0 and not options.revision:
        # No arguments at all: just list the sessions.
        list_sessions(front, show_meta = False, verbose = options.verbose)
        return
    elif len(args) > 1:
        raise UserError("Too many arguments")
    else:
        session_name, path = split_path_from_start(args[0])
        path = path.rstrip("/")
    if options.revision:
        revision = options.revision
    else:
        revision = front.find_last_revision(session_name)
        if not revision:
            raise UserError("There is no session with the name '%s'" % session_name)
    session_info = front.get_session_info(revision)
    if session_info == None or session_info.get("name") != session_name:
        raise UserError("There is no such session/revision")
    def print_info(info, path, seen_dirs):
        # Print one listing entry: the immediate subdirectory containing
        # the file (printed once), or the file itself.
        # NOTE(review): the seen_dirs parameter is shadowed by the
        # closed-over "subdirs" set below; callers pass that same set,
        # so the behavior is the same — confirm before relying on it.
        if path == info['filename']:
            subpath = os.path.basename(path)
        else:
            subpath = strip_path_offset(path, info['filename'])
        if "/" in subpath:
            dirname, rest = split_path_from_start(subpath)
            if dirname not in subdirs:
                print(dirname + "/")
                subdirs.add(dirname)
        elif options.verbose:
            print(subpath, str(old_div(info['size'],1024)+1) + "kB")
        else:
            print(subpath)
    subdirs = set()
    anything_printed = False
    bloblist = get_cached_bloblist(front, revision)
    for info in sorted_bloblist(bloblist):
        if is_child_path(path, info['filename']) or path == info['filename']:
            print_info(info, path, subdirs)
            anything_printed = True
    if path != "" and not anything_printed:
        raise UserError("No such file or directory found in session: "+path)
def get_cached_bloblist(front, revision):
    """Fetch the bloblist of a revision.

    Caching hook used by the listing/log code; currently a straight
    pass-through to the front.
    """
    bloblist = front.get_session_bloblist(revision)
    return bloblist
def cmd_verify(args):
    """Verify the integrity of the repository (blob checksumming can be
    skipped with --quick)."""
    parser = OptionParser(usage="usage: boar verify [options]")
    parser.add_option("-q", "--quick", dest = "quick", action="store_true",
                      help="Only check that the repository looks reasonably ok (skip blob checksumming)")
    (options, args) = parser.parse_args(args)
    if args:
        raise UserError("Too many arguments")
    front = connect_to_repo(get_repo_url())
    verify_repo(front, verify_blobs = not options.quick, verbose = True)
def cmd_stats(args):
    """Print repository statistics as aligned name/value pairs."""
    parser = OptionParser(usage="usage: boar stats")
    (options, args) = parser.parse_args(args)
    if args:
        raise UserError("Too many arguments")
    front = connect_to_repo(get_repo_url())
    for name, value in front.get_stats():
        print("%-30s %s" % (name, value))
@beta
def cmd_manifests(args):
    """Verify the manifest files of a session's latest snapshot.

    Each -e/--require option adds an md5 checksum that a manifest in the
    snapshot must match, otherwise verification fails.
    """
    parser = OptionParser(usage="usage: boar manifests [options] <session name>")
    parser.add_option("-e", "--require", dest = "required_manifests", action="append", default=[],
                      help="The verification will fail unless a manifest with this md5 checksum exists in the snapshot")
    # Possible future options (from the original draft):
    # -A tests all sessions
    # -r revision to test
    (options, args) = parser.parse_args(args)
    if not args:
        raise UserError("Too few arguments")
    if len(args) > 1:
        raise UserError("Too many arguments")
    invalid = [cs for cs in options.required_manifests if not is_md5sum(cs)]
    if invalid:
        raise UserError("Not a valid md5 checksum: %s" % invalid[0])
    session_name = args[0]
    front = connect_to_repo(get_repo_url())
    latest_sid = front.find_last_revision(session_name)
    if not latest_sid:
        raise UserError("No such session found: %s" % (session_name))
    if not verify_manifests(front, latest_sid, False, options.required_manifests):
        raise UserError("Some manifest files failed verification")
def cmd_repair(args):
    """Repair repairable (soft) corruption — currently the deduplication
    blocks database. Verifies the repo first unless --force is given.
    """
    parser = OptionParser(usage="usage: boar repair [options]")
    parser.add_option("-f", "--force", dest = "force", action="store_true",
                      help="Do not scan for errors before repairing")
    (options, args) = parser.parse_args(args)
    clean = False
    repo_url = get_repo_url()
    if repo_url.startswith("boar://") or repo_url.startswith("boar+"):
        raise UserError("Repairing can only be executed with a local boar repository")
    if options.force:
        front = connect_to_repo(get_repo_url())
    else:
        try:
            front = connect_to_repo(get_repo_url())
            print("Verifying repo before repair...")
            clean = verify_repo(front)
        # SoftCorruptionError is the repairable case; keep going.
        except repository.SoftCorruptionError as e:
            print("Repairable error found:", e)
        except Exception as e:
            print("Possible hard error found (repairing may not help):", e)
        # NOTE(review): if connect_to_repo() itself raised above, "front"
        # is unbound and the code below would fail with NameError — verify.
    if clean:
        print("No errors found. Not repairing anything.")
        return
    repo = front.repo
    if repo.deduplication_enabled():
        # Rebuild the blocks database by re-scanning every raw blob.
        print("Repairing blocks database")
        blobs = repo.get_raw_blob_names()
        repo.blocksdb.begin()
        for blob in blobs:
            print(blob)
            reader = repo.get_blob_reader(blob)
            bc = deduplication.BlockChecksum(repository.DEDUP_BLOCK_SIZE)
            while reader.bytes_left():
                bc.feed_string(reader.read(repository.DEDUP_BLOCK_SIZE))
            for offset, rolling, md5 in bc.harvest():
                repo.blocksdb.add_block(blob, offset, md5)
                repo.blocksdb.add_rolling(rolling)
        repo.blocksdb.commit()
    else:
        print("Nothing to do")
def cmd_import(args):
    """Import a folder into a session (add-only check-in), optionally
    turning the folder into a workdir afterwards.
    """
    parser = OptionParser(usage="usage: boar import [options] <folder to import> <session name>[/path/]")
    parser.add_option("-v", "--verbose", dest = "verbose", action="store_true",
                      help=SUPPRESS_HELP)
    parser.add_option("-m", "--message", dest = "message", metavar = "ARG",
                      help="An optional log message describing this import")
    parser.add_option("-n", "--dry-run", dest = "dry_run", action="store_true", default=False,
                      help="Don't actually do anything. Just show what will happen.")
    parser.add_option("-e", "--allow-empty", dest = "allow_empty", action="store_true", default = False,
                      help="Always check in a new revision, even if there are no changes to commit.")
    parser.add_option("-w", "--create-workdir", dest = "create_workdir", action="store_true", default=False,
                      # Deprecated. Replaced by -W
                      help=SUPPRESS_HELP)
    parser.add_option("-W", "--no-workdir", dest = "no_workdir", action="store_true", default = False,
                      help="Do not turn the imported directory into a workdir")
    parser.add_option("-q", "--quiet", dest = "quiet", action="store_true", default = False,
                      help="Do not print any progress information")
    parser.add_option("--ignore-errors", dest = "ignore_errors", action="store_true", default=False,
                      help="Continue operation even if unreadable files are detected.")
    base_session = None
    if len(args) == 0:
        args = ["--help"]
    (options, args) = parser.parse_args(args)
    if len(args) != 2:
        raise UserError("Wrong number of arguments")
    if options.dry_run:
        # A dry run must not leave workdir metadata behind.
        options.no_workdir = True
    if options.create_workdir and options.no_workdir:
        raise UserError("Conflicting arguments")
    #if not (options.create_workdir or options.no_workdir):
    #    raise UserError("You must either create a workdir or not.")
    path_to_ci = tounicode(os.path.abspath(args[0]))
    wd = workdir.init_workdir(path_to_ci)
    if wd:
        raise UserError("This is already a boar workdir. Use workdir commands to check in changes.")
    # Split "<session name>/<offset>" (backslashes are normalized first).
    import_spec = tounicode(args[1]).replace("\\", "/")
    if "/" in import_spec:
        session_name, session_offset = import_spec.split("/", 1)
        session_offset = session_offset.rstrip("/")
    else:
        session_name, session_offset = import_spec, u""
    if not os.path.exists(path_to_ci):
        raise UserError("Path to check in does not exist: " + path_to_ci)
    repourl = get_repo_url()
    front = connect_to_repo(repourl)
    if not front.find_last_revision(session_name):
        raise UserError("No session with the name '%s' exists." % (session_name))
    wd = workdir.Workdir(repourl, session_name, session_offset, None, path_to_ci)
    wd.setLogOutput(sys.stdout, close_when_done=False)
    wd.use_progress_printer(not options.quiet)
    log_message = None
    if options.message:
        log_message = tounicode(options.message)
    # Import is an add-only check-in that must fail if existing repo files
    # would be modified.
    session_id = wd.checkin(write_meta = not options.no_workdir,
                            fail_on_modifications = True, add_only = True, dry_run = options.dry_run,
                            log_message = log_message, ignore_errors = options.ignore_errors,
                            allow_empty = options.allow_empty)
    if session_id:
        print("Checked in session id", session_id)
    else:
        notice("Nothing was imported.", sys.stdout)
def cmd_update(args):
    """Update the workdir contents to a new revision (latest by default)."""
    parser = OptionParser(usage="usage: boar update [options]")
    parser.add_option("-r", "--revision", action="store", dest = "revision", type="int",
                      help="The revision to update to (defaults to latest)")
    parser.add_option("-i", "--ignore-errors", action="store_true", dest = "ignore_errors",
                      help="Do not abort the update if there are errors while writing.")
    parser.add_option("-c", "--ignore-changes", action="store_true", dest = "ignore_changes",
                      help="Update the workdir revision but do not update the workdir contents.")
    parser.add_option("-q", "--quiet", dest = "quiet", action="store_true", default = False,
                      help="Do not print any progress information")
    (options, args) = parser.parse_args(args)
    if len(args) != 0:
        raise UserError("Update does not accept any non-option arguments")
    wd = workdir.init_workdir(ucwd)
    if not wd:
        raise UserError(not_a_workdir_msg)
    wd.use_progress_printer(not options.quiet)
    new_revision = options.revision
    old_revision = wd.revision
    deleted_old_revision = wd.front.is_deleted(old_revision)
    if not new_revision:
        new_revision = wd.front.find_last_revision(wd.sessionName)
        assert not wd.front.is_deleted(new_revision) # Should not be possible, but could potentially cause deletion of workdir files
    if deleted_old_revision:
        # We can't know what has actually changed in the
        # workdir. Let's assume that any differences with latest revision
        # are modifications, to avoid overwriting any un-committed
        # workdir changes.
        old_revision = new_revision
    if options.ignore_changes:
        wd.update_revision(options.revision)
    else:
        have_added_or_modified = wd.update(
            old_revision = old_revision, new_revision = new_revision, ignore_errors = options.ignore_errors)
        if have_added_or_modified and deleted_old_revision:
            warn("The old revision that you are updating to was deleted from the repository. " +
                 "It had additional files that are not in your current working directory or " +
                 "files whose contents has changed since then. These files cannot be restored. " +
                 "Make sure the workdir does not contain out of date data before you commit.",
                 sys.stdout) # warn to stdout to make sure it comes after "update" messages
    print("Workdir now at revision", wd.revision)
def cmd_ci(args):
    """Commit workdir changes, optionally restricted to the given files."""
    parser = OptionParser(usage="usage: boar ci [options] [files]")
    parser.add_option("-m", "--message", dest = "message", metavar = "ARG",
                      help="An optional log message describing this commit")
    parser.add_option("-a", "--add-only", dest = "addonly", action="store_true",
                      help="Only new files will be committed. Modified and deleted files will be ignored.")
    parser.add_option("-e", "--allow-empty", dest = "allow_empty", action="store_true", default = False,
                      help="Always check in a new revision, even if there are no changes to commit.")
    parser.add_option("-q", "--quiet", dest = "quiet", action="store_true", default = False,
                      help="Do not print any progress information")
    (options, args) = parser.parse_args(args)
    wd = workdir.init_workdir(ucwd)
    if not wd:
        raise UserError(not_a_workdir_msg)
    wd.use_progress_printer(not options.quiet)
    log_message = None
    if options.message:
        log_message = tounicode(options.message)
    included_files = []
    while args: # Add the included files
        fn = args.pop(0)
        if fn.startswith("/") or fn.startswith("\\"):
            raise UserError("Path must not be absolute")
        # Translate the path from cwd-relative to session-relative.
        workdir_cwd = strip_path_offset(wd.root, ucwd, separator=os.sep)
        path_in_session = wd.wd_sessionpath(os.path.join(workdir_cwd, fn))
        path_in_workdir = os.path.join(workdir_cwd, fn)
        abs_path = os.path.join(wd.root, path_in_workdir)
        if os.path.isdir(abs_path):
            raise UserError("Directories can not be committed explicitly")
        included_files.append(path_in_session)
    if not included_files:
        # None tells checkin() to commit everything.
        included_files = None
    session_id = wd.checkin(add_only = options.addonly,
                            log_message = log_message,
                            allow_empty = options.allow_empty,
                            include=included_files)
    if session_id != None:
        print("Checked in session id", session_id)
    else:
        notice("Didn't find any changes to check in.", sys.stdout)
def cmd_relocate(args):
    """Point the current workdir at a repository in a new location."""
    if not args:
        args.append("--help")
    parser = OptionParser(usage="usage: boar relocate <repository>")
    (options, args) = parser.parse_args(args)
    if len(args) > 1:
        raise UserError("Too many arguments")
    new_repo_url = args[0]
    # Connect first so an unreachable repository aborts the relocation
    # before the workdir metadata is rewritten.
    front = client.connect(new_repo_url)
    metafile = os.path.join(workdir.find_meta(ucwd), "info")
    metadata = read_json(metafile)
    metadata['repo_path'] = new_repo_url
    replace_file(metafile, dumps_json(metadata))
    print("New location is %s" % new_repo_url)
def cmd_sessions(args):
    """Print all session names, one per line (or as a json list with -j)."""
    parser = OptionParser(usage="usage: boar sessions [options]")
    parser.add_option("-j", "--json", action="store_true", dest = "json",
                      help="Format output as a json data format list")
    (options, args) = parser.parse_args(args)
    if args:
        raise UserError("Too many arguments")
    stdout = dedicated_stdout()
    globals()["suppress_finishmessage"] = True
    front = connect_to_repo(get_repo_url())
    names = front.get_session_names()
    names.sort()
    if options.json:
        json.dump(names, stdout, indent = 4)
    else:
        for name in names:
            # Fixed for python 3: json.dump above writes text to this same
            # stream, so it is text-mode; writing utf-8 bytes here raised
            # TypeError.
            stdout.write(name)
            stdout.write("\n")
def cmd_revisions(args):
    """Print all revision ids of a session, ascending, one per line (or as
    a json list with -j)."""
    parser = OptionParser(usage="usage: boar revisions [options] <session name>")
    parser.add_option("-j", "--json", action="store_true", dest = "json",
                      help="Format output as a json data format list")
    (options, args) = parser.parse_args(args)
    if len(args) != 1:
        raise UserError("Wrong number of arguments")
    session_name = args[0]
    assert isinstance(session_name, str)
    stdout = dedicated_stdout()
    globals()["suppress_finishmessage"] = True
    front = connect_to_repo(get_repo_url())
    revs = sorted(front.get_session_ids(session_name))
    if options.json:
        json.dump(revs, stdout, indent = 4)
    else:
        for rev in revs:
            stdout.write("%s\n" % rev)
def cmd_contents(args):
    """Print the file listing of a session snapshot, either as json or,
    with --md5sum, in the classic md5sum manifest format."""
    unparsed_args = args
    parser = OptionParser(usage="usage: boar contents [<session name>]")
    parser.add_option("--md5sum", action="store_true", dest = "md5sum",
                      help="Output is compatible with classic md5sum format (excludes some information)")
    parser.add_option("-r", "--revision", action="store", dest = "revision", type="int",
                      help="The revision to fetch")
    parser.add_option("--punycode", action="store_true", dest = "punycode",
                      help="The session name will be given in the punycode format")
    (options, args) = parser.parse_args(args)
    wd = workdir.init_workdir(ucwd)
    # Inside a workdir with no explicit session/revision, default to the
    # checked-out session and revision.
    if wd and not options.revision and not args:
        options.revision = wd.revision
        args = [wd.sessionName]
    elif len(args) == 0:
        raise UserError("You must specify a session name")
    elif len(args) > 1:
        raise UserError("Too many arguments")
    session_name = args.pop(0)
    if options.punycode:
        # Bugfix: str has no decode() on Python 3. Punycode input is ascii
        # text, so encode to bytes first, then decode as punycode.
        session_name = session_name.encode("ascii").decode("punycode")
    stdout = dedicated_stdout()
    globals()["suppress_finishmessage"] = True
    front = connect_to_repo(get_repo_url())
    if options.revision:
        if options.revision in front.get_session_ids(session_name):
            sessionId = options.revision
        else:
            raise UserError("There is no snapshot %s for session %s" % (options.revision, session_name))
    else:
        sessionId = front.find_last_revision(session_name)
        if not sessionId:
            raise UserError("No such session found: %s" % (session_name))
    dump = OrderedDict()
    dump['session_name'] = session_name
    dump['revision'] = sessionId
    dump['fingerprint'] = front.get_session_fingerprint(sessionId)
    entries = []
    for bi in front.get_session_bloblist(sessionId):
        entries.append(OrderedDict([('filename', bi['filename']),
                                    ('size', bi['size']),
                                    ('md5', bi['md5sum']),
                                    ('mtime', bi['mtime'])
                                    ]))
    dump['files'] = entries
    if options.md5sum:
        for bi in dump['files']:
            stdout.write(bi['md5'])
            stdout.write(" *")
            # Bugfix: write the filename as str. Writing utf-8 bytes to the
            # same text stream that receives str elsewhere raises TypeError
            # on Python 3.
            stdout.write(bi['filename'])
            stdout.write("\n")
    else:
        json.dump(dump, stdout, indent = 4)
        stdout.write("\n")
def cmd_mksession(args):
    """Create a new, empty session in the repository."""
    if not args:
        args = ["--help"]
    parser = OptionParser(usage="usage: boar mksession <new session name>")
    options, args = parser.parse_args(args)
    if len(args) != 1:
        raise UserError("mksession requires a single valid session name as argument")
    session_name = args[0]
    front = connect_to_repo(get_repo_url())
    # Refuse to shadow an existing session of the same name.
    if front.find_last_revision(session_name) is not None:
        raise UserError("There already exists a session named '%s'" % (session_name))
    front.mksession(session_name)
    print("New session '%s' was created successfully" % (session_name))
@beta
def cmd_mkstandalone(args):
    """Create a standalone base snapshot for an existing session."""
    if not args:
        args = ["--help"]
    parser = OptionParser(usage="usage: boar mkstandalone <session name>")
    options, args = parser.parse_args(args)
    if len(args) != 1:
        raise UserError("mkstandalone requires a single existing session name as argument")
    session_name = args[0]
    front = connect_to_repo(get_repo_url())
    sid = front.create_base_snapshot(session_name)
    print("New standalone snapshot %s created for session %s" % (sid, session_name))
def cmd_serve(args):
    """Serve a repository over a TCP socket, or over stdio with -S."""
    parser = OptionParser(usage=
"""usage: boar serve [options] <repository path>
WARNING: This Boar server has no authentication or encryption.
Your repository will be open for reading and writing to
anyone who is able to connect to the address and port you
specify.""")
    parser.add_option("-p", "--port", action="store", dest = "port", type="int", default=None, metavar = "PORT",
                      help="The port that the network server will listen to (default: 10001)")
    parser.add_option("-a", "--address", dest = "address", metavar = "ADDR", default=None,
                      help="The address that the network server will listen on (default: all interfaces)")
    parser.add_option("-S", "--stdio-server", dest = "use_stdio", action="store_true",
                      help=SUPPRESS_HELP)
    if not args:
        args = ["--help"]
    options, args = parser.parse_args(args)
    if not args:
        raise UserError("You must specify a repository to serve.")
    if len(args) > 1:
        raise UserError("Too many arguments.")
    repopath = str(os.path.abspath(args[0]))
    if options.use_stdio and not (options.port is None and options.address is None):
        raise UserError("Stdio server (-S) does not accept --port or --address options.")
    # Apply defaults only after the -S conflict check above.
    port = 10001 if options.port is None else options.port
    address = "" if options.address is None else options.address
    if options.use_stdio:
        boarserve.init_stdio_server(repopath).serve()
    else:
        boarserve.run_socketserver(repopath, address, port)
def cmd_truncate(args):
    """Truncate the history of the given session (delegates to
    front.truncate; see that method for the exact semantics)."""
    if len(args) == 0:
        args = ["--help"]
    parser = OptionParser(usage="usage: boar truncate <session name>")
    (options, args) = parser.parse_args(args)
    if len(args) > 1:
        raise UserError("Too many arguments")
    # Bugfix: with zero positional arguments left after option parsing
    # (e.g. "boar truncate --"), the tuple unpacking below raised an
    # unhandled ValueError instead of a friendly error.
    if len(args) != 1:
        raise UserError("You must specify a session name")
    session_name, = args
    front = connect_to_repo(get_repo_url())
    if not front.find_last_revision(session_name):
        raise UserError("There is no session with the name '%s'" % session_name)
    sid = front.truncate(session_name)
    print("Session %s has been truncated to revision %s" % (session_name, sid))
def parse_sessionpath(s):
    """Split a "<session name>/<path/in/session>" spec into the pair
    (session_name, offset). Backslashes are normalized to slashes and a
    trailing slash on the offset is dropped. Raises UserError for
    absolute (slash-prefixed) specs."""
    normalized = tounicode(s).replace("\\", "/")
    if normalized.startswith("/"):
        raise UserError("Session path must not start with a slash")
    session_name, _, offset = normalized.partition("/")
    offset = offset.rstrip("/")
    assert isinstance(session_name, str)
    assert isinstance(offset, str)
    return session_name, offset
def cmd_cat(args):
    """Write the contents of one or more repository files to stdout.

    Files are addressed either by session path (optionally pinned to a
    revision with -r), or with --blob directly by a 32-char md5 blob id
    with an optional [start:end] byte range suffix. All arguments are
    resolved/validated before any output is produced."""
    parser = OptionParser(usage="usage: boar cat [options] [<session path>|blob id]")
    parser.add_option("-B", "--blob", action="store_true", dest = "blob",
                      help="Fetch by blob id string instead of session path")
    parser.add_option("-r", "--revision", action="store", dest = "revision", type="int",
                      help="The revision to fetch")
    parser.add_option("--punycode", action="store_true", dest = "punycode",
                      help="The filename will be given in the punycode format")
    (options, args) = parser.parse_args(args)
    if not args:
        raise UserError("You must specify one or more filenames or blobids")
    stdout = dedicated_stdout()
    globals()["suppress_finishmessage"] = True
    front = connect_to_repo(get_repo_url())
    if not options.blob:
        # Resolve every session path to a blob id first, so errors are
        # reported before any bytes have been written.
        blobids = []
        while args:
            path = args.pop(0)
            if options.punycode:
                # Bugfix: str has no decode() on Python 3. Punycode input is
                # ascii text, so encode to bytes first, then decode.
                path = path.encode("ascii").decode("punycode")
            session_name, session_path = parse_sessionpath(path)
            if options.revision:
                if options.revision not in front.get_session_ids(session_name):
                    raise UserError("There is no such revision of the given session")
                revision = options.revision
            else:
                revision = front.find_last_revision(session_name)
                if not revision:
                    raise UserError("No such session found: %s" % (session_name))
            blobid = _get_blobid(front, revision, session_path)
            if not blobid:
                raise UserError("No such file exists in the given session/revision: %s" % (session_path))
            blobids.append(blobid)
        for blobid in blobids:
            datasource = front.get_blob(blobid)
            while datasource.bytes_left():
                stdout.buffer.write(datasource.read(4096))
    else:
        blob_parts = []
        if options.revision:
            raise UserError("--revision option can not be combined with --blob option")
        if options.punycode:
            raise UserError("--punycode option can not be combined with --blob option")
        while args:
            blobspec = args.pop(0)
            # A 32-char blob id, optionally followed by "[start:end]".
            m = re.match(r"^(.{32})(\[(\d*:\d*)\])?$", blobspec)
            if not m:
                raise UserError("Illegal blob specification")
            blobid = m.group(1)
            range_spec = m.group(3)
            if not is_md5sum(blobid):
                raise UserError("Not a valid blobid: %s" % blobid)
            if not front.has_blob(blobid):
                raise UserError("Blob does not exist in repository: %s" % blobid)
            lower_bound, upper_bound = 0, front.get_blob_size(blobid)
            if range_spec:
                lower_bound, upper_bound = _parse_range(range_spec, lower_bound, upper_bound)
            blob_parts.append((blobid, lower_bound, upper_bound - lower_bound))
        for blobid, offset, size in blob_parts:
            datasource = front.get_blob(blobid, offset, size)
            while datasource.bytes_left():
                stdout.buffer.write(datasource.read(4096))
def cmd_export(args):
    """Export blobs to files, given "<blobid>:<destination>" specs.

    All specs are validated before any file is written, so a bad spec
    does not leave a partial export behind."""
    parser = OptionParser(usage="usage: boar export [options] [<blobid>:<destination filename>] ...")
    (options, args) = parser.parse_args(args)
    if not args:
        raise UserError("You must specify one or more export specifications")
    front = connect_to_repo(get_repo_url())
    # First pass: validate every spec (was previously done with asserts,
    # which are stripped under "python -O" and gave unhelpful errors).
    for exportspec in args:
        blobid, destination = exportspec.split(":")
        if not front.has_blob(blobid):
            raise UserError("Blob does not exist in repository: %s" % blobid)
        if os.path.exists(destination):
            raise UserError("Destination already exists: %s" % destination)
    # Second pass: write the files.
    for exportspec in args:
        blobid, destination = exportspec.split(":")
        if os.path.exists(destination):
            raise UserError("Destination already exists: %s" % destination)
        # Bugfix: blobs are binary data; open in binary mode ("wb"). Text
        # mode fails on bytes in Python 3 and mangled data on Windows.
        # The context manager also guarantees the handle is closed.
        with open(destination, "wb") as fo:
            datasource = front.get_blob(blobid)
            while datasource.bytes_left():
                fo.write(datasource.read(4096))
def _get_blobid(front, revision, session_path):
    """Return the blob id (md5) of the file at session_path in the given
    revision, or None if the revision contains no such file."""
    for entry in get_cached_bloblist(front, revision):
        if entry['filename'] == session_path:
            return entry['md5sum']
    return None
def cmd_co(args):
    """Check out a session (optionally a subpath and/or a specific
    revision) into a newly created workdir."""
    parser = OptionParser(usage="usage: boar co [options] <session name>[/path/] [workdir name]")
    parser.add_option("-r", "--revision", action="store", dest = "revision", type="int",
                      help="The revision to check out (default is latest)")
    parser.add_option("-l", "--symlink", dest = "symlink", action="store_true",
                      help="Create symlinks to the repository instead of copying the data. (DANGEROUS)")
    (options, args) = parser.parse_args(args)
    if not args:
        raise UserError("You must specify a session name with an optional subpath (i.e 'MyPictures/summer2010')")
    if len(args) > 2:
        raise UserError("Too many arguments")
    co_spec = tounicode(args.pop(0))
    co_spec = co_spec.replace("\\", "/")
    if co_spec.startswith("/"):
        raise UserError("Checkout specification must not start with a slash")
    session_name, throwaway, offset = co_spec.partition("/")
    offset = offset.rstrip("/")
    # Default workdir name is the last path component of the checkout spec.
    workdir_name = co_spec.rstrip("/").split("/").pop()
    workdir_path = os.path.abspath(workdir_name)
    if args:
        workdir_path = tounicode(os.path.abspath(args.pop(0)))
    # Bugfix: also reject a pre-existing target for the *default* workdir
    # name. Previously only an explicitly given workdir was checked, and
    # the default case crashed later with a raw OSError at os.mkdir.
    if os.path.exists(workdir_path):
        raise UserError("Workdir path '%s' already exists" % (workdir_path))
    assert not args # Args parsing complete
    repourl = get_repo_url()
    front = connect_to_repo(repourl)
    if options.symlink:
        # Symlinking only works against a plain local filesystem repo.
        if os.name == "nt":
            raise UserError("Symlinks can not be used on windows")
        if not (repourl.startswith("/") or repourl.startswith("file://")):
            raise UserError("Symlinks can only be used with a local repository specification")
        if front.deduplication_enabled():
            raise UserError("Symlinks can not be used with deduplicated repositories")
    latest_sid = front.find_last_revision(session_name)
    if not latest_sid:
        raise UserError("No such session found: %s" % (session_name))
    if options.revision:
        if options.revision not in front.get_session_ids(session_name):
            raise UserError("There is no such revision of the given session")
        sid = options.revision
    else:
        sid = latest_sid
    print("Checking out to workdir", workdir_path)
    os.mkdir(workdir_path)
    wd = workdir.Workdir(repourl, session_name, offset, sid, workdir_path, front = front)
    wd.checkout(symlink=options.symlink)
def cmd_setprop(args):
    """Set a session property ("ignore" or "include"). The new value is
    taken from the command line, or from a file with -f."""
    parser = OptionParser(usage="usage: boar setprop [options] <session name> <property> [new value]")
    parser.add_option("-f", "--file", action="store", dest = "file",
                      help="Read the new property value from the given file")
    options, args = parser.parse_args(args)
    if len(args) < 2:
        raise UserError("You must specify a session and a property name")
    if len(args) > 3:
        raise UserError("Too many arguments")
    if not options.file and len(args) != 3:
        raise UserError("Not enough arguments")
    if options.file and len(args) > 2:
        raise UserError("You can not specify both a source file and a new value")
    front = connect_to_repo(get_repo_url())
    if options.file:
        session_name, property_name = args
        try:
            with safe_open(options.file, "rb") as f:
                new_value = f.read().decode("utf-8")
        except Exception as e:
            raise UserError("Problems reading file %s: %s" % (options.file, e))
    else:
        session_name, property_name, new_value = args
    setters = {"ignore": front.set_session_ignore_list,
               "include": front.set_session_include_list}
    if property_name not in setters:
        raise UserError("Property name must be one of the following: ignore, include")
    # Blank lines carry no information for either property; drop them.
    non_empty_lines = [line for line in new_value.splitlines() if line]
    setters[property_name](session_name, non_empty_lines)
def cmd_getprop(args):
    """Print a session property's value, or write it to a file with -f.
    Currently only the "ignore" property can be read."""
    parser = OptionParser(usage="usage: boar getprop [options] <session name> <property>")
    parser.add_option("-f", "--file", action="store", dest = "file",
                      help="Write the property value to the given file instead of printing it")
    options, args = parser.parse_args(args)
    if len(args) < 2:
        raise UserError("You must specify a session and a property name")
    if len(args) > 2:
        raise UserError("Too many arguments")
    session_name, property_name = args
    front = connect_to_repo(get_repo_url())
    if property_name != "ignore":
        raise UserError("Property name must be one of the following: ignore")
    # One entry per line, each terminated by the platform line separator.
    items = front.get_session_ignore_list(session_name)
    property_value = u"".join(item + os.linesep for item in items)
    if options.file:
        try:
            with open(options.file, "w") as f:
                f.write(property_value)
        except Exception as e:
            raise UserError("Problems writing file %s: %s" % (options.file, e))
    else:
        print(property_value)
def cmd_find(front, args):
    """Print the session paths of every file in the latest revision of a
    session whose content matches the given md5 checksum."""
    checksum, session_name = args
    revision = front.find_last_revision(session_name)
    for entry in front.get_session_bloblist(revision):
        if entry['md5sum'] == checksum:
            print(entry['filename'])
@beta
def cmd_lostfiles(args):
    """List every checked-in file whose content is missing from the
    workdir; with -u, restore each missing file from the repository."""
    restore = "-u" in args
    wd = workdir.init_workdir(ucwd)
    if not wd:
        raise UserError(not_a_workdir_msg)
    for entry in wd.get_bloblist(wd.revision):
        if wd.exists_in_workdir(entry['md5sum']):
            continue
        print("Missing:", entry['filename'], entry['md5sum'])
        if restore:
            wd.fetch_file(entry['filename'], entry['md5sum'], overwrite = False)
def __clone_once(source_front, target_front, skip_verify=False):
    """Bring target_front up to date with source_front, with quick
    verification before and a full verification after (unless
    skip_verify is set)."""
    def quick_verify(label, repo_front):
        # Cheap structural check only; blob contents are not hashed here.
        print("Quick verifying %s repo" % label)
        verify_repo(repo_front, verify_blobs = False)
    if front.is_identical(source_front, target_front):
        print("Repositories are already identical")
        return
    if not skip_verify:
        quick_verify("source", source_front)
        quick_verify("destination", target_front)
    front.clone(source_front, target_front)
    if not skip_verify:
        print("Performing full verify on cloned repo")
        verify_repo(target_front, verify_blobs = True)
    print("Repos are in sync.")
def __clone_replicate(source_front, target_front, skip_verify = False):
    """Continuously replicate source_front to target_front. Never
    returns; polls for changes every 10 seconds."""
    if not skip_verify:
        print("Quick verifying source repo")
        verify_repo(source_front, verify_blobs = False)
        print("Quick verifying destination repo")
        verify_repo(target_front, verify_blobs = False)
    print("Entering continous replication mode.")
    print("Will replicate all incoming changes on %s to %s" % (source_front, target_front))
    while True:
        if not front.is_identical(source_front, target_front):
            print("Incoming changes found. Cloning...")
            # Already verified above; don't re-verify on every sync.
            __clone_once(source_front, target_front, skip_verify=True)
            print("Waiting for incoming changes...")
        else:
            time.sleep(10)
def cmd_clone(args):
    """Clone one repository into another, creating the destination if it
    does not exist. With -r, stay in continuous replication mode."""
    if not args:
        args = ["--help"]
    parser = OptionParser(usage="usage: boar clone <source repo> <destination repo>")
    parser.add_option("-r", "--replicate", dest = "replicate", action="store_true",
                      help="Continously replicate any changes from the master to the clone (does not return)")
    parser.add_option("--skip-verification", dest = "skip_verify", action="store_true",
                      help="No verifications will be performed. WARNING: If the source repo is corrupt, the clone may also become corrupt.")
    options, args = parser.parse_args(args)
    if len(args) != 2:
        raise UserError("You must specify one source repository and one destination repository.")
    source_url, target_url = args
    source_front = client.connect(source_url)
    if not client.is_boar_url(target_url) and not os.path.exists(target_url):
        # First clone into a fresh local path: create the destination,
        # matching the source's deduplication setting.
        repository.create_repository(target_url, enable_deduplication = source_front.deduplication_enabled())
    target_front = client.connect(target_url)
    if not front.is_continuation(base_front = target_front, cont_front = source_front):
        raise UserError("The source repo is not a continuation of the destination repo. Cannot clone.")
    clone_fn = __clone_replicate if options.replicate else __clone_once
    clone_fn(source_front, target_front, skip_verify = options.skip_verify)
def cmd_diffrepo(args):
    """Compare two local repositories. Returns 0 if they are identical,
    1 if they differ."""
    if not args:
        args = ["--help"]
    parser = OptionParser(usage="usage: boar diffrepo <repo 1> <repo 2>")
    options, args = parser.parse_args(args)
    if len(args) != 2:
        raise UserError("You must specify exactly two existing repositories.")
    path_a, path_b = args
    if path_a.startswith("boar://") or path_b.startswith("boar://"):
        raise UserError("Can only compare local repositories")
    front_a = Front(client.user_friendly_open_local_repository(os.path.abspath(path_a)))
    front_b = Front(client.user_friendly_open_local_repository(os.path.abspath(path_b)))
    if not front.is_identical(front_a, front_b):
        print("Repositories differ")
        return 1
    # Sanity check: identity must be symmetric.
    assert front.is_identical(front_b, front_a)
    print("Repositories are identical")
    return 0
@beta
def cmd_findlost(args):
    """Find blobs that occur in older revisions of a session but are
    absent from its latest revision, and write them into a directory
    (suffixing "(n)" on filename collisions)."""
    front = connect_to_repo(get_repo_url())
    if len(args) != 2:
        raise UserError("You must specify a session and a directory to store the findings in.")
    sessionName = args[0]
    outputDir = args[1]
    all_revisions = front.get_session_ids(sessionName)
    progress = SimpleProgressPrinter(sys.stdout, "Loading revisions")
    last_revision = all_revisions.pop()
    # Map blobid -> list of blobinfos, accumulated over every revision
    # except the last one.
    all_blobs = {}
    for n, rev in enumerate(all_revisions):
        progress.update(1.0 * n / len(all_revisions))
        current_revision_blobs = invert_bloblist(front.get_session_bloblist(rev))
        for blobid, blobinfos in list(current_revision_blobs.items()):
            if blobid in all_blobs:
                all_blobs[blobid].extend(blobinfos)
            else:
                all_blobs[blobid] = blobinfos[:]
    last_revision_blobs = invert_bloblist(front.get_session_bloblist(last_revision))
    lost_blobids = set(all_blobs.keys()) - set(last_revision_blobs.keys())
    progress.update(1.0)
    progress.finished()
    print(len(lost_blobids), "lost")
    tocopy = []
    for blobid in lost_blobids:
        filenames = set([blobinfo['filename'] for blobinfo in all_blobs[blobid]])
        for filename in sorted(filenames):
            print(blobid.ljust(32), filename)
            tocopy.append((blobid, filename))
    for blobid, filename in tocopy:
        output_path = os.path.join(outputDir, filename)
        print("Writing", blobid, output_path)
        dirname = os.path.dirname(output_path)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        # Avoid clobbering an earlier finding with the same filename.
        n = 0
        while os.path.exists(output_path):
            n += 1
            output_path = os.path.join(outputDir, filename + ("(%d)" % n))
        # Bugfix: blobs are binary data; open in binary mode ("wb"). Text
        # mode fails on bytes in Python 3.
        with open(output_path, "wb") as f:
            datareader = front.get_blob(blobid)
            while datareader.bytes_left() > 0:
                f.write(datareader.read(2**14))
@beta
def cmd_export_md5(wd, args):
    """Write an md5sum-style manifest for the workdir (delegates to the
    workdir's own export_md5; the extra args are currently unused)."""
    wd.export_md5()
def get_repo_url_commandline():
    """Return the repository URL given with --repo (made absolute for
    local paths), or None if --repo was not given."""
    if cmdline_repo is None:
        return None
    if client.is_boar_url(cmdline_repo):
        url = cmdline_repo
    else:
        url = os.path.abspath(cmdline_repo)
    assert isinstance(url, str)
    return url
def get_repo_url_env():
    """Return the repository URL from $REPO_PATH (made absolute for
    local paths), or None if the variable is not set."""
    url = tounicode(os.getenv("REPO_PATH"))
    if url is None:
        return None
    if client.is_boar_url(url):
        assert isinstance(url, str)
        return url
    return os.path.abspath(url)
def get_repo_url():
    """Return the repository URL to operate on: --repo takes precedence,
    then $REPO_PATH. Raises UserError if neither is set."""
    url = get_repo_url_commandline() or get_repo_url_env()
    if not url:
        raise UserError("You need to specify a repository to operate on. "+\
                        "Use the --repo option or set $REPO_PATH.")
    return url
def connect_to_repo(repourl):
    """Open and return a front for the repository at repourl (a local
    path or a boar:// URL); thin wrapper around client.connect()."""
    return client.connect(repourl)
def json_bug_test():
    """Abort with a UserError if the running interpreter is affected by
    the known json decoding bug (http://bugs.python.org/issue10038)."""
    if not json_has_bug():
        return
    raise UserError("Your Python version seems to contain a known json decoding bug (http://bugs.python.org/issue10038). It affects Python 2.7 and 2.7.1. You should upgrade (or downgrade) your Python installation.")
def main():
    """Parse the global options (--EXEC, --version, --repo) and dispatch
    to the requested subcommand.

    Returns the process exit code (0 on success, 1 on usage error;
    subcommands may return their own codes)."""
    json_bug_test()
    args = sys.argv[1:]
    global cmdline_repo
    cmdline_repo = None
    # --EXEC is only intended for whitebox testing code that needs
    # invasive access. It should never be used in normal usage.
    if args and args[0] == "--EXEC":
        execfile(args[1])
        args = args[2:]
    if "--version" in args:
        if len(args) != 1:
            raise UserError("The --version option can not be combined with other options")
        print("Boar, version %s" % BOAR_VERSION)
        if not deduplication.cdedup_version:
            print("Deduplication module not installed")
        else:
            print("Deduplication module v%s" % deduplication.cdedup_version)
        print("Copyright (C) 2010-2012 Mats Ekberg.")
        print("Licensed under the Apache License, Version 2.0")
        return 0
    # OptParse can't be told to pick out just --repo while ignoring other,
    # command-specific options, so scan for it by hand.
    for i in range(0, len(args)):
        if args[i] == "--repo":
            args.pop(i)
            try:
                cmdline_repo = args.pop(i)
            except IndexError:
                # Bugfix: was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit.
                raise UserError("You must specify a valid repository after --repo")
            break
        if args[i].startswith("--repo="):
            # Bugfix: split at the first '=' only, so repository values
            # containing '=' no longer raise an unpacking ValueError.
            _, cmdline_repo = args.pop(i).split("=", 1)
            break
    if len(args) == 0:
        print_help()
        return 1
    command, cmdargs = args[0], args[1:]
    # Commands following the plain cmd_<name>(args) convention.
    simple_commands = {
        "mkrepo": cmd_mkrepo,
        "import": cmd_import,
        "list": cmd_list,
        "log": cmd_log,
        "ls": cmd_ls,
        "verify": cmd_verify,
        "manifests": cmd_manifests,
        "repair": cmd_repair,
        "cat": cmd_cat,
        "co": cmd_co,
        "status": cmd_status,
        "info": cmd_info,
        "ci": cmd_ci,
        "update": cmd_update,
        "locate": cmd_locate,
        "lostfiles": cmd_lostfiles,
        "contents": cmd_contents,
        "mksession": cmd_mksession,
        "mkstandalone": cmd_mkstandalone,
        "revisions": cmd_revisions,
        "relocate": cmd_relocate,
        "sessions": cmd_sessions,
        "stats": cmd_stats,
        "truncate": cmd_truncate,
        "clone": cmd_clone,
        "diffrepo": cmd_diffrepo,
        "setprop": cmd_setprop,
        "getprop": cmd_getprop,
        "scanblocks": cmd_scanblocks,
        "export": cmd_export,
        "exportrev": cmd_exportrev,
        "findlost": cmd_findlost,
        }
    if command in simple_commands:
        return simple_commands[command](cmdargs)
    if command == "find":
        # Bugfix: cmd_find expects a connected repository front; the old
        # code passed the bare URL string, crashing with AttributeError.
        return cmd_find(connect_to_repo(get_repo_url()), cmdargs)
    if command == "serve":
        # "suppress_finishmessage" is a hack to avoid duplicate
        # "finished" messages when connecting to a boar server.
        # TODO: make less hackish
        globals()["suppress_finishmessage"] = True
        return cmd_serve(cmdargs)
    if command == "exportmd5":
        wd = workdir.init_workdir(ucwd)
        return cmd_export_md5(wd, cmdargs)
    print_help()
    return 1
# Exit code reported to the OS; overwritten by the dispatch below.
return_code = 0
if __name__ == "__main__":
    # Wall-clock start time for the "Finished in ..." message at exit.
    t1 = time.time()
    if os.name == "nt":
        import msvcrt
        # Without this, windows wininetd server breaks, as well as all binary
        # stdout output, as in "cat".
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
        msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
    # Normalize all command line arguments to unicode strings up front.
    sys.argv = list(map(tounicode, sys.argv))
    # NOTE(review): "global" at module scope is a no-op; ucwd is simply a
    # module-level variable holding the unicode current working directory.
    global ucwd
    ucwd = tounicode(os.getcwd())
    if 'BOAR_PROF' in os.environ:
        # Opt-in profiling mode: run main() under cProfile, print the 20
        # most expensive call sites, then exit immediately.
        print("PROFILING ENABLED")
        cProfile.run('main()', "prof.txt")
        import pstats
        p = pstats.Stats('prof.txt')
        p.sort_stats('cum').print_stats(20)
        sys.exit(0)
    try:
        return_code = main()
    except KeyboardInterrupt:
        print()
        print("ERROR: Operation cancelled by user")
        return_code = 1
    except UserError as e:
        #import traceback
        #print "-"*60
        #traceback.print_exc(file=sys.stdout)
        #print "-"*60
        print("ERROR:", str(e))
        return_code = 1
    except repository.MisuseError as e:
        print("REPO USAGE ERROR:", str(e))
        return_code = 1
    except repository.CorruptionError as e:
        # Hard corruption gets a distinctive exit code (13) for scripting.
        print("REPO CORRUPTION:", str(e))
        return_code = 13
    except repository.SoftCorruptionError as e:
        # Soft corruption (exit code 7) only affects non-vital caches and
        # is repairable with the "repair" command.
        print("SOFT REPO CORRUPTION:", str(e))
        print("No need to panic. This means that there are problems in some non-vital cache files in the repository.")
        print("Execute a 'repair' command on the repository to fix this problem.")
        return_code = 7
    t2 = time.time()
    # Commands that produce machine-readable output set
    # suppress_finishmessage so the timing line doesn't pollute stdout.
    if not globals().get("suppress_finishmessage", False):
        print("Finished in", round(t2-t1, 2), "seconds")
    #import common
    #print "Processed", common._file_reader_sum, "bytes"
    sys.exit(return_code)