rbloomfilename = git.repo_rel(bloomfilename)
if not os.path.exists(bloomfilename):
log(bloomfilename)
- add_error("bloom: %s not found to ruin" % rbloomfilename)
+ add_error("Bloom filter: %s not found to ruin!" % rbloomfilename)
return
b = bloom.ShaBloom(bloomfilename, readwrite=True, expected=1)
b.map[16:16+2**b.bits] = '\0' * 2**b.bits
rbloomfilename = git.repo_rel(bloomfilename)
ridx = git.repo_rel(idx)
if not os.path.exists(bloomfilename):
- log("bloom: %s: does not exist." % rbloomfilename)
+ add_error("Error: %s does not exist!" % rbloomfilename)
return
b = bloom.ShaBloom(bloomfilename)
if not b.valid():
- add_error("bloom: %r is invalid." % rbloomfilename)
+ add_error("Error: %r is invalid!" % rbloomfilename)
return
base = os.path.basename(idx)
if base not in b.idxnames:
- log("bloom: %s does not contain the idx." % rbloomfilename)
+ add_error("Error: %s does not contain the idx!" % rbloomfilename)
return
if base == idx:
idx = os.path.join(path, idx)
- log("bloom: bloom file: %s" % rbloomfilename)
- log("bloom: checking %s" % ridx)
+ log("Checking bloom file: %s" % rbloomfilename)
+ log(" Checking %s ..." % ridx)
for objsha in git.open_idx(idx):
if not b.exists(objsha):
- add_error("bloom: ERROR: object %s missing"
+ add_error("Error: Object %s is missing!"
% str(objsha).encode('hex'))
if os.path.exists(outfilename) and not opt.force:
b = bloom.ShaBloom(outfilename)
if not b.valid():
- debug1("bloom: Existing invalid bloom found, regenerating.")
+ debug1("Bloom filter: Existing invalid item found, regenerating ...")
b = None
add = []
add_count = 0
rest_count = 0
for i,name in enumerate(glob.glob('%s/*.idx' % path)):
- progress_update('bloom: counting (%d) ...' % i)
+ progress_update('Bloom filter: Counting (%d) ...' % i)
ix = git.open_idx(name)
ixbase = os.path.basename(name)
if b and (ixbase in b.idxnames):
total = add_count + rest_count
if not add:
- debug1("bloom: nothing to do.")
+ debug1("Bloom filter: Nothing to do.")
return
if b:
if len(b) != rest_count:
- debug1("bloom: size %d != idx total %d, regenerating"
+ debug1("Bloom filter: size %d != idx total %d, regenerating ..."
% (len(b), rest_count))
b = None
elif (b.bits < bloom.MAX_BLOOM_BITS and
b.pfalse_positive(add_count) > bloom.MAX_PFALSE_POSITIVE):
- debug1("bloom: regenerating: adding %d entries gives "
- "%.2f%% false positives.\n"
+ debug1("Bloom filter: Adding %d entries gives %.2f%% "
+ "false positives, regenerating ..."
% (add_count, b.pfalse_positive(add_count)))
b = None
else:
del rest
del rest_count
- msg = b is None and 'creating from' or 'adding'
+ msg = b is None and 'Creating from' or 'Adding'
if not _first: _first = path
dirprefix = (_first != path) and git.repo_rel(path)+': ' or ''
- progress_update('bloom: %s%s %d file%s (%d object%s) ...'
+ progress_update('Bloom filter: %s%s %d file%s (%d object%s) ...'
% (dirprefix, msg,
len(add), len(add)!=1 and 's' or '',
add_count, add_count!=1 and 's' or ''))
icount = 0
for name in add:
ix = git.open_idx(name)
- progress_update('bloom: writing %.2f%% (%d/%d objects) ...'
+ progress_update('Bloom filter: Writing %.2f%% (%d/%d objects) ...'
% (icount*100.0/add_count, icount, add_count))
b.add_idx(ix)
count += 1
if tfname:
os.rename(tfname, outfilename)
- progress_end('bloom: %s%s %d file%s (%d object%s), done.'
+ progress_end('Bloom filter: %s%s %d file%s (%d object%s), done.'
% (dirprefix, msg,
len(add), len(add)!=1 and 's' or '',
add_count, add_count!=1 and 's' or ''))
paths = opt.dir and [opt.dir] or git.all_packdirs()
for path in paths:
- debug1('bloom: scanning %s' % path)
+ debug1('Bloom filter: Scanning %s ...' % path)
outfilename = opt.output or os.path.join(path, 'bup.bloom')
if opt.check:
check_bloom(path, outfilename, opt.check)
continue
try:
if af == socket.AF_INET6:
- log("bup daemon: listening on [%s]:%s" % sa[:2])
+ log("Listening on [%s]:%s." % sa[:2])
else:
- log("bup daemon: listening on %s:%s" % sa[:2])
+ log("Listening on %s:%s." % sa[:2])
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(sa)
s.listen(1)
socks.append(s)
if not socks:
- log('bup daemon: listen socket: %s' % e.args[1])
+ log('Listen socket: %s' % e.args[1])
sys.exit(1)
try:
for l in rl:
s, src = l.accept()
try:
- log("Socket accepted connection from %s" % (src,))
+ log("Socket accepted connection from %s ..." % (src,))
fd1 = os.dup(s.fileno())
fd2 = os.dup(s.fileno())
s.close()
l.shutdown(socket.SHUT_RDWR)
l.close()
-debug1("bup daemon: done")
+debug1("bup daemon done.")
random.seed(opt.seed)
for name in extra:
- log('Damaging "%s"...' % name)
+ log('Damaging "%s" ...' % name)
f = open(name, 'r+b')
st = os.fstat(f.fileno())
size = st.st_size
stdout=nullf, stderr=nullf, stdin=nullf)
rv = p.wait()
except OSError:
- log('fsck: warning: par2 not found; disabling recovery features.')
+ log('fsck: Warning: "par2" not found; disabling recovery features!')
else:
par2_ok = 1
try:
quick_verify(base)
except Exception, e:
- debug('error: %s' % e)
+ debug('Error: %s!' % e)
return 1
return 0
else:
rresult = par2_repair(base)
if rresult != 0:
action_result = 'failed'
- log('%s par2 repair: failed (%d)' % (last, rresult))
+ log('%s par2 repair: Failed (%d)!' % (last, rresult))
code = rresult
else:
action_result = 'repaired'
- log('%s par2 repair: succeeded (0)' % last)
+ log('%s par2 repair: Succeeded (0).' % last)
code = 100
else:
action_result = 'failed'
- log('%s par2 verify: failed (%d)' % (last, vresult))
+ log('%s par2 verify: Failed (%d)!' % (last, vresult))
code = vresult
else:
action_result = 'ok'
gresult = git_verify(base)
if gresult != 0:
action_result = 'failed'
- log('%s git verify: failed (%d)' % (last, gresult))
+ log('%s Git verify: Failed (%d)!' % (last, gresult))
code = gresult
else:
if par2_ok and opt.generate:
presult = par2_generate(base)
if presult != 0:
action_result = 'failed'
- log('%s par2 create: failed (%d)' % (last, presult))
+ log('%s par2 create: Failed (%d)!' % (last, presult))
code = presult
else:
action_result = 'generated'
if par2_exists and os.stat(base + '.par2').st_size == 0:
par2_exists = 0
sys.stdout.flush()
- debug('fsck: checking %s (%s)'
+ debug('fsck: Checking %s (%s) ...'
% (last, par2_ok and par2_exists and 'par2' or 'git'))
if not opt.verbose:
progress_update('fsck (%d/%d) ...' % (count + 1, len(extra)))
try:
sys.exit(do_pack(base, last, par2_exists))
except Exception, e:
- log('exception: %r' % e)
+ log('Exception: %r' % e)
sys.exit(99)
while len(outstanding):
try:
import readline
except ImportError:
- log('* readline module not available: line editing disabled.')
+ log('* "readline" module not available: Line editing disabled.')
readline = None
if readline:
outf.close()
except Exception, e:
rv = 1
- log(' error: %s' % e)
+ log(' error: %s!' % e)
elif cmd == 'help' or cmd == '?':
log('Commands: ls cd pwd cat get mget help quit')
elif cmd == 'quit' or cmd == 'exit' or cmd == 'bye':
raise Exception('no such command %r' % cmd)
except Exception, e:
rv = 1
- log('error: %s' % e)
+ log('Error: %s!' % e)
#raise
sys.exit(rv)
try:
import fuse
except ImportError:
- log('error: cannot find the python "fuse" module; please install it')
+ log('Error: Cannot find the python "fuse" module; please install it!')
sys.exit(1)
def check_index(reader):
try:
- log('check: checking forward iteration...')
+ log('Testing forward iteration ...')
e = None
d = {}
for e in reader.forward_iter():
assert(e.sha != index.EMPTY_SHA)
assert(e.gitmode)
assert(not e or e.name == '/') # last entry is *always* /
- log('check: checking normal iteration...')
+ log('Testing normal iteration ...')
last = None
for e in reader:
if last:
assert(last > e.name)
last = e.name
except:
- log('index error! at %r' % e)
+ log('Index error at %r!' % e)
raise
- log('check: passed.')
+ log('Check passed.')
def clear_index(indexfile):
try:
os.remove(path)
if opt.verbose:
- log('clear: removed %s' % path)
+ log('Removed "%s".' % path)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if wi.count:
wr = wi.new_reader()
if opt.check:
- log('check: before merging: oldfile')
+ log('Check before merging: oldfile ...')
check_index(ri)
- log('check: before merging: newfile')
+ log('Check before merging: newfile ...')
check_index(wr)
mi = index.Writer(indexfile, msw, tmax)
handle_ctrl_c()
if opt.check:
- log('check: starting initial check.')
+ log('Starting initial index check ...')
check_index(index.Reader(indexfile))
if opt.clear:
- log('clear: clearing index.')
+ log('Clearing index ...')
clear_index(indexfile)
excluded_paths = parse_excludes(flags, o.fatal)
try:
git.init_repo() # local repo
except git.GitError, e:
- log("bup: error: could not init repository: %s" % e)
+ log("Cannot initialize repository: %s!" % e)
sys.exit(1)
if opt.remote:
outfile.write(blob)
except KeyError, e:
outfile.flush()
- log('error: %s' % e)
+ log('Error: %s!' % e)
ret = 1
sys.exit(ret)
try:
ix = git.open_idx(name)
except git.GitError, e:
- add_error('%s: %s' % (name, e))
+ add_error('%s: %s!' % (name, e))
continue
if len(opt.find) == 40:
if ix.exists(bin):
f = open('/proc/self/status')
except IOError, e:
if not _linux_warned:
- log('Warning: %s' % e)
+ log('Warning: %s!' % e)
_linux_warned = 1
return {}
for line in f:
report((c+1)*opt.number)
if bloom._total_searches:
- print ('bloom: %d objects searched in %d steps: avg %.3f steps/object'
+ print ('bloom: %d objects searched in %d steps: avg %.3f steps/object.'
% (bloom._total_searches, bloom._total_steps,
bloom._total_steps*1.0/bloom._total_searches))
if midx._total_searches:
- print ('midx: %d objects searched in %d steps: avg %.3f steps/object'
+ print ('midx: %d objects searched in %d steps: avg %.3f steps/object.'
% (midx._total_searches, midx._total_steps,
midx._total_steps*1.0/midx._total_searches))
if git._total_searches:
- print ('idx: %d objects searched in %d steps: avg %.3f steps/object'
+ print ('idx: %d objects searched in %d steps: avg %.3f steps/object.'
% (git._total_searches, git._total_steps,
git._total_steps*1.0/git._total_searches))
-print 'Total time: %.3fs' % (time.time() - start)
+print 'Total time: %.3fs.' % (time.time() - start)
try:
ix = git.open_idx(name)
except git.GitError, e:
- add_error('%s: %s' % (name, e))
+ add_error('%s: %s!' % (name, e))
return
for count,subname in enumerate(ix.idxnames):
sub = git.open_idx(os.path.join(os.path.dirname(name), subname))
% (count, len(ix.idxnames),
git.shorten_hash(subname), ecount, len(sub)), False)
if not sub.exists(e):
- add_error("%s: %s: %s missing from idx"
+ add_error("%s: %s: %s missing from idx!"
% (nicename, git.shorten_hash(subname),
str(e).encode('hex')))
if not ix.exists(e):
- add_error("%s: %s: %s missing from midx"
+ add_error("%s: %s: %s missing from midx!"
% (nicename, git.shorten_hash(subname),
str(e).encode('hex')))
prev = None
if not (ecount % 1234):
progress_update(' Ordering: %d/%d ...' % (ecount, len(ix)), False)
if not e >= prev:
- add_error('%s: ordering error: %s < %s'
+ add_error('%s: Ordering error: %s < %s!'
% (nicename,
str(e).encode('hex'), str(prev).encode('hex')))
prev = e
if not _first: _first = outdir
dirprefix = (_first != outdir) and git.repo_rel(outdir)+': ' or ''
- debug1('midx: %s%screating from %d files (%d objects).'
+ debug1('Multi-index: %s%screating from %d files (%d objects) ...'
% (dirprefix, prefixstr, len(infilenames), total))
if (opt.auto and (total < 1024 and len(infilenames) < 3)) \
or ((opt.auto or opt.force) and len(infilenames) < 2) \
or (opt.force and not total):
- debug1('midx: nothing to do.')
+ debug1('Multi-index: Nothing to do.')
return
pages = int(total/SHA_PER_PAGE) or 1
bits = int(math.ceil(math.log(pages, 2)))
entries = 2**bits
- debug1('midx: table size: %d (%d bits)' % (entries*4, bits))
+ debug1('Multi-index: Table size %d (%d bits).' % (entries*4, bits))
unlink(outfilename)
with atomically_replaced_file(outfilename, 'wb') as f:
already[iname] = 1
any = 1
if not any:
- debug1('%r is redundant' % mname)
+ debug1('%r is redundant!' % mname)
unlink(mname)
already[mname] = 1
DESIRED_HWM = opt.force and 1 or 5
DESIRED_LWM = opt.force and 1 or 2
existed = dict((name,1) for sz,name in all)
- debug1('midx: %d indexes; want no more than %d.'
+ debug1('Multi-index: %d indexes; want no more than %d.'
% (len(all), DESIRED_HWM))
if len(all) <= DESIRED_HWM:
- debug1('midx: nothing to do.')
+ debug1('Multi-index: Nothing to do.')
while len(all) > DESIRED_HWM:
all.sort()
part1 = [name for sz,name in all[:len(all)-DESIRED_LWM+1]]
part2 = all[len(all)-DESIRED_LWM+1:]
all = list(do_midx_group(path, part1)) + part2
if len(all) > DESIRED_HWM:
- debug1('Still too many indexes (%d > %d). Merging again.'
+ debug1('Still too many indexes (%d > %d). Merging again ...'
% (len(all), DESIRED_HWM))
if opt['print']:
midxes = []
paths = opt.dir and [opt.dir] or git.all_packdirs()
for path in paths:
- debug1('midx: scanning %s' % path)
+ debug1('Multi-index: Scanning %s ...' % path)
midxes += glob.glob(os.path.join(path, '*.midx'))
for name in midxes:
check_midx(name)
elif opt.auto or opt.force:
paths = opt.dir and [opt.dir] or git.all_packdirs()
for path in paths:
- debug1('midx: scanning %s' % path)
+ debug1('Multi-index: Scanning %s ...' % path)
do_midx_dir(path)
else:
o.fatal("you must use -f or -a or provide input filenames")
subcmd = extra
-debug2('bup mux: starting %r' % (extra,))
+debug2('bup mux: Starting %r ...' % (extra,))
outr, outw = os.pipe()
errr, errw = os.pipe()
prv = p.wait()
if prv:
- debug1('%s exited with code %d' % (extra[0], prv))
+ debug1('bup mux: %s exited with code %d!' % (extra[0], prv))
-debug1('bup mux: done')
+debug1('bup mux: Done (%d).' % prv)
sys.exit(prv)
_helpers.write_random(sys.stdout.fileno(), total, opt.seed,
opt.verbose and 1 or 0)
else:
- log('error: not writing binary data to a terminal. Use -f to force.')
+ log('Error: Not writing binary data to a terminal. Use -f to force.')
sys.exit(1)
ret = 0
for d in extra:
if not valid_restore_path(d):
- add_error("ERROR: path %r doesn't include a branch and revision" % d)
+ add_error("Error: Path %r doesn't include a branch and revision!" % d)
continue
path,name = os.path.split(d)
try:
# (i.e. /foo/what/ever/.), then also restore what/ever's
# metadata to the current directory.
if not isdir:
- add_error('%r: not a directory' % d)
+ add_error('%r: Not a directory!' % d)
else:
do_root(n, opt.sparse, owner_map, restore_root_meta = (name == '.'))
else:
try:
cli = client.Client(opt.remote)
except client.ClientError, e:
- log('error: %s' % e)
+ log('Error: %s!' % e)
sys.exit(1)
oldref = refname and cli.read_ref(refname) or None
w = cli.new_packwriter(compression_level=opt.compress)
except IOError, ex:
if ex.errno != EACCES:
raise
- log('error: cannot access %r; have you run bup index?' % indexfile)
+ log('Error: Cannot access %r; have you run bup index?' % indexfile)
sys.exit(1)
hlink_db = hlinkdb.HLinkDB(indexfile + '.hlink')
if opt.smaller and ent.size >= opt.smaller:
if exists and not hashvalid:
if opt.verbose:
- log('skipping large file "%s"' % ent.name)
+ log('Skipping large file "%s".' % ent.name)
lastskip_name = ent.name
continue
w.new_blob, w.new_tree, [f],
keep_boundaries=False)
except (IOError, OSError), e:
- add_error('%s: %s' % (ent.name, e))
+ add_error('%s: %s!' % (ent.name, e))
lastskip_name = ent.name
else:
if stat.S_ISDIR(ent.mode):
def _set_mode():
global dumb_server_mode
dumb_server_mode = os.path.exists(git.repo('bup-dumb-server'))
- debug1('bup server: serving in %s mode'
+ debug1('bup server: Serving in %s mode.'
% (dumb_server_mode and 'dumb' or 'smart'))
# OK. we now know the path is a proper repository. Record this path in the
# environment so that subprocesses inherit it and know where to operate.
os.environ['BUP_DIR'] = git.repodir
- debug1('bup server: bupdir is %r' % git.repodir)
+ debug1('bup server: bupdir is %r.' % git.repodir)
_set_mode()
def init_dir(conn, arg):
git.init_repo(arg)
- debug1('bup server: bupdir initialized: %r' % git.repodir)
+ debug1('bup server: bupdir initialized: %r.' % git.repodir)
_init_session(arg)
conn.ok()
n = struct.unpack('!I', ns)[0]
#debug2('expecting %d bytes' % n)
if not n:
- debug1('bup server: received %d object%s.'
+ debug1('bup server: Received %d object%s.'
% (w.count, w.count!=1 and "s" or ''))
fullpath = w.close(run_midx=not dumb_server_mode)
if fullpath:
conn.ok()
return
elif n == 0xffffffff:
- debug2('bup server: receive-objects suspended.')
+ debug2('bup server: Receive-objects suspended.')
suspended_w = w
conn.ok()
return
-
+
shar = conn.read(20)
crcr = struct.unpack('!I', conn.read(4))[0]
n -= 20 + 4
buf = conn.read(n) # object sizes in bup are reasonably small
#debug2('read %d bytes' % n)
- _check(w, n, len(buf), 'object read: expected %d bytes, got %d\n')
+ _check(w, n, len(buf), 'Object read: Expected %d bytes, got %d\n')
if not dumb_server_mode:
oldpack = w.exists(shar, want_source=True)
if oldpack:
assert(oldpack.endswith('.idx'))
(dir,name) = os.path.split(oldpack)
if not (name in suggested):
- debug1("bup server: suggesting index %s"
+ debug1("bup server: Suggesting index %s"
% git.shorten_hash(name))
debug1("bup server: because of object %s"
% shar.encode('hex'))
suggested.add(name)
continue
nw, crc = w._raw_write((buf,), sha=shar)
- _check(w, crcr, crc, 'object read: expected crc %d, got %d\n')
+ _check(w, crcr, crc, 'Object read: Expected CRC %d, got %d\n')
# NOTREACHED
conn.write(struct.pack('!I', len(blob)))
conn.write(blob)
except KeyError, e:
- log('server: error: %s' % e)
+ log('bup server: Error: %s!' % e)
conn.write('\0\0\0\0')
conn.error(e)
else:
if extra:
o.fatal('no arguments expected')
-debug2('bup server: reading from stdin.')
+debug2('bup server: Reading from stdin.')
commands = {
'quit': None,
line = _line.strip()
if not line:
continue
- debug1('bup server: command: %r' % line)
+ debug1('bup server: Command: %r ...' % line)
words = line.split(' ', 1)
cmd = words[0]
rest = len(words)>1 and words[1] or ''
else:
raise Exception('unknown server command: %r' % line)
-debug1('bup server: done')
+debug1('bup server: Done.')
it = cp.get(line.strip())
next(it, None) # skip the file type
except KeyError, e:
- add_error('error: %s' % e)
+ add_error('Error: %s!' % e)
continue
yield IterToFile(it)
files = read_ids()
secs = time.time() - start_time
size = hashsplit.total_split
if opt.bench:
- log('bup: %.2fkbytes in %.2f secs = %.2f kbytes/sec'
+ log('bup split: %.2fkbytes in %.2f secs = %.2f kbytes/sec.'
% (size/1024., secs, size/1024./secs))
if not check_saved_errors():
# contents of the tag file and pass the hash, and we already know
# about the tag's existance via "tags".
if not opt.force and opt.delete not in tags:
- log("error: tag '%s' doesn't exist" % opt.delete)
+ log("Error: Tag '%s' doesn't exist!" % opt.delete)
sys.exit(1)
tag_file = 'refs/tags/%s' % opt.delete
git.delete_ref(tag_file)
(tag_name, commit) = extra[:2]
if not tag_name:
o.fatal("tag name must not be empty.")
-debug1("args: tag name = %s; commit = %s" % (tag_name, commit))
+debug1("Parameters: Tag name = %s; commit = %s" % (tag_name, commit))
if tag_name in tags and not opt.force:
- log("bup: error: tag '%s' already exists" % tag_name)
+ log("Error: Tag '%s' already exists!" % tag_name)
sys.exit(1)
if tag_name.startswith('.'):
try:
hash = git.rev_parse(commit)
except git.GitError, e:
- log("bup: error: %s" % e)
+ log("Error: %s!" % e)
sys.exit(2)
if not hash:
- log("bup: error: commit %s not found." % commit)
+ log("Error: Commit %s not found!" % commit)
sys.exit(2)
pL = git.PackIdxList(git.repo('objects/pack'))
if not pL.exists(hash):
- log("bup: error: commit %s not found." % commit)
+ log("Error: Commit %s not found!" % commit)
sys.exit(2)
tag_file = git.repo('refs/tags/%s' % tag_name)
try:
tag = file(tag_file, 'w')
except OSError, e:
- log("bup: error: could not create tag '%s': %s" % (tag_name, e))
+ log("Error: Cannot create tag '%s': %s!" % (tag_name, e))
sys.exit(3)
tag.write(hash.encode('hex'))
import tornado.ioloop
import tornado.web
except ImportError:
- log('error: cannot find the python "tornado" module; please install it')
+ log('Error: Cannot find the python "tornado" module; please install it!')
sys.exit(1)
handle_ctrl_c()
@tornado.web.asynchronous
def _process_request(self, path):
path = urllib.unquote(path)
- print 'Handling request for %s' % path
+ print 'Handling request for "%s" ...' % path
try:
n = top.resolve(path)
except vfs.NoSuchFile:
error). In either case, the headers are sent.
"""
if not path.endswith('/') and len(path) > 0:
- print 'Redirecting from %s to %s' % (path, path + '/')
+ print 'Redirecting from %s to %s.' % (path, path + '/')
return self.redirect(path + '/', permanent=True)
try:
except AttributeError, e:
sock = http_server._sockets.values()[0]
- print "Serving HTTP on %s:%d..." % sock.getsockname()
+ print "Serving HTTP on %s:%d ..." % sock.getsockname()
loop = tornado.ioloop.IOLoop.instance()
if opt.browser:
# flush it as one big lump during close().
pages = os.fstat(f.fileno()).st_size / 4096 * 5 # assume k=5
self.delaywrite = expected > pages
- debug1('bloom: delaywrite=%r' % self.delaywrite)
+ debug1('Bloom filter: delaywrite=%r' % self.delaywrite)
if self.delaywrite:
self.map = mmap_readwrite_private(self.rwfile, close=False)
else:
self.map = mmap_read(f)
got = str(self.map[0:4])
if got != 'BLOM':
- log('Warning: invalid BLOM header (%r) in %r' % (got, filename))
+ log('Warning: Invalid BLOM header (%r) in %r!' % (got, filename))
return self._init_failed()
ver = struct.unpack('!I', self.map[4:8])[0]
if ver < BLOOM_VERSION:
- log('Warning: ignoring old-style (v%d) bloom %r'
+ log('Warning: Ignoring old-style (v%d) bloom %r!'
% (ver, filename))
return self._init_failed()
if ver > BLOOM_VERSION:
- log('Warning: ignoring too-new (v%d) bloom %r'
+ log('Warning: Ignoring too-new (v%d) bloom %r!'
% (ver, filename))
return self._init_failed()
def close(self):
if self.map and self.rwfile:
- debug2("bloom: closing with %d entries" % self.entries)
+ debug2("Bloom filter: Closing with %d entries ..." % self.entries)
self.map[12:16] = struct.pack('!I', self.entries)
if self.delaywrite:
self.rwfile.seek(0)
bits = int(math.floor(math.log(expected*MAX_BITS_EACH/8,2)))
k = k or ((bits <= MAX_BLOOM_BITS[5]) and 5 or 4)
if bits > MAX_BLOOM_BITS[k]:
- log('bloom: warning, max bits exceeded, non-optimal')
+ log('Bloom filter: Warning, max bits exceeded, non-optimal!')
bits = MAX_BLOOM_BITS[k]
- debug1('bloom: using 2^%d bytes and %d hash functions' % (bits, k))
+ debug1('Bloom filter: Using 2^%d bytes and %d hash functions.' % (bits, k))
f = f or open(name, 'w+b')
f.write('BLOM')
f.write(struct.pack('!IHHI', BLOOM_VERSION, bits, k, 0))
extra.discard(idx)
self.check_ok()
- debug1('client: removing extra indexes: %s' % extra)
+ debug1('Client: Removing extra indexes: %s' % extra)
for idx in extra:
os.unlink(os.path.join(self.cachedir, idx))
- debug1('client: server requested load of: %s' % needed)
+ debug1('Client: Server requested load of: %s' % needed)
for idx in needed:
self.sync_index(idx)
git.auto_midx(self.cachedir)
mkdirp(self.cachedir)
fn = os.path.join(self.cachedir, name)
if os.path.exists(fn):
- msg = "won't request existing .idx, try `bup bloom --check %s`" % fn
+ msg = "Won't request existing .idx, try `bup bloom --check %s`" % fn
raise ClientError(msg)
self.conn.write('send-index %s\n' % name)
n = struct.unpack('!I', self.conn.read(4))[0]
debug2('%s' % line)
if line.startswith('index '):
idx = line[6:]
- debug1('client: received index suggestion: %s'
+ debug1('Client: Received index suggestion: %s'
% git.shorten_hash(idx))
suggested.append(idx)
else:
assert(line.endswith('.idx'))
- debug1('client: completed writing pack, idx: %s'
+ debug1('Client: Completed writing pack, idx: %s'
% git.shorten_hash(line))
suggested.append(line)
self.check_ok()
try:
st = xstat.lstat(n)
except OSError, e:
- add_error(Exception('%s: %s' % (realpath(n), str(e))))
+ add_error('%s: %s!' % (realpath(n), e))
continue
if (st.st_mode & _IFMT) == stat.S_IFDIR:
n += '/'
debug1('Skipping BUP_DIR.')
continue
if xdev != None and pst.st_dev != xdev:
- debug1('Skipping contents of %r: different filesystem.' % path)
+ debug1('Skipping contents of %r: Different filesystem.' % path)
else:
try:
OsFile(name).fchdir()
except OSError, e:
- add_error('%s: %s' % (prepend, e))
+ add_error('%s: %s!' % (prepend, e))
else:
for i in _recursive_dirlist(prepend=prepend+name, xdev=xdev,
bup_dir=bup_dir,
yield (path, pst)
continue
except OSError, e:
- add_error('recursive_dirlist: %s' % e)
+ add_error('recursive_dirlist(): %s!' % e)
continue
try:
pfile = OsFile(path)
rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
except OSError, e:
# make sure 'args' gets printed to help with debugging
- add_error('%r: exception: %s' % (args, e))
+ add_error('%r: Exception: %s!' % (args, e))
raise
if rv:
- add_error('%r: returned %d' % (args, rv))
+ add_error('%r: Returned %d!' % (args, rv))
args = [path.exe(), 'bloom', '--dir', objdir]
try:
rv = subprocess.call(args, stdout=open('/dev/null', 'w'))
except OSError, e:
# make sure 'args' gets printed to help with debugging
- add_error('%r: exception: %s' % (args, e))
+ add_error('%r: Exception: %s!' % (args, e))
raise
if rv:
- add_error('%r: returned %d' % (args, rv))
+ add_error('%r: Returned %d!' % (args, rv))
def mangle_name(name, mode, gitmode):
broken = False
for n in mx.idxnames:
if not os.path.exists(os.path.join(mxd, n)):
- log('warning: index %s missing!' % n)
+ log('Warning: Index %s missing!' % n)
log(' used by %s\n' % mxf)
broken = True
if broken:
for name in ix.idxnames:
d[os.path.join(self.dir, name)] = ix
elif not ix.force_keep:
- debug1('midx: removing redundant: %s'
+ debug1('Multi-index: Removing redundant: %s'
% os.path.basename(ix.name))
ix.close()
unlink(ix.name)
self.do_bloom = True
else:
self.bloom = None
- debug1('PackIdxList: using %d index%s.'
+ debug1('PackIdxList: Using %d index%s.'
% (len(self.packs), len(self.packs)!=1 and 'es' or ''))
def add(self, hash):
"""
head = read_ref(committish, repo_dir=repo_dir)
if head:
- debug2("resolved from ref: commit = %s" % head.encode('hex'))
+ debug2("Resolved from ref: commit = %s" % head.encode('hex'))
return head
pL = PackIdxList(repo('objects/pack', repo_dir=repo_dir))
os.stat(repo('objects/pack/.'))
except OSError, e:
if e.errno == errno.ENOENT:
- log('error: %r is not a bup repository; run "bup init"'
+ log('Error: %r is not a bup repository; run "bup init"!'
% repo())
sys.exit(15)
else:
- log('error: %s' % e)
+ log('Error: %s!' % e)
sys.exit(14)
wanted = ('1','5','6')
if ver() < wanted:
if not _ver_warned:
- log('warning: git version < %s; bup will be slow.'
+ log('Warning: Git version < %s; bup will be slow.'
% '.'.join(wanted))
_ver_warned = 1
self.get = self._slow_get
sys.stderr.write(buf)
elif fdw == 3:
self.closed = True
- debug2("DemuxConn: marked closed")
+ debug2("DemuxConn: Marked closed.")
return True
def _load_buf(self, timeout):
try:
excluded_patterns.append(re.compile(parameter))
except re.error, ex:
- fatal('invalid --exclude-rx pattern (%s): %s' % (parameter, ex))
+ fatal('Invalid --exclude-rx pattern (%s): %s!' % (parameter, ex))
elif option == '--exclude-rx-from':
try:
f = open(realpath(parameter))
try:
excluded_patterns.append(re.compile(spattern))
except re.error, ex:
- fatal('invalid --exclude-rx pattern (%s): %s' % (spattern, ex))
+ fatal('Invalid --exclude-rx pattern (%s): %s!' % (spattern, ex))
return excluded_patterns
except EOFError:
pass
except:
- log('index metadata in %r appears to be corrupt' % filename)
+ log('Index metadata in %r appears to be corrupt!' % filename)
raise
finally:
m_file.close()
if f:
b = f.read(len(INDEX_HDR))
if b != INDEX_HDR:
- log('warning: %s: header: expected %r, got %r'
+ log('Warning: %s: Expected %r, got %r in header!'
% (filename, INDEX_HDR, b))
else:
st = os.fstat(f.fileno())
p = slashappend(p)
xpaths.append((rp, p))
except OSError, e:
- add_error('reduce_paths: %s' % e)
+ add_error('reduce_paths(): %s!' % e)
xpaths.sort()
paths = []
def merge(*iters):
def pfunc(count, total):
- progress_update('bup: merging indexes (%d/%d) ...' % (count, total), False)
+ progress_update('Merging indexes (%d/%d) ...' % (count, total), False)
def pfinal(count, total):
- progress_end('bup: merging indexes (%d/%d), done.' % (count, total))
+ progress_end('Merging indexes (%d/%d), done.' % (count, total))
return merge_iter(iters, 1024, pfunc, pfinal, key='name')
else:
output_node_info(n, os.path.normpath(path))
except vfs.NodeError, e:
- log('error: %s' % e)
+ log('Error: %s!' % e)
ret = 1
if L:
try:
import xattr
except ImportError:
- log('Warning: Linux xattr support missing; install python-pyxattr.')
+ log('Warning: Linux xattr support missing; install "python-pyxattr"!')
if xattr:
try:
xattr.get_all
except AttributeError:
- log('Warning: python-xattr module is too old; '
- 'install python-pyxattr instead.\n')
+ log('Warning: "python-xattr" module is too old; '
+ 'install "python-pyxattr" instead!')
xattr = None
posix1e = None
try:
import posix1e
except ImportError:
- log('Warning: POSIX ACL support missing; install python-pylibacl.')
+ log('Warning: POSIX ACL support missing; install "python-pylibacl"!')
try:
from bup._helpers import get_linux_file_attr, set_linux_file_attr
os.rmdir(path)
except OSError, e:
if e.errno in (errno.ENOTEMPTY, errno.EEXIST):
- msg = 'refusing to overwrite non-empty dir ' + path
+ msg = 'Refusing to overwrite non-empty dir ' + path
raise Exception(msg)
raise
else:
# FIXME: S_ISDOOR, S_IFMPB, S_IFCMP, S_IFNWK, ... see stat(2).
else:
assert(not self._recognized_file_type())
- add_error('not creating "%s" with unrecognized mode "0x%x"'
+ add_error('Not creating "%s" with unrecognized mode "0x%x"!'
% (path, self.mode))
def _apply_common_rec(self, path, restore_numeric_ids=False):
os.lchown(path, uid, gid)
except OSError, e:
if e.errno == errno.EPERM:
- add_error('lchown: %s' % e)
+ add_error('lchown(): %s!' % e)
elif sys.platform.startswith('cygwin') \
and e.errno == errno.EINVAL:
- add_error('lchown: unknown uid/gid (%d/%d) for %s'
+ add_error('lchown(): Unknown uid/gid (%d/%d) for %s!'
% (uid, gid, path))
else:
raise
if stat.S_ISLNK(st.st_mode):
self.symlink_target = os.readlink(path)
except OSError, e:
- add_error('readlink: %s' % e)
+ add_error('readlink(): %s!' % e)
def _encode_symlink_target(self):
return self.symlink_target
if not posix1e:
if self.posix1e_acl:
- add_error("%s: can't restore ACLs; posix1e support missing."
+ add_error("%s: Cannot restore ACLs; POSIX1e support missing!"
% path)
return
if self.posix1e_acl:
self.linux_attr = attr
except OSError, e:
if e.errno == errno.EACCES:
- add_error('read Linux attr: %s' % e)
+ add_error('Read Linux attr: %s!' % e)
elif e.errno in (errno.ENOTTY, errno.ENOSYS, errno.EOPNOTSUPP):
# Assume filesystem doesn't support attrs.
return
if self.linux_attr:
check_linux_file_attr_api()
if not set_linux_file_attr:
- add_error("%s: can't restore linuxattrs: "
- "linuxattr support missing.\n" % path)
+ add_error("%s: Cannot restore Linux attrs: "
+ "linuxattr support missing!" % path)
return
try:
set_linux_file_attr(path, self.linux_attr)
def _apply_linux_xattr_rec(self, path, restore_numeric_ids=False):
if not xattr:
if self.linux_xattr:
- add_error("%s: can't restore xattr; xattr support missing."
+ add_error("%s: Cannot restore xattr; xattr support missing!"
% path)
return
if not self.linux_xattr:
if not path:
raise Exception('Metadata.apply_to_path() called with no path')
if not self._recognized_file_type():
- add_error('not applying metadata to "%s"' % path
- + ' with unrecognized mode "0x%x"\n' % self.mode)
+ add_error('Not applying metadata to "%s"' % path
+ + ' with unrecognized mode "0x%x"!' % self.mode)
return
num_ids = restore_numeric_ids
for apply_metadata in (self._apply_common_rec,
for path in paths:
safe_path = _clean_up_path_for_archive(path)
if safe_path != path:
- log('archiving "%s" as "%s"' % (path, safe_path))
+ log('Archiving "%s" as "%s".' % (path, safe_path))
if not recurse:
for p in paths:
for meta in _ArchiveIterator(file):
if not meta.path:
print >> sys.stderr, \
- 'bup: no metadata path, but asked to only display path', \
+ 'No metadata path, but asked to only display path', \
'(increase verbosity?)'
sys.exit(1)
print meta.path
print >> sys.stderr, meta.path
xpath = _clean_up_extract_path(meta.path)
if not xpath:
- add_error(Exception('skipping risky path "%s"' % meta.path))
+ add_error(Exception('Skipping risky path "%s"!' % meta.path))
else:
meta.path = xpath
_set_up_path(meta, create_symlinks=create_symlinks)
break
xpath = _clean_up_extract_path(meta.path)
if not xpath:
- add_error(Exception('skipping risky path "%s"' % dir.path))
+ add_error(Exception('Skipping risky path "%s"!' % dir.path))
else:
if os.path.isdir(meta.path):
all_dirs.append(meta)
break
xpath = _clean_up_extract_path(meta.path)
if not xpath:
- add_error(Exception('skipping risky path "%s"' % meta.path))
+ add_error(Exception('Skipping risky path "%s"!' % meta.path))
else:
meta.path = xpath
if verbose:
assert(filename.endswith('.midx'))
self.map = mmap_read(open(filename))
if str(self.map[0:4]) != 'MIDX':
- log('Warning: skipping: invalid MIDX header in %r' % filename)
+ log('Warning: Skipping invalid midx header in %r!' % filename)
self.force_keep = True
return self._init_failed()
ver = struct.unpack('!I', self.map[4:8])[0]
if ver < MIDX_VERSION:
- log('Warning: ignoring old-style (v%d) midx %r'
+ log('Warning: Ignoring old-style (v%d) midx %r!'
% (ver, filename))
self.force_keep = False # old stuff is boring
return self._init_failed()
if ver > MIDX_VERSION:
- log('Warning: ignoring too-new (v%d) midx %r'
+ log('Warning: Ignoring too-new (v%d) midx %r!'
% (ver, filename))
self.force_keep = True # new stuff is exciting
return self._init_failed()
while lines:
l = lines.pop()
if l == '--': break
- out.append('%s: %s\n' % (first_syn and 'usage' or ' or', l))
+ out.append('%s: %s\n' % (first_syn and 'Usage' or ' or', l))
first_syn = False
out.append('\n')
last_was_option = False
optspec = ['help', 'version', 'debug', 'profile', 'bup-dir=']
global_args, subcmd = getopt.getopt(argv[1:], '?VDd:', optspec)
except getopt.GetoptError, ex:
- usage('error: %s' % ex.msg)
+ usage('Error: %s!' % ex.msg)
help_requested = None
do_profile = False
elif opt[0] in ['-d', '--bup-dir']:
os.environ['BUP_DIR'] = opt[1]
else:
- usage('error: unexpected option "%s"' % opt[0])
+ usage('Error: Unexpected option "%s"!' % opt[0])
# Make BUP_DIR absolute, so we aren't affected by chdir (i.e. save -C, etc.).
if 'BUP_DIR' in os.environ:
subcmd[0] = subpath(subcmd_name)
if not os.path.exists(subcmd[0]):
- usage('error: unknown command "%s"' % subcmd_name)
+ usage('Error: Unknown command "%s"!' % subcmd_name)
already_fixed = atoi(os.environ.get('BUP_FORCE_TTY'))
if subcmd_name in ['mux', 'ftp', 'help']:
forward_signals = True
def handler(signum, frame):
- debug1('bup: Signal %d received.' % signum)
+ debug1('bup: Signal %d received!' % signum)
if not p or not forward_signals:
return
if signum != signal.SIGTSTP: