1842 lines
62 KiB
Python
Executable File
1842 lines
62 KiB
Python
Executable File
#!/usr/bin/env python
|
|
|
|
# Josh's SVN wrapper script
|
|
#
|
|
# Please view the README file from the jsvn repository or online at
|
|
# https://github.com/holtrop/jsvn
|
|
|
|
import sys
|
|
import os
|
|
import re
|
|
import time
|
|
from subprocess import *
|
|
import traceback
|
|
import datetime
|
|
import types
|
|
import getopt
|
|
import signal
|
|
import platform
|
|
import tempfile
|
|
import shutil
|
|
|
|
# Matches one line of `svn status` output: the leading status columns
# followed by the target path (captured in group 1).
STATUS_LINE_REGEX = r'[ACDIMRX?!~ ][CM ][L ][+ ][SX ][KOTB ]..(.+)'

###########################################################################
# Subcommand Handler Return Values                                        #
###########################################################################
RET_OK = 0      # subcommand was handled successfully
RET_ERR = 1     # subcommand failed
RET_REEXEC = 2  # handler declined; re-execute the native svn command

###########################################################################
# ANSI escape color code values                                           #
###########################################################################
# Color name -> ANSI base color number (added to 30 for fg, 40 for bg).
COLORS = {
    'black': 0,
    'red': 1,
    'green': 2,
    'yellow': 3,
    'blue': 4,
    'magenta': 5,
    'cyan': 6,
    'white': 7,
}
# Whether ANSI color escapes are emitted; presumably enabled elsewhere
# based on configuration / terminal detection — set outside this chunk.
using_color = False
|
|
|
|
###########################################################################
|
|
# Configuration #
|
|
###########################################################################
|
|
def read_config_file(config, path):
    """Execute the Python configuration script at *path*, populating the
    *config* dict with the names the script defines.

    A missing file is silently ignored.  Errors raised by the script are
    reported to stderr with the config file's path and line numbers, but
    are not fatal.
    """
    if not os.path.exists(path):
        return
    with open(path, 'r') as fh:
        script = fh.read()
    try:
        # NOTE: the config file is trusted local input; it is executed as
        # arbitrary Python with `config` as its global namespace.
        exec(script, config)
    except:
        sys.stderr.write('Configuration file error in "%s":\n' % path)
        traceback.print_exception(sys.exc_info()[0], sys.exc_info()[1],
                                  None)
        # Re-print traceback entries with the config file's own path
        # substituted so the user can locate the failing line.
        tb = traceback.extract_tb(sys.exc_info()[2])
        for ent in tb[1:]:
            lineno, fn = ent[1:3]
            sys.stderr.write('  File "%s", line %d, in %s\n'
                             % (path, lineno, fn))
|
|
|
|
def get_config(svn):
    """Build the effective jsvn configuration.

    Starts from built-in defaults, then overlays the user's ~/.jsvn and
    finally the per-working-copy <wc-root>/.svn/jsvn file.
    """
    defaults = {
        'pager': '',
        'use_pager': True,
        'use_color': True,
        'aliases': {
            # default jsvn aliases
            'tags': 'tag',
            'branches': 'branch'},
        'svn': '',
        'ignore_executable_extensions':
            ['.c', '.cc', '.h', '.txt'],
        'stash_externals': False,
    }
    config_files = (
        os.path.expanduser('~/.jsvn'),
        get_svn_wc_root(svn) + '/.svn/jsvn',
    )
    for fname in config_files:
        read_config_file(defaults, fname)
    return defaults
|
|
|
|
###########################################################################
|
|
# Utility Functions #
|
|
###########################################################################
|
|
class LogEntry(object):
    """One entry parsed from `svn log` (optionally verbose / with diffs).

    Reads lines from *fd* up to the next 72-dash separator, splitting
    the entry into header fields, changed paths, commit message lines,
    and per-file diffs.
    """
    def __init__(self, fd):
        # Parsed header fields
        self.revision = 0
        self.user = ''
        self.date = ''
        self.lines = ''
        self.message_lines = 0   # count from the "N lines" header field
        self.changed_paths = []  # "Changed paths:" section lines
        self.message = []        # commit message lines
        self.diffs = []          # one list of lines per "Index:" diff
        self.length = 0          # number of content lines consumed
        self.eof = True          # False if a following entry exists

        mode = 'normal'
        for line in iter(fd.readline, ''):
            line = line.rstrip()
            if mode == 'normal' and re.match(r'r\d+\s+\|', line):
                # header line: rNNN | user | date | N lines
                # NOTE: Python-2 idiom — map() returns a list here
                parts = map(lambda x: x.strip(), line.split('|'))
                if len(parts) == 4:
                    self.revision = int(parts[0][1:])
                    self.user = parts[1]
                    self.date = parts[2]
                    self.lines_text = parts[3]
                    m = re.match('(\d+)\sline', self.lines_text)
                    if m is not None:
                        self.message_lines = int(m.group(1))
            elif mode == 'normal' and re.match(r'Changed.paths:', line):
                self.changed_paths.append(line)
                mode = 'cp'
            elif re.match(r'-{72}', line):
                # entry separator: a separator after content means
                # another entry follows
                if self.length != 0:
                    self.eof = False
                    break
                # leading separator: cancel out the += 1 below so it
                # does not count toward the entry length
                self.length -= 1
            elif re.match(r'Index:\s', line):
                self.diffs.append([line])
                mode = 'diff'
            elif mode == 'diff':
                self.diffs[-1].append(line)
            else:
                self.changed_paths.append(line)
            self.length += 1
        # Drop the blank line separating changed paths from diffs.
        if (len(self.changed_paths) > 0 and len(self.diffs) > 0
                and self.changed_paths[-1] == ''):
            self.changed_paths = self.changed_paths[0:len(self.changed_paths)-1]
        # The message is the trailing message_lines lines of what was
        # accumulated as "changed paths".
        if len(self.changed_paths) >= self.message_lines:
            self.message = self.changed_paths[-self.message_lines:]
            self.changed_paths = self.changed_paths[0:-self.message_lines]

    def __len__(self):
        # Number of content lines this entry consumed from the stream.
        return self.length

    def display(self, out, pretty):
        """Write the entry to *out*; *pretty* is 'default' or 'oneline'."""
        if pretty == 'default':
            # colorized multi-line format mirroring native `svn log`
            ansi_color(out, 'blue', bold=True)
            out.write('r%d' % self.revision)
            ansi_reset(out)
            out.write(' | ')
            ansi_color(out, 'cyan')
            out.write(self.user)
            ansi_reset(out)
            out.write(' | ')
            ansi_color(out, 'magenta')
            out.write(self.date)
            ansi_reset(out)
            out.write(' | ')
            out.write(self.lines_text)
            out.write('\n')
            for cp in self.changed_paths:
                # changed-path lines look like "   M /path"; cp[3] is
                # the action letter
                if re.match(r'   [ADM] /', cp):
                    action = cp[3]
                    if action == 'A':
                        ansi_color(out, 'green')
                    elif action == 'D':
                        ansi_color(out, 'red')
                    elif action == 'M':
                        ansi_color(out, 'yellow')
                out.write(cp)
                ansi_reset(out)
                out.write('\n')
            for ml in self.message:
                out.write(ml)
                out.write('\n')
            for d in self.diffs:
                out.write('\n')
                # first two lines are the Index:/separator header
                for i in range(2):
                    ansi_color(out, 'yellow')
                    out.write(d[i])
                    ansi_reset(out)
                    out.write('\n')
                for l in d[2:]:
                    colordiff(out, l)
        elif pretty == 'oneline':
            # single line: rev user date message, truncated to terminal
            columns = get_terminal_size()[0]
            len_written = 0

            def write(msg):
                # write and report length so truncation can be tracked
                out.write(msg)
                return len(msg)

            ansi_color(out, 'blue', bold=True)
            len_written += write('%d' % self.revision)
            ansi_reset(out)

            len_written += write(' ')
            ansi_color(out, 'cyan')
            len_written += write(self.user)
            ansi_reset(out)

            # shorten date to MM-DD HH:MM
            m = re.match('\d{4}-(\d{2}-\d{2}\s\d{2}:\d{2})', self.date)
            if m is not None:
                len_written += write(' ')
                ansi_color(out, 'magenta')
                len_written += write(m.group(1))
                ansi_reset(out)

            if len(self.message) > 0:
                len_written += write(' ')
                len_remaining = columns - len_written
                if len_remaining > 0:
                    msg = '...'.join(filter(lambda x: x != '', self.message))
                    msg = msg[:len_remaining]
                    write(msg)

            out.write('\n')
|
# From http://stackoverflow.com/questions/566746/
# how-to-get-console-window-width-in-python
def get_terminal_size():
    """Return the terminal size as a (columns, rows) tuple.

    Tries TIOCGWINSZ on the standard fds, then the controlling terminal,
    then the LINES/COLUMNS environment variables; defaults to 80x25.
    """
    import os
    env = os.environ

    def query(fd):
        # returns (rows, cols) or None if the ioctl is unavailable
        try:
            import fcntl, termios, struct, os
            return struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
        except:
            return None

    size = query(0) or query(1) or query(2)
    if not size:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            size = query(fd)
            os.close(fd)
        except:
            pass
    if not size:
        try:
            size = (env['LINES'], env['COLUMNS'])
        except:
            size = (25, 80)
    # note: stored as (rows, cols); returned as (cols, rows)
    return int(size[1]), int(size[0])
|
|
|
|
def ansi_color(out, fg=None, bg=None, bold=False):
    """Emit ANSI escapes selecting fg/bg colors on *out* (no-op when
    color output is disabled)."""
    if not using_color:
        return
    bold_code = 1 if bold else 0
    if fg is not None:
        out.write('\033[%d;%dm' % (bold_code, 30 + COLORS[fg]))
    if bg is not None:
        out.write('\033[%d;%dm' % (bold_code, 40 + COLORS[bg]))
|
|
|
|
def ansi_reset(out):
    """Emit the ANSI attribute-reset escape (no-op when color is off)."""
    if not using_color:
        return
    out.write('\033[0m')
|
|
|
|
def colordiff(out, line):
    """Write one diff *line* to *out* with colordiff(1)-style coloring.

    Index/separator headers are yellow, removals red, additions green,
    hunk markers (@@...@@ / ##...##) cyan; other lines are uncolored.
    """
    def emit(color, text):
        # write *text* in *color*, reset, newline — the pattern the
        # original repeated for every case
        ansi_color(out, color)
        out.write(text)
        ansi_reset(out)
        out.write('\n')

    line = line.rstrip()
    if re.match(r'Index:\s', line) or re.match(r'={67}', line):
        emit('yellow', line)
        return
    if re.match(r'-', line):
        emit('red', line)
        return
    if re.match(r'\+', line):
        emit('green', line)
        return
    m = re.match(r'(@@.*@@)(.*)', line) or re.match(r'(##.*##)(.*)', line)
    if m is not None:
        # hunk header: color only the marker portion, not the context
        ansi_color(out, 'cyan')
        out.write(m.group(1))
        ansi_reset(out)
        out.write(m.group(2))
        out.write('\n')
        return
    out.write(line)
    out.write('\n')
|
|
|
|
def find_in_path(cmd):
    """Return the full path of *cmd* from the first matching PATH entry,
    or '' if it is not found."""
    for directory in os.environ['PATH'].split(os.pathsep):
        candidate = os.path.join(directory, cmd)
        if os.path.exists(candidate):
            return candidate
    return ''
|
|
|
|
def get_svn_url(svn, path='.'):
    """Return the 'URL:' field from `svn info <path>`, or ''."""
    info = Popen([svn, 'info', path], stdout=PIPE).communicate()[0]
    for line in info.split('\n'):
        m = re.match(r'^URL:\s*(.*?)\s*$', line)
        if m is not None:
            return m.group(1)
    return ''
|
|
|
|
def get_svn_root_url(svn):
    """Return the repository URL up to (excluding) the first
    trunk/tags/branches component, or ''."""
    parts = get_svn_url(svn).split('/')
    for i, part in enumerate(parts):
        if part in ('trunk', 'tags', 'branches'):
            return '/'.join(parts[:i])
    return ''
|
|
|
|
def get_svn_repo_real_root_url(svn):
    """Return the 'Repository Root' URL reported by `svn info`, or ''."""
    output = Popen([svn, 'info'], stdout=PIPE, stderr=PIPE).communicate()[0]
    for line in output.split('\n'):
        m = re.match(r'Repository Root: (.*)$', line)
        if m is not None:
            return m.group(1)
    return ''
|
|
|
|
def get_svn_wc_root(svn):
    """Return the 'Working Copy Root Path' from `svn info`, or ''."""
    output = Popen([svn, 'info'], stdout=PIPE, stderr=PIPE).communicate()[0]
    for line in output.split('\n'):
        m = re.match(r'Working Copy Root Path: (.*)$', line)
        if m is not None:
            return m.group(1)
    return ''
|
|
|
|
def get_svn_wc_revision(svn):
    """Return the working copy's revision number from `svn info`, or 0."""
    info = Popen([svn, 'info'], stdout=PIPE).communicate()[0]
    for line in info.split('\n'):
        m = re.match(r'Revision: (\d+)$', line)
        if m is not None:
            return int(m.group(1))
    return 0
|
|
|
|
def get_svn_rel_path(svn):
    """Return the WC URL's path below its trunk/<tag>/<branch> root,
    always starting with '/'."""
    parts = get_svn_url(svn).split('/')
    for i in range(0, len(parts) - 1):
        at_top = parts[i] == 'trunk' or i > 0 and parts[i-1] in ('tags', 'branches')
        if at_top:
            return '/' + '/'.join(parts[i+1:])
    return '/'
|
|
|
|
def get_svn_top_level(svn):
    """Return the WC URL up to and including trunk/<tag>/<branch>, or ''."""
    parts = get_svn_url(svn).split('/')
    for i in range(0, len(parts)):
        at_top = parts[i] == 'trunk' or i > 0 and parts[i-1] in ('tags', 'branches')
        if at_top:
            return '/'.join(parts[:i+1])
    return ''
|
|
|
|
def get_svn_branch_list(svn):
    """Return the list of branch names under <root>/branches."""
    root = get_svn_root_url(svn)
    listing = Popen([svn, 'ls', root + '/branches'],
                    stdout=PIPE, stderr=PIPE).communicate()[0]
    return [re.sub(r'/$', '', line)
            for line in listing.split('\n')
            if re.match(r'^\s*$', line) is None]
|
|
|
|
def get_svn_tag_list(svn):
    """Return the list of tag names under <root>/tags."""
    root = get_svn_root_url(svn)
    listing = Popen([svn, 'ls', root + '/tags'],
                    stdout=PIPE, stderr=PIPE).communicate()[0]
    return [re.sub(r'/$', '', line)
            for line in listing.split('\n')
            if re.match(r'^\s*$', line) is None]
|
|
|
|
def is_file_under_vc(svn, path):
    """Return True if `svn info <path>` reports a 'Path:' field,
    i.e. the path is under version control."""
    proc = Popen([svn, 'info', path], stdout=PIPE, stderr=PIPE)
    result = False
    for line in iter(proc.stdout.readline, ''):
        if re.match('Path:', line):
            result = True
    return result
|
|
|
|
def get_svn_property(svn, prop, path):
    """Return the value of SVN property *prop* on *path* (raw propget output)."""
    proc = Popen([svn, 'propget', prop, path], stdout=PIPE)
    return proc.communicate()[0]
|
|
|
|
def set_svn_property(svn, prop, val, path):
    """Set SVN property *prop* to *val* on *path*."""
    proc = Popen([svn, 'propset', prop, val, path], stdout=PIPE)
    proc.wait()
|
|
|
|
def del_svn_property(svn, prop, path):
    """Delete SVN property *prop* from *path*."""
    proc = Popen([svn, 'propdel', prop, path], stdout=PIPE)
    proc.wait()
|
|
|
|
def filter_update(pout, out):
    """Filter and colorize `svn update` / `svn switch` output read from
    *pout*, writing the result to *out*.

    Blank lines are dropped, and 'Fetching external item' headers are
    deferred so an external is only announced if it actually produced
    output.  Per-item action codes are colorized.
    """
    external = ''             # name of the pending external, if any
    external_printed = True   # header for `external` already emitted?
    any_external_printed = False
    for line in iter(pout.readline, ''):
        m = re.match(r"Fetching external item into '(.*)':", line)
        if m is not None:
            # remember the external; print its header lazily below only
            # if it produces any real output
            external = m.group(1)
            external_printed = False
            continue
        if re.match(r'\s*$', line):
            continue
        if re.match(r'External at revision ', line):
            # only report the external's final revision if the external
            # itself was shown
            if external_printed:
                out.write(line)
            continue
        if re.match(r'(Updated.to|At) revision', line):
            if any_external_printed:
                out.write('\n')
            out.write(line)
            continue

        # anything not matched yet will cause an external to be shown
        if not external_printed:
            out.write("\nExternal '%s':\n" % external)
            external_printed = True
            any_external_printed = True
        if re.match(r'[ADUCGER ]{2}[B ][C ] ', line):
            # per-item status line: color by the action column
            action = line[0]
            prop_action = line[1]
            if action == 'A':
                ansi_color(out, 'green')
            elif action == 'D':
                ansi_color(out, 'red')
            elif action == 'U':
                ansi_color(out, 'cyan')
            elif action == 'C':
                ansi_color(out, 'yellow')
            elif action == 'G':
                ansi_color(out, 'magenta')
            elif action == ' ':
                # property-only update
                if prop_action == 'U':
                    ansi_color(out, 'cyan')
            out.write(line.rstrip())
            ansi_reset(out)
            out.write('\n')
            continue
        out.write(line)
|
|
|
|
def filter_status(line, out):
    """Colorize and print one `svn status` line by its action column."""
    line = line.rstrip()
    action = line[0]
    prop_action = line[1]
    # action letter -> display color
    action_colors = {
        'A': 'green',
        'M': 'cyan',
        'C': 'yellow',
        'D': 'red',
        'R': 'magenta',
    }
    if action in action_colors:
        ansi_color(out, action_colors[action])
    elif action == ' ' and prop_action == 'M':
        # property-only modification
        ansi_color(out, 'cyan')
    out.write(line)
    ansi_reset(out)
    out.write('\n')
|
|
|
|
def get_unknowns(svn):
    """Return the paths reported as unversioned ('?') by `svn status`."""
    pout = Popen([svn, 'status'], stdout=PIPE).stdout
    result = []
    for line in iter(pout.readline, ''):
        m = re.match(r'\? (.*)$', line)
        if m is not None:
            result.append(m.group(1))
    return result
|
|
|
|
def descendant_path(child, parent):
    """Return True if *child* equals *parent* or lies beneath it.

    When one argument is absolute and the other relative, the relative
    one is anchored at the current working directory; '.' as parent
    means the current directory.
    """
    if parent == '.':
        parent = os.getcwd()
    if child[0] != '/' and parent[0] == '/':
        child = os.getcwd() + '/' + child
    elif child[0] == '/' and parent[0] != '/':
        parent = os.getcwd() + '/' + parent
    if child == parent:
        return True
    # a true descendant must continue with a path separator, so that
    # '/a/bc' is not treated as below '/a/b'
    return child.startswith(parent) and child[len(parent)] == '/'
|
|
|
|
def get_stashes_dir(svn):
    """Return the working copy's .svn/stashes directory, creating it on
    first use."""
    path = get_svn_wc_root(svn) + '/.svn/stashes'
    if not os.path.isdir(path):
        os.mkdir(path)
    return path
|
|
|
|
def get_stash_ids(svn):
    """Return the sorted list of existing stash indices (stash.N files)."""
    ids = set()
    for entry in os.listdir(get_stashes_dir(svn)):
        m = re.match('stash\.(\d+)$', entry)
        if m is not None:
            ids.add(int(m.group(1)))
    return sorted(ids)
|
|
|
|
def get_stash_fname(svn, idx):
    """Return the full filename for stash number *idx*."""
    return '%s/stash.%d' % (get_stashes_dir(svn), idx)
|
|
|
|
def get_next_stash_idx(svn):
    """Return one greater than the highest existing stash index, or 1
    when no stashes exist."""
    existing = get_stash_ids(svn)
    return existing[-1] + 1 if existing else 1
|
|
|
|
# return (URL, path) tuple containing the full URL to the ref and the
# repo-relative path (ex: /branches/XXX)
# if ref is an actual file, URL is returned as an empty string
# tags resolve before branches
def resolve_reference(svn, ref):
    """Resolve *ref* (a file, 'trunk', a tag name, or a branch name) to
    a (URL, repo-relative-path) tuple; see the comment above."""
    def from_local(local_path):
        # build (full URL, path relative to the real repository root)
        root = get_svn_root_url(svn)
        real_root = get_svn_repo_real_root_url(svn)
        url = root + local_path
        return (url, url[len(real_root):])

    if os.path.exists(ref):
        return ('', ref)
    if ref == 'trunk':
        return from_local('/trunk')
    if ref in get_svn_tag_list(svn):
        return from_local('/tags/' + ref)
    if ref in get_svn_branch_list(svn):
        return from_local('/branches/' + ref)
    # ref was not an actual file, 'trunk', a tag name, or a branch name
    return ('', ref)
|
|
|
|
def find_branched_revision(svn, branch_url, branch_path, base_path):
    """Find the revision at which *branch_path* was copied (directly or
    through a chain of copies) from *base_path*.

    Scans `svn log -v` of *branch_url* for 'A <path> (from <src>:<rev>)'
    records, following the copy chain backwards.  Returns a
    (revision, source_path) tuple, or (-1, '') if no copy from
    *base_path* is found.
    """
    p = Popen([svn, 'log', '-v', branch_url], stdout=PIPE)
    search_path = branch_path
    for line in iter(p.stdout.readline, ''):
        # copy records look like: "   A /path (from /other/path:123)"
        m = re.match('\s+A\s+(.*?)\s+\(from\s+(.*?):(\d+)\)\s*$', line)
        if m is not None:
            new_path, old_path, rev = m.group(1, 2, 3)
            if new_path == search_path:
                if old_path == base_path:
                    try:
                        # stop reading the (potentially long) log output
                        p.kill()
                    except OSError:
                        pass
                    return (int(rev), old_path)
                # this copy created search_path from somewhere else;
                # follow the chain one step further back
                search_path = old_path
    return (-1, '')
|
|
|
|
def find_common_ancestor(svn, url1, path1, url2, path2):
    """Return (rev, path) where one ref branched from the other.

    Tries url1-branched-from-path2 first, then the reverse; returns
    (-1, '') if neither direction yields a copy record.
    """
    rev, path = find_branched_revision(svn, url1, path1, path2)
    if rev >= 0:
        return rev, path
    return find_branched_revision(svn, url2, path2, path1)
|
|
|
|
def filter_add_output(fh, out, svn, config):
    """Echo `svn add` output from *fh* to *out*, removing the
    svn:executable property from newly added files whose extension is
    listed in ignore_executable_extensions."""
    ignored_exts = config['ignore_executable_extensions']
    for raw in iter(fh.readline, ''):
        line = raw.strip()
        m = re.match('A\s+(.*)$', line)
        if m is not None:
            added_path = m.group(1)
            for ext in ignored_exts:
                if added_path.endswith(ext):
                    del_svn_property(svn, 'svn:executable', added_path)
        out.write(line)
        out.write('\n')
|
|
|
|
def relpath(path):
    """Return *path* relative to the current directory when it lies
    beneath it; otherwise return it unchanged."""
    prefix = os.getcwd() + '/'
    return path[len(prefix):] if path.startswith(prefix) else path
|
|
|
|
###########################################################################
|
|
# Subcommand Handlers #
|
|
###########################################################################
|
|
def add_h(argv, svn, out, config):
    """Handler for 'add': for each target, add any unversioned items
    beneath it (filtering svn:executable per configuration).

    Returns RET_REEXEC when no targets were given so the native svn
    command runs instead.
    """
    argv = argv[1:]  # strip off 'add'
    if len(argv) == 0:
        # do not handle if no targets are passed
        # no output filtering needed since nothing will be added
        return RET_REEXEC
    # NOTE: Python-2 idiom — filter() returns a list here
    if len(filter(lambda x: x.startswith('-'), argv)) != 0:
        # do not handle if any options are passed
        p = Popen([svn, 'add'] + argv, stdout=PIPE)
        filter_add_output(p.stdout, out, svn, config)
        return RET_OK
    # for each target specified, check if there are unversioned items
    # underneath it (for directories) and add them as well
    # if none are found, fall back to the native svn add
    unknowns = get_unknowns(svn)
    for path in argv:
        if path == '.':
            path = os.getcwd()
        if path.endswith('/'):
            path = path[:-1]
        found_one = False
        for u in unknowns:
            if descendant_path(u, path):
                p = Popen([svn, 'add', u], stdout=PIPE)
                filter_add_output(p.stdout, out, svn, config)
                found_one = True
        if not found_one:
            p = Popen([svn, 'add', path], stdout=PIPE)
            filter_add_output(p.stdout, out, svn, config)
    return RET_OK
|
|
|
|
def bisect_h(argv, svn, out, config):
    """Handler for 'bisect': binary-search revisions for a change.

    Operations are init/bad/good/reset; state (start/good/bad revision
    files) lives in <wc-root>/.svn/bisect.
    """
    def usage():
        sys.stderr.write('''Usage: bisect <operation>
Operations:
  init   initialize a new bisect operation
  bad    mark the current revision as bad - containing the change sought
  good   mark the current revision as good - older than the change sought
  reset  terminate the bisect operation and return to the original revision
''')
        return RET_ERR
    if len(argv) < 2:
        return usage()
    action = argv[1]
    if action not in ('init', 'bad', 'good', 'reset'):
        return usage()
    wc_root = get_svn_wc_root(svn)
    bisect_dir = '%s/.svn/bisect' % wc_root

    def get_rev_from_file(fname):
        # read a revision number from a state file;
        # -1 = file missing, -2 = unparsable contents
        path = '%s/%s' % (bisect_dir, fname)
        if not os.path.exists(path):
            return -1
        fh = open(path, 'r')
        line = fh.readline()
        fh.close()
        m = re.match('(\d+)$', line)
        if m is None:
            return -2
        return int(m.group(1))

    def write_rev_to_file(fname, rev):
        # store a revision number in a state file
        fh = open('%s/%s' % (bisect_dir, fname), 'w')
        fh.write(str(rev) + '\n')
        fh.close()

    def rm_bisect_files():
        # clear all bisect state
        for f in os.listdir(bisect_dir):
            os.unlink('%s/%s' % (bisect_dir, f))

    def get_revs_between(start, end):
        # revisions strictly between start and end, via `svn log`
        revs = []
        proc = Popen([svn, 'log', '-r%d:%d' % (start, end)], stdout=PIPE)
        for line in iter(proc.stdout.readline, ''):
            m = re.match(r'r(\d+).*\|.*\|.*\|', line)
            if m is not None:
                rev = int(m.group(1))
                if rev > start and rev < end:
                    revs.append(rev)
        return revs

    def do_bisect():
        # if both endpoints are known, update to the midpoint (or report
        # the first bad revision when no candidates remain)
        good_rev = get_rev_from_file('good')
        bad_rev = get_rev_from_file('bad')
        if good_rev < 0 or bad_rev < 0:
            return
        revs = get_revs_between(good_rev, bad_rev)
        if len(revs) < 1:
            if get_svn_wc_revision(svn) != bad_rev:
                update_h(['update', '-r%d' % bad_rev], svn, out, config)
            out.write('The first bad revision is %d\n' % get_svn_wc_revision(svn))
            return
        # NOTE: Python-2 integer division selects the middle candidate
        rev = revs[len(revs) / 2]
        update_h(['update', '-r%d' % rev], svn, out, config)
        out.write('Bisect: inspecting revision %d, %d revisions remaining\n'
                  % (rev, len(revs)))

    def init_err():
        sys.stderr.write('Error: did you bisect init first?\n')
        return RET_ERR

    if action == 'init':
        if not os.path.exists(bisect_dir):
            os.mkdir(bisect_dir)
        rm_bisect_files()
        # remember where the user started so `reset` can return there
        write_rev_to_file('start', get_svn_wc_revision(svn))
        out.write('Initialized for bisect\n')
    elif action == 'bad':
        if get_rev_from_file('start') < 0:
            return init_err()
        write_rev_to_file('bad', get_svn_wc_revision(svn))
        do_bisect()
    elif action == 'good':
        if get_rev_from_file('start') < 0:
            return init_err()
        write_rev_to_file('good', get_svn_wc_revision(svn))
        do_bisect()
    elif action == 'reset':
        rev = get_rev_from_file('start')
        if rev < 0:
            return init_err()
        rm_bisect_files()
        update_h(['update', '-r%d' % rev], svn, out, config)
    return RET_OK
|
|
|
|
# branch                          # list branches
# branch -d name                  # delete branch <name>
# branch [-s] name [source[@rev]] # create branch <name> [from <source> [at revision <rev>]]
def branch_h(argv, svn, out, config):
    """Handler for 'branch': list, delete, or create branches; see the
    usage summary above.  -s switches to the new branch after creating it."""
    argv = argv[1:]  # strip 'branch' command
    options, args = getopt.getopt(argv, 'ds')
    origin = get_svn_top_level(svn)
    root = get_svn_root_url(svn)
    if origin == '' or root == '':
        sys.stderr.write("Could not determine origin/root URL\n")
        return RET_ERR
    do_switch = False
    for opt, val in options:
        if opt == '-d':
            # delete branch
            if len(args) < 1:
                sys.stderr.write('Must supply branch name\n')
                return RET_ERR
            Popen([svn, 'rm', root + '/branches/' + args[0], '-m',
                   "Removed branch '%s'" % args[0]], stdout=out).wait()
            return RET_OK
        elif opt == '-s':
            # switch to the branch after creation
            do_switch = True
    if len(args) == 0:
        # no branch name: list branches, marking the current one with '*'
        bl = ['trunk'] + get_svn_branch_list(svn)
        current = origin.split('/')[-1]
        bl.sort()
        for b in bl:
            if b == current:
                out.write('*')
                ansi_color(out, 'green')
            else:
                out.write(' ')
            out.write(b)
            if b == current:
                ansi_reset(out)
            out.write('\n')
        return RET_OK
    branch_name = args[0]
    bl = get_svn_branch_list(svn)
    if branch_name in bl:
        sys.stderr.write('Error: branch %s already exists\n' % branch_name)
        return RET_ERR
    comment = "Created '%s' branch" % branch_name
    branch_path = root + '/branches/' + branch_name
    branch_source = origin
    if len(args) >= 2:
        # optional source reference, possibly pegged as name@rev
        source = args[1]
        m = re.match(r'(.*?)(@\d+)?$', source)
        if m is not None:
            ref_name, rev_str = m.group(1, 2)
            url, path = resolve_reference(svn, ref_name)
            if url != '':
                branch_source = url
                if rev_str is not None:
                    branch_source += rev_str
            else:
                branch_source = source
    Popen([svn, 'copy', branch_source, branch_path, '-m', comment], stdout=out).wait()
    if do_switch:
        return switch_h(['switch', branch_name], svn, out, config)
    return RET_OK
|
|
|
|
# tag -m old new          # rename tag <old> to <new>
# tag -d name             # delete tag <name>
# tag                     # list all tags
# tag name [source[@rev]] # create tag <name> [from source [at revision rev]]
def tag_h(argv, svn, out, config):
    """Handler for 'tag': list, create, rename, or delete tags; see the
    usage summary above.  -v makes the listing show date/origin/revision."""
    argv = argv[1:]  # strip command
    options, args = getopt.getopt(argv, 'dmv')
    origin = get_svn_top_level(svn)
    root = get_svn_root_url(svn)
    verbose = False
    if origin == '' or root == '':
        sys.stderr.write("Could not determine origin/root URL\n")
        return RET_ERR
    tag_list = get_svn_tag_list(svn)
    for opt, val in options:
        if opt == '-d':
            if len(args) != 1:
                sys.stderr.write('Error: must supply tag name\n')
                return RET_ERR
            tag_name = args[0]
            if not tag_name in tag_list:
                sys.stderr.write('Tag %s not found!\n' % tag_name)
                return RET_ERR
            # delete tag
            Popen([svn, 'rm', root + '/tags/' + tag_name, '-m',
                   "Removed tag '%s'" % tag_name], stdout=out).wait()
            return RET_OK
        elif opt == '-m':
            if len(args) != 2:
                sys.stderr.write('Error: must supply old and new tag names\n')
                return RET_ERR
            old_tag_name, tag_name = args
            if not old_tag_name in tag_list:
                sys.stderr.write('Tag %s not found!\n' % old_tag_name)
                return RET_ERR
            # rename tag via server-side move
            Popen([svn, 'mv',
                   root + '/tags/' + old_tag_name, root + '/tags/' + tag_name,
                   '-m', "Renamed tag '%s' to '%s'" % (old_tag_name, tag_name)],
                  stdout=out).wait()
            return RET_OK
        elif opt == '-v':
            verbose = True
        else:
            sys.stderr.write('Unrecognized option to "tag" command\n')
            return RET_ERR
    if len(args) == 0:
        # no tag name given: list tags (with details when verbose)
        tag_list.sort()
        max_tagname_length = 0
        if verbose:
            # compute column width for aligned output
            for t in tag_list:
                if len(t) > max_tagname_length:
                    max_tagname_length = len(t)
        for t in tag_list:
            if verbose:
                out.write(('%%-%ds' % max_tagname_length) % t)
                date = ''
                origin = ''
                rev = ''
                # derive creation date and copy source from the tag's
                # `svn log -v --stop-on-copy`
                pout = Popen([svn, 'log', '-v', '--stop-on-copy', '%s/tags/%s' % (root, t)],
                             stdout=PIPE).stdout
                for line in iter(pout.readline, ''):
                    m = re.match(r'r\d+\s*\|[^|]+\|\s*(\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2})', line)
                    if m is not None:
                        date = m.group(1)
                    m = re.match(r'\s\s\sA\s.*\/tags\/%s\s\(from\s(.*):(\d+)' % t, line)
                    if m is not None:
                        # reduce the source path to its trunk/tags/branches tail
                        origin = re.sub(r'.*\/(trunk|tags|branches)\b', r'\1', m.group(1))
                        rev = m.group(2)
                if date != '':
                    out.write(' ')
                    ansi_color(out, 'magenta')
                    out.write(date)
                    ansi_reset(out)
                if origin != '':
                    out.write(' ')
                    ansi_color(out, 'yellow')
                    out.write(origin)
                    ansi_reset(out)
                    ansi_color(out, 'blue', bold=True)
                    out.write('@' + rev)
                    ansi_reset(out)
            else:
                out.write(t)
            out.write('\n')
        return RET_OK
    tag_name = args[0]
    if tag_name in tag_list:
        sys.stderr.write('Error: tag "%s" already exists\n' % tag_name)
        return RET_ERR
    tag_source = origin
    if len(args) >= 2:
        # optional source reference, possibly pegged as name@rev
        source = args[1]
        m = re.match(r'(.*?)(@\d+)?$', source)
        if m is not None:
            ref_name, rev_str = m.group(1, 2)
            url, path = resolve_reference(svn, ref_name)
            if url != '':
                tag_source = url
                if rev_str is not None:
                    tag_source += rev_str
            else:
                tag_source = source
    Popen([svn, 'copy', tag_source,
           root + '/tags/' + tag_name,
           '-m', "Created '%s' tag" % tag_name], stdout=out).wait()
    return RET_OK
|
|
|
|
def switch_h(argv, svn, out, config):
    """Handler for 'switch': accept 'trunk', a branch name, or a tag
    name as the target, preserving the current relative path; anything
    else is passed through to native svn switch."""
    if len(argv) < 2:
        return RET_REEXEC
    root = get_svn_root_url(svn)
    path = get_svn_rel_path(svn)
    target = argv[1]

    # Resolve the target to a full URL; checked in the same order as
    # before: trunk, then branches, then tags.
    dest = None
    if target == 'trunk':
        dest = root + '/trunk' + path
    elif target in get_svn_branch_list(svn):
        dest = root + '/branches/' + target + path
    elif target in get_svn_tag_list(svn):
        dest = root + '/tags/' + target + path

    if dest is not None:
        pout = Popen([svn, 'switch', dest], stdout=PIPE).stdout
        filter_update(pout, out)
        out.write('URL: %s\n' % get_svn_url(svn))
        return RET_OK

    # argument is not a tag/branch name; run the native command
    pout = Popen([svn] + argv, stdout=PIPE).stdout
    filter_update(pout, out)
    return RET_OK
|
|
|
|
def merge_h(argv, svn, out, config):
    """Handler for 'merge': when given a bare branch name, merge every
    revision since that branch was created into the working copy.

    Anything else falls through to the native svn merge (RET_REEXEC).
    """
    if len(argv) < 2:
        return RET_REEXEC
    root = get_svn_root_url(svn)
    branches = get_svn_branch_list(svn)
    if not argv[1] in branches:
        return RET_REEXEC
    lines = Popen([svn, 'log', '--stop-on-copy', root + '/branches/' + argv[1]],
                  stdout=PIPE).communicate()[0].split('\n')
    rev = 0
    for line in lines:
        m = re.match(r'^r(\d+)\s', line)
        if m is not None:
            # keep overwriting: the last match in the (newest-first,
            # --stop-on-copy) log is the branch creation revision
            rev = m.group(1)
    if rev == 0:
        sys.stderr.write('Could not get first branch revision\n')
        return RET_ERR
    path = get_svn_rel_path(svn)
    Popen([svn, 'merge', '-r%s:HEAD' % rev,
           root + '/branches/' + argv[1] + path, '.'], stdout=out).wait()
    return RET_OK
|
|
|
|
def watch_lock_h(argv, svn, out, config):
    """Handler for 'watch-lock': poll a path's SVN lock owner once a
    minute until the lock is released, then print a banner."""
    # restore default SIGINT handling so Ctrl-C exits the polling loop
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    if len(argv) < 2:
        return RET_ERR
    path = argv[1]
    if os.path.exists(path):
        # Get the repository URL of the file being watched
        p = Popen([svn, 'info', path], stdout=PIPE)
        lines = p.communicate()[0].split('\n')
        for line in lines:
            m = re.match(r'URL: (.*)', line)
            if m is not None:
                path = m.group(1)
                break

    last_lock_owner = ''
    while True:
        lock_owner = ''
        p = Popen([svn, 'info', path], stdout=PIPE)
        lines = p.communicate()[0].split('\n')
        for line in lines:
            m = re.match(r'Lock\sOwner:\s*(.*)', line)
            if m is not None:
                lock_owner = m.group(1)
                break
        if lock_owner == '':
            # no lock owner reported: the lock was released
            break
        if lock_owner != last_lock_owner:
            out.write('Locked by: %s\n' % lock_owner)
            last_lock_owner = lock_owner
        time.sleep(60)

    # figlet-style "Unlocked!" banner
    out.write('''
  _   _       _            _            _ _
 | | | |_ __ | | ___   ___| | _____  __| | |
 | | | | '_ \| |/ _ \ / __| |/ / _ \/ _` | |
 | |_| | | | | | (_) | (__|   <  __/ (_| |_|
  \___/|_| |_|_|\___/ \___|_|\_\___|\__,_(_)

''')
    return RET_OK
|
|
|
|
def users_h(argv, svn, out, config):
    """Handler for 'users': print commit counts per author (grouped
    case-insensitively), most active first."""
    path = argv[1] if len(argv) > 1 else '.'
    users = {}
    p = Popen([svn, 'log', '-q', path], stdout=PIPE)
    for line in iter(p.stdout.readline, ''):
        m = re.match('r\d+\s*\|([^|]+)\|', line)
        if m is None:
            continue
        user = m.group(1).strip()
        key = user.lower()
        if key in users:
            users[key][1] += 1
        else:
            # remember the first-seen spelling of the name
            users[key] = [user, 1]
    ranked = sorted(users.values(), key=lambda x: x[1], reverse=True)
    for display_name, count in ranked:
        out.write("%8d %s\n" % (count, display_name))
    return RET_OK
|
|
|
|
def binaries_h(argv, svn, out, config, base_path = '.'):
    """Handler for 'binaries': recursively list versioned binary files
    (non-text svn:mime-type), marking those with svn:needs-lock as '*'.

    With --set-lock, svn:needs-lock is set on unmarked binaries ('S').
    Recurses via *base_path*.
    """
    for ent in os.listdir(base_path):
        if ent in ('.', '..', '.svn'):
            continue
        ent_path = os.sep.join([base_path, ent])
        # skip unversioned entries entirely
        if not is_file_under_vc(svn, ent_path):
            continue
        if os.path.isfile(ent_path):
            mime_type = get_svn_property(svn, 'svn:mime-type', ent_path)
            if mime_type != '' and not re.match(r'text/.*', mime_type):
                # we found a binary file
                needs_lock = get_svn_property(svn, 'svn:needs-lock', ent_path)
                if needs_lock:
                    out.write('* ')
                elif len(argv) >= 2 and argv[1] == '--set-lock':
                    set_svn_property(svn, 'svn:needs-lock', '*', ent_path)
                    out.write('S ')
                else:
                    out.write('  ')
                out.write(ent_path)
                out.write('\n')
        elif os.path.isdir(ent_path):
            # recurse into versioned subdirectories
            binaries_h(argv, svn, out, config, os.sep.join([base_path, ent]))
    return RET_OK
|
|
|
|
def lockable_h(argv, svn, out, config):
    """Handler for 'lockable': manage the svn:needs-lock property.

    lockable --status <paths...>  show which paths have svn:needs-lock
    lockable --remove <paths...>  remove svn:needs-lock from paths
    lockable <paths...>           set svn:needs-lock (default operation)
    """
    # BUG FIX: the original built each target as
    # os.sep.join([base_path, ob]) but `base_path` was never defined in
    # this function, so every branch raised NameError.  The paths given
    # on the command line are used directly instead.
    if len(argv) >= 2 and argv[1] == '--status':
        for ob_path in argv[2:]:
            needs_lock = get_svn_property(svn, 'svn:needs-lock', ob_path)
            if needs_lock:
                out.write('* ')
            else:
                out.write('  ')
            out.write(ob_path)
            out.write('\n')

    elif len(argv) >= 2 and argv[1] == '--remove':
        for ob_path in argv[2:]:
            del_svn_property(svn, 'svn:needs-lock', ob_path)

    else:
        # note this is the default assumed operation
        for ob_path in argv[1:]:
            set_svn_property(svn, 'svn:needs-lock', '*', ob_path)
    return RET_OK
|
|
|
|
def diff_h(argv, svn, out, config):
    """Handler for 'diff': support git-style ref1..ref2 / ref1...ref2
    range arguments and colorize the output.

    'ref1...ref2' pegs ref1 at the revision where ref2 branched from
    it.  With --summarize, output is colorized like `svn status` with
    repository-root prefixes stripped.
    """
    doing_summarize = '--summarize' in argv
    for i, v in enumerate(argv):
        # split "ref1..ref2" or "ref1...ref2"
        m = re.match('(.*?)(\.\.\.?)(.*)$', v)
        if m is not None:
            ref1, operator, ref2 = m.group(1, 2, 3)
            url1, path1 = resolve_reference(svn, ref1)
            if url1 == '':
                continue
            url2, path2 = resolve_reference(svn, ref2)
            if url2 == '':
                continue
            if operator == '...':
                # amend url1 to include the pegged revision from where ref2
                # originally branched from it
                r, path = find_common_ancestor(svn, url2, path2, url1, path1)
                if r < 0:
                    sys.stderr.write(('Could not find revision where "%s" ' +
                                      'branched from "%s"\n') % (ref2, ref1))
                    return RET_ERR
                url1 = get_svn_repo_real_root_url(svn) + path + '@%d' % r
            # replace the range argument with the two resolved URLs
            argv = argv[:i] + [url1, url2] + argv[i + 1:]
            break
    pout = Popen([svn] + argv, stdout=PIPE).stdout
    root_url = get_svn_root_url(svn)
    for line in iter(pout.readline, ''):
        if doing_summarize:
            # strip the repository root so paths are repo-relative
            line = line.replace(root_url, '')
            filter_status(line, out)
        else:
            colordiff(out, line)
    return RET_OK
|
|
|
|
def log_h(argv, svn, out, config):
    """Handler for 'svn log' with range syntax, filters and pretty formats.

    Custom argument handling (argv[0] is 'log' itself):
      'ref1..ref2'          -- show revisions on ref2 since it branched
                               from ref1 (rewritten to -rHEAD:N+1 URL2)
      --filter key=value    -- only show entries whose 'user' or 'msg'
                               matches; 'key=/regex' does a regex search
      --pretty[=]FORMAT     -- passed to LogEntry.display()
    Any bare (non-option) argument is resolved through resolve_reference
    and replaced with its URL.  Remaining arguments go to the real svn.
    """
    filters = []
    pretty = 'default'
    # First pass: rewrite range syntax and bare references.  Note that
    # argv is rebound while enumerate() still iterates the original
    # list; replacements are same-length so indices stay aligned.
    for i, v in enumerate(argv):
        m = re.match('(.*)(\.\.)(.*)$', v)
        if m is not None:
            ref1, operator, ref2 = m.group(1, 2, 3)
            url1, path1 = resolve_reference(svn, ref1)
            if url1 == '':
                continue
            url2, path2 = resolve_reference(svn, ref2)
            if url2 == '':
                continue
            r, path = find_common_ancestor(svn, url2, path2, url1, path1)
            if r < 0:
                sys.stderr.write(('Could not find revision where "%s" ' +
                    'branched from "%s"\n') % (ref2, ref1))
                return RET_ERR
            # log everything on ref2 after the branch point
            argv = argv[:i] + ['-rHEAD:%d' % (r + 1), url2] + argv[i + 1:]
            break
        if not v.startswith('-'):
            # bare argument: try to resolve it as a branch/tag reference
            url, path = resolve_reference(svn, v)
            if url == '':
                continue
            argv = argv[:i] + [url] + argv[i + 1:]
    # Second pass: repeatedly strip jsvn-specific options from argv
    # until none remain (restart the scan after each removal since the
    # indices shift).
    found_custom_arg = True
    while found_custom_arg:
        found_custom_arg = False
        for i, v in enumerate(argv):
            if v == '--filter':
                if len(argv) < i + 2:
                    sys.stderr.write('Error: --filter requires argument\n')
                    return RET_ERR
                # form: key=value or key=/regex
                m = re.match('(\S+)=(/?)(.*)$', argv[i + 1])
                if m is None:
                    sys.stderr.write('Error: Incorrect format for filter argument\n')
                    return RET_ERR
                filters.append(m.group(1, 2, 3))
                found_custom_arg = True
                argv = argv[:i] + argv[i+2:]
                break
            m = re.match('--pretty(?:=(.*))?', v)
            if m is not None:
                if m.group(1) is not None:
                    # --pretty=FORMAT form
                    pretty = m.group(1)
                    argv = argv[:i] + argv[i+1:]
                else:
                    # --pretty FORMAT form (separate argument)
                    if len(argv) < i + 2:
                        sys.stderr.write('Error: --pretty requires argument\n')
                        return RET_ERR
                    pretty = argv[i + 1]
                    argv = argv[:i] + argv[i+2:]
                found_custom_arg = True
                break
    def filters_pass(le):
        # Return True when the log entry satisfies every --filter.
        # NOTE(review): assumes LogEntry exposes .user and .message
        # (a sequence of lines) -- defined elsewhere in this file.
        for f in filters:
            keyword, op, match = f
            if keyword == 'user':
                target = le.user
            elif keyword == 'msg':
                target = ' '.join(le.message)
            else:
                sys.stderr.write('Unknown filter target "%s"\n' % keyword)
                return False
            if op == '/':
                # '/' marker: regex search
                if not re.search(match, target):
                    return False
            else:
                # otherwise: exact string match
                if match != target:
                    return False
        return True
    pout = Popen([svn] + argv, stdout=PIPE).stdout
    # Read entries one at a time; a yellow dashed separator is printed
    # before each displayed entry and once more after the last one.
    while True:
        le = LogEntry(pout)
        if len(le) > 0 and filters_pass(le):
            if pretty == 'default':
                ansi_color(out, 'yellow')
                out.write('-' * 72)
                ansi_reset(out)
                out.write('\n')
            le.display(out, pretty)
        if le.eof:
            break
    if pretty == 'default':
        ansi_color(out, 'yellow')
        out.write('-' * 72)
        ansi_reset(out)
        out.write('\n')
    return RET_OK
|
|
|
|
def update_h(argv, svn, out, config):
    """Run the native 'svn update' and colorize its output."""
    proc = Popen([svn] + argv, stdout=PIPE)
    filter_update(proc.stdout, out)
    return RET_OK
|
|
|
|
def status_h(argv, svn, out, config):
    """Handler for 'svn status': colorize output and tidy externals.

    Suppresses directory-external ('X') and unmodified file-external
    lines, prints an "External '<path>':" banner only when an external
    actually contains something interesting, and trims the working
    directory prefix from absolute paths reported inside externals.
    """
    external = ''
    external_printed = True
    something_printed = False
    # hoisted loop invariant: previously os.getcwd() was re-evaluated
    # several times per output line
    cwd_prefix = os.getcwd() + os.sep
    pout = Popen([svn] + argv, stdout=PIPE).stdout
    for line in iter(pout.readline, ''):
        line = line.rstrip()
        m = re.match(r"Performing status on external item at '(.*)':", line)
        if m is not None:
            # remember the external; its banner is deferred until a
            # line inside it survives the filters below
            external = m.group(1)
            external_printed = False
            continue
        if re.match(r'\s*$', line):
            continue

        # look for lines that should be ignored
        if re.match(STATUS_LINE_REGEX, line):
            action = line[0]
            if action == 'X':
                continue # don't print directory externals
            elif line.startswith(' X '):
                continue # don't print unmodified file externals

        # anything not matched yet will cause an external to be shown
        if not external_printed:
            if something_printed:
                out.write('\n')
            out.write("External '%s':\n" % external)
            external_printed = True

        # trim working directory from paths in externals
        if external != '':
            m = re.match(STATUS_LINE_REGEX, line)
            if m is not None:
                path = m.group(1)
                if path.startswith(cwd_prefix):
                    # columns 0-7 are status flags; path starts at col 8
                    line = line[:8] + path[len(cwd_prefix):]

        # look for lines to highlight
        if re.match(STATUS_LINE_REGEX, line):
            filter_status(line, out)
            something_printed = True
            continue
        out.write(line)
        out.write('\n')
        something_printed = True
    return RET_OK
|
|
|
|
def externals_h(argv, svn, out, config):
    """Print the path of every directory external in the working copy.

    Scans 'svn status' output for lines whose action column is 'X' and
    writes the path portion (columns 8 onward) of each.
    """
    status_out = Popen([svn, 'status'], stdout=PIPE).stdout
    for status_line in iter(status_out.readline, ''):
        if not re.match(STATUS_LINE_REGEX, status_line):
            continue
        if status_line[0] == 'X':
            # columns 0-7 hold the status flags; the path follows
            out.write(status_line[8:])
    return RET_OK
|
|
|
|
def revert_h(argv, svn, out, config):
    """Handler for 'svn revert' that supports directory targets.

    With no options, scans 'svn status' and reverts every added,
    modified, deleted, missing or property-modified item that falls
    under one of the given targets ('.' matches everything).  Entries
    are processed in reverse status order so children are reverted
    before their parent directories.

    Returns RET_OK when something was reverted (or options were passed
    straight through), RET_REEXEC to let the native command run.
    """
    argv = argv[1:] # strip off command
    if len(argv) == 0:
        # do not handle if no targets are passed
        return RET_REEXEC
    # Previously: len(filter(lambda x: ..., argv)) != 0 -- non-idiomatic
    # and broken on Python 3 where filter() is lazy; any() also
    # short-circuits on the first option found.
    if any(x.startswith('-') for x in argv):
        # do not handle if any options are passed
        Popen([svn, 'revert'] + argv).wait()
        return RET_OK
    did_something = False
    # normalize away trailing slashes so prefix matching below works
    for i, target in enumerate(argv):
        if target.endswith('/'):
            argv[i] = target[:-1]
    p = Popen([svn, 'status'], stdout=PIPE)
    status_lines = []
    for line in iter(p.stdout.readline, ''):
        status_lines.append(line)
    # loop-invariant prefix used to relativize absolute status paths
    cwd_prefix = os.getcwd() + os.sep
    for line in reversed(status_lines):
        m = re.match(STATUS_LINE_REGEX, line)
        if m is not None:
            action = line[0]
            prop_action = line[1]
            if action in ('A', 'M', 'D', '!') or prop_action == 'M':
                fname = m.group(1)
                for target in argv:
                    if fname.startswith(cwd_prefix):
                        fname = fname[len(cwd_prefix):]
                    if target == '.' or target == fname or fname.startswith(target + os.sep):
                        Popen([svn, 'revert', fname]).wait()
                        did_something = True
                        break
    return RET_OK if did_something else RET_REEXEC
|
|
|
|
def get_svn_contents_to_stash(targets, svn, out, keep, patch, externals):
    """Collect local modifications into a pair of patch files for 'stash'.

    Parses the output of 'svn diff' for *targets* and splits each hunk
    into one of two temp files:
      s_fname -- hunks being stashed (plus '#dir:' lines for added dirs)
      r_fname -- hunks kept in the working copy (reverse-applied later)
    When *patch* is True the user is prompted interactively per hunk
    (git add -p style); otherwise everything is stashed.

    Returns (s_fname, r_fname, revert_list, n_insertions, n_deletions,
    directories_added); revert_list is empty if the user quit.
    """
    s_fd, s_fname = tempfile.mkstemp(prefix = 'svn.stash.')
    r_fd, r_fname = tempfile.mkstemp(prefix = 'svn.stash.')
    os.close(s_fd)
    os.close(r_fd)
    s_fh = open(s_fname, 'w')
    r_fh = open(r_fname, 'w')

    # Scan 'svn status' first to discover externals and added
    # directories (added dirs are recorded specially since a patch
    # cannot re-create them).
    external_list = []
    directories_added = []
    status_cmd = [svn, 'status']
    if not externals:
        status_cmd += ['--ignore-externals']
    status_proc = Popen(status_cmd, stdout=PIPE)
    for line in iter(status_proc.stdout.readline, ''):
        m = re.match(r"Performing status on external item at '(.*)':", line)
        if m is not None:
            external_list.append(m.group(1))
        m = re.match(STATUS_LINE_REGEX, line)
        if m is not None:
            path = m.group(1)
            if line[0] == 'A':
                if os.path.isdir(path):
                    directories_added.append(relpath(path))

    # stash externals too when no explicit targets were given
    if externals and len(targets) == 0:
        targets = ['.'] + external_list

    # Mutable state shared with the nested helpers below (a dict so the
    # closures can rebind values without Python 3 'nonlocal').
    svars = {
        'revert_list': [],              # files whose changes get stashed
        'skip_all': not patch,          # True => stop prompting, take all
        'skip_file': True,              # True => no more prompts this file
        'answer': 'y',                  # last y/n decision
        'index_header': '',             # 'Index:'/'===' / '---' / '+++' lines
        'hunk_buildup': '',             # current hunk text being accumulated
        'index_fname': '',              # file the current hunks belong to
        'prompted_for_index': False,    # header already shown to the user
        'quit': False,                  # user aborted with 'q'
        'wrote_index_sf': False,        # header written to stash file
        'wrote_index_rf': False,        # header written to restore file
        'binary_file': False,           # current file is binary (skipped)
        'n_insertions': 0,
        'n_deletions': 0,
    }

    def update_answer():
        # Decide (possibly interactively) whether the current hunk is
        # stashed ('y') or kept ('n'); result lands in svars['answer'].
        if not patch:
            svars['answer'] = 'y'
            return
        if svars['skip_file'] or svars['skip_all']:
            # a previous yf/nf/ya/na answer already decided; keep it
            return
        if not svars['prompted_for_index']:
            # show the file header once before the first prompt
            for line in svars['index_header'].rstrip().split('\n'):
                colordiff(out, line)
            svars['prompted_for_index'] = True
        for li in svars['hunk_buildup'].rstrip().split('\n'):
            colordiff(out, li)
        answer = ''
        answers = ('y', 'n', 'yf', 'nf', 'ya', 'na', 'q', '?')
        while answer not in answers:
            ansi_color(out, 'magenta', bold=True)
            out.write('Stash this hunk (%s)? ' % ','.join(answers))
            ansi_reset(out)
            answer = sys.stdin.readline().rstrip().lower()
            if answer == '?':
                out.write('''y: yes, stash this hunk
n: no, do not stash this hunk
yf: yes, and stash every hunk from the rest of this file
nf: no, and do not stash any hunk from the rest of this file
ya: yes, and stash every remaining hunk
na: no, and do not stash any remaining hunks
q: quit and abort stash
?: show this help
''')
                answer = ''
        if answer == 'q':
            svars['quit'] = True
        elif answer[1:] == 'a':
            # ya/na: remember the y/n and stop asking entirely
            svars['skip_all'] = True
        elif answer[1:] == 'f':
            # yf/nf: remember the y/n for the rest of this file
            svars['skip_file'] = True
        svars['answer'] = answer[:1]

    def flush_hunk():
        # Write the accumulated hunk to the stash ('y') or restore
        # ('n') file, emitting the file header the first time either
        # file receives a hunk for the current Index.
        if svars['hunk_buildup'] != '':
            update_answer()
            if svars['answer'] == 'y':
                if not svars['wrote_index_sf']:
                    s_fh.write(svars['index_header'])
                    svars['wrote_index_sf'] = True
                s_fh.write(svars['hunk_buildup'])
            elif svars['answer'] == 'n':
                if not svars['wrote_index_rf']:
                    r_fh.write(svars['index_header'])
                    svars['wrote_index_rf'] = True
                r_fh.write(svars['hunk_buildup'])
            svars['hunk_buildup'] = ''

    def flush_file(new_file_name):
        # Finish the previous file (flush its last hunk, record it for
        # reverting if anything was stashed) and reset per-file state
        # for new_file_name ('' at end of diff).
        if svars['binary_file']:
            # binary diffs cannot be stashed as patches; warn and skip
            ansi_color(out, 'yellow', bold=True)
            out.write('Warning: not stashing binary file %s' % svars['index_fname'])
            ansi_reset(out)
            out.write('\n')
        else:
            flush_hunk()
        if svars['wrote_index_sf']:
            svars['revert_list'].append(svars['index_fname'])
        svars['skip_file'] = False
        svars['hunk_buildup'] = ''
        svars['index_fname'] = new_file_name
        svars['binary_file'] = False
        svars['wrote_index_sf'] = False
        svars['wrote_index_rf'] = False
        svars['prompted_for_index'] = False

    # Walk the unified diff line by line, splitting it into per-file
    # headers and per-hunk bodies.
    diff_proc = Popen([svn, 'diff'] + targets, stdout=PIPE)
    for line in iter(diff_proc.stdout.readline, ''):
        m = re.match(r'Index: (.*)', line)
        if m is not None:
            flush_file(m.group(1))
            svars['index_header'] = line
        elif (re.match(r'=+$', line) or
                re.match(r'--- ', line) or
                re.match(r'\+\+\+ ', line)):
            svars['index_header'] += line
        elif (re.match(r'@@ ', line) or re.match(r'Property.changes.on:', line)):
            # a new hunk (or property change block) starts here
            flush_hunk()
            svars['hunk_buildup'] = line
        elif re.match(r'Cannot display: file.marked.as.a.binary.type', line):
            svars['binary_file'] = True
        else:
            svars['hunk_buildup'] += line
            if line.startswith('+'):
                svars['n_insertions'] += 1
            elif line.startswith('-'):
                svars['n_deletions'] += 1
        if svars['quit']:
            break
    if not svars['quit']:
        # flush the final file's trailing hunk
        flush_file('')

    # Record added directories that fall under a stash target so the
    # pop operation can 'svn mkdir' them before applying the patch.
    for da in directories_added:
        stash_da = len(targets) == 0
        for t in targets:
            if descendant_path(da, t):
                stash_da = True
                break
        if stash_da:
            s_fh.write('#dir: %s\n' % da)
            if not da in svars['revert_list']:
                svars['revert_list'].append(da)

    s_fh.close()
    r_fh.close()
    if svars['quit']:
        # aborted: nothing should be reverted by the caller
        svars['revert_list'] = []

    return s_fname, r_fname, svars['revert_list'], svars['n_insertions'], svars['n_deletions'], directories_added
|
|
|
|
def stash_save_h(args, svn, out, config, keep, patch, externals):
    """Handler for 'stash' (save): stash local changes away.

    Collects the selected changes into a patch, reverts them from the
    working copy (unless *keep*), re-applies the hunks the user chose
    to keep, and writes the stash file with trailing '#info:' metadata
    (insertion/deletion counts, file summary, timestamp).

    Runs from the working-copy root so patch paths are consistent.
    """
    owd = os.getcwd()
    wc_dir = get_svn_wc_root(svn)
    os.chdir(wc_dir)
    s_fname, r_fname, revert_list, n_insertions, n_deletions, directories_added = \
        get_svn_contents_to_stash(args, svn, out, keep, patch, externals)
    if len(revert_list) == 0:
        out.write('No changes stashed.\n')
    else:
        if not keep:
            # reversed order so children are reverted before their
            # parent (added) directories
            for rf in reversed(revert_list):
                Popen([svn, 'revert', rf], stdout=PIPE).wait()
                if rf in directories_added and len(os.listdir(rf)) == 0:
                    # remove now-empty directories that were scheduled
                    # for addition
                    os.rmdir(rf)
            if r_fname != '':
                # re-apply the hunks the user chose NOT to stash
                Popen([svn, 'patch', r_fname], stdout=PIPE).wait()
        if s_fname != '':
            stash_idx = get_next_stash_idx(svn)
            stash_fname = get_stash_fname(svn, stash_idx)
            stash_fh = open(stash_fname, 'w')
            s_fh = open(s_fname, 'r')
            for line in iter(s_fh.readline, ''):
                stash_fh.write(line)
            s_fh.close()
            # write stash info
            if n_deletions > 0:
                stash_fh.write('#info: -%d\n' % n_deletions)
            if n_insertions > 0:
                stash_fh.write('#info: +%d\n' % n_insertions)
            if len(revert_list) == 1:
                stash_fh.write('#info: F: %s\n' % revert_list[0])
            else:
                stash_fh.write('#info: F: %d files\n' % len(revert_list))
            now = datetime.datetime.now()
            stash_fh.write('#info: @%04d-%02d-%02d %02d:%02d\n' %
                (now.year, now.month, now.day, now.hour, now.minute))
            stash_fh.close()
            out.write('Created stash %d\n' % stash_idx)
    # clean up the temporary patch files in all cases
    if s_fname != '':
        os.unlink(s_fname)
    if r_fname != '':
        os.unlink(r_fname)
    os.chdir(owd)
    return RET_OK
|
|
|
|
def stash_list_h(argv, svn, out, config):
    """Handler for 'stash --list': print a one-line summary per stash.

    Each stash file's '#info:' metadata lines are parsed into date,
    add/modify/delete texts, file summary and +/- line counts, then
    printed newest-first with per-field ANSI colors.
    """
    stash_ids = get_stash_ids(svn)
    # newest stash first
    for si in reversed(stash_ids):
        ins_text = ''
        del_text = ''
        add_text = ''
        modify_text = ''
        delete_text = ''
        summary_text = ''
        date = ''
        stash_fname = get_stash_fname(svn, si)
        fh = open(stash_fname, 'r')
        for line in iter(fh.readline, ''):
            m = re.match(r'#info: (.*)$', line)
            if m is not None:
                info = m.group(1)
                # dispatch on the metadata prefix; see stash_save_h for
                # the writer side of this format
                if info.startswith('A:'):
                    add_text = info
                elif info.startswith('M:'):
                    modify_text = info
                elif info.startswith('D:'):
                    delete_text = info
                elif info.startswith('F:'):
                    summary_text = info[3:]
                elif info.startswith('-'):
                    del_text = info
                elif info.startswith('+'):
                    ins_text = info
                elif info.startswith('@'):
                    date = info[1:]
        fh.close()
        # left-aligned stash number, then the colored fields
        out.write('%-3d' % si)
        elements = [
            (date, 'cyan'),
            (add_text, 'green'),
            (modify_text, 'yellow'),
            (delete_text, 'red'),
            (summary_text, 'magenta'),
        ]
        for elem, color in elements:
            if elem != '':
                out.write(' ')
                ansi_color(out, color)
                out.write(elem)
                ansi_reset(out)
        # trailing '(-N +M)' line-count summary, when present
        if del_text != '' or ins_text != '':
            out.write(' (')
            if del_text != '':
                ansi_color(out, 'red')
                out.write(del_text)
                ansi_reset(out)
            if ins_text != '':
                if del_text != '':
                    out.write(' ')
                ansi_color(out, 'green')
                out.write(ins_text)
                ansi_reset(out)
            out.write(')')
        out.write('\n')
    return RET_OK
|
|
|
|
def stash_pop_h(args, svn, out, config, keep):
    """Handler for 'stash --pop': apply a stash to the working copy.

    args[0] (optional) selects the stash ID; defaults to the most
    recent stash.  Runs from the working-copy root, re-creates any
    '#dir:' directories recorded in the stash, applies the patch, and
    deletes the stash file on success unless *keep*.
    """
    owd = os.getcwd()
    wc_dir = get_svn_wc_root(svn)
    os.chdir(wc_dir)
    stash_ids = get_stash_ids(svn)
    if len(stash_ids) > 0:
        stash_idx = stash_ids[-1]
        if len(args) >= 1:
            stash_idx = int(args[0])
        if stash_idx not in stash_ids:
            # validate like stash_show_h does; previously an unknown ID
            # crashed with IOError when opening the stash file
            out.write('Invalid stash ID\n')
        else:
            stash_fname = get_stash_fname(svn, stash_idx)
            # re-create stashed directories so the patch has somewhere
            # to put their files
            fh = open(stash_fname, 'r')
            for line in iter(fh.readline, ''):
                m = re.match('#dir: (.*)', line)
                if m is not None:
                    Popen([svn, 'mkdir', m.group(1)]).wait()
            fh.close()
            p = Popen([svn, 'patch', stash_fname], stdout=PIPE)
            filter_update(p.stdout, out)
            rc = p.wait()
            if rc == 0:
                if not keep:
                    os.unlink(stash_fname)
                out.write('Popped stash %d\n' % stash_idx)
            else:
                out.write('Error popping stash %d\n' % stash_idx)
    else:
        out.write('No stashes to pop\n')
    os.chdir(owd)
    return RET_OK
|
|
|
|
def stash_show_h(argv, svn, out, config):
    """Display a stash (the most recent by default) as a colorized diff.

    '#dir:' metadata lines are rendered as "New Directory:" notices and
    '#info:' metadata lines are suppressed; everything else goes
    through the diff colorizer.
    """
    stash_ids = get_stash_ids(svn)
    if not stash_ids:
        out.write('No stashes to show\n')
        return RET_OK
    # default to the newest stash unless an ID was given
    stash_id = int(argv[0]) if len(argv) >= 1 else stash_ids[-1]
    if stash_id not in stash_ids:
        out.write('Invalid stash ID\n')
        return RET_OK
    stash_fh = open(get_stash_fname(svn, stash_id), 'r')
    for stash_line in iter(stash_fh.readline, ''):
        dir_match = re.match('#dir: (.*)', stash_line)
        if dir_match is not None:
            ansi_color(out, 'magenta', bold=True)
            out.write('New Directory: %s' % dir_match.group(1))
            ansi_reset(out)
            out.write('\n')
        elif not re.match('#info:', stash_line):
            colordiff(out, stash_line)
    stash_fh.close()
    return RET_OK
|
|
|
|
def stash_drop_h(argv, svn, out, config):
    """Handler for 'stash --drop': delete a stash without applying it.

    argv[0] (optional) selects the stash ID; defaults to the most
    recent stash.
    """
    stash_ids = get_stash_ids(svn)
    if len(stash_ids) > 0:
        stash_id = stash_ids[-1]
        if len(argv) >= 1:
            stash_id = int(argv[0])
        if stash_id in stash_ids:
            stash_fname = get_stash_fname(svn, stash_id)
            os.unlink(stash_fname)
            out.write('Dropped stash %d\n' % stash_id)
        else:
            # validate like stash_show_h does; previously an unknown ID
            # made os.unlink raise OSError on a nonexistent file
            out.write('Invalid stash ID\n')
    else:
        out.write('No stashes to drop\n')
    return RET_OK
|
|
|
|
def stash_h(argv, svn, out, config):
    """Handler for the 'stash' pseudo-command.

    Parses stash options and dispatches to the save / list / pop /
    show / drop sub-handlers.  Flags:
      -k/--keep        keep changes in the working copy (save/pop)
      -p/--patch       interactively pick hunks (save)
      -e/--externals, --noexternals
                       include/exclude externals (default from config)
    """
    argv = argv[1:] # strip 'stash' command
    try:
        opts, args = getopt.getopt(argv, 'ekp',
                ['list', 'pop', 'show', 'drop', 'externals', 'noexternals',
                 'keep', 'patch'])
    except getopt.GetoptError as e:
        # previously an unknown option produced an unhandled traceback
        sys.stderr.write('Error: %s\n' % e)
        return RET_ERR
    keep = False
    patch = False
    externals = config['stash_externals']
    operation = 'save'
    for opt, arg in opts:
        if opt == '--list':
            operation = 'list'
        elif opt == '--pop':
            operation = 'pop'
        elif opt == '--show':
            operation = 'show'
        elif opt == '--drop':
            operation = 'drop'
        elif opt in ('-k', '--keep'):
            keep = True
        elif opt in ('-p', '--patch'):
            patch = True
        elif opt in ('-e', '--externals'):
            externals = True
        elif opt == '--noexternals':
            externals = False
    if operation == 'list':
        return stash_list_h(args, svn, out, config)
    elif operation == 'pop':
        return stash_pop_h(args, svn, out, config, keep)
    elif operation == 'show':
        return stash_show_h(args, svn, out, config)
    elif operation == 'drop':
        return stash_drop_h(args, svn, out, config)
    # default when no operation flag was given
    return stash_save_h(args, svn, out, config, keep, patch, externals)
|
|
|
|
def root_h(argv, svn, out, config):
    """Print the repository root URL of the current working copy."""
    out.write('%s\n' % get_svn_root_url(svn))
    return RET_OK
|
|
|
|
def url_h(argv, svn, out, config):
    """Print the repository URL of a working-copy path (default '.')."""
    path = argv[1] if len(argv) > 1 else '.'
    out.write(get_svn_url(svn, path) + '\n')
    return RET_OK
|
|
|
|
def clean_h(argv, svn, out, config):
    """Handler for the 'clean' pseudo-command (git-clean style).

    Removes unversioned (and, with -x, ignored) files and directories
    reported by 'svn status'.  Exactly one of -n/--dry-run (print what
    would be removed) or -f/--force (actually remove) is required.
    """
    argv = argv[1:] # strip command
    try:
        opts, args = getopt.getopt(argv, 'fnx',
                ['force', 'dry-run', 'ignore-ignores'])
    except getopt.GetoptError as e:
        # previously an unknown option produced an unhandled traceback
        sys.stderr.write('Error: %s\n' % e)
        return RET_ERR
    force = False
    dry_run = False
    ignore_ignores = False
    for opt, arg in opts:
        if opt in ('-f', '--force'):
            force = True
        elif opt in ('-n', '--dry-run'):
            dry_run = True
        elif opt in ('-x', '--ignore-ignores'):
            ignore_ignores = True
    # exactly one of -n / -f must be given
    if not force and not dry_run:
        sys.stderr.write('Error: specify either -n or -f\n')
        return RET_ERR
    if force and dry_run:
        sys.stderr.write('Error: specify only one of -n or -f\n')
        return RET_ERR
    # copy, so appending the status option does not mutate 'args'
    # (previously status_args aliased the same list)
    status_args = list(args)
    if ignore_ignores:
        status_args.append('--no-ignore')
    clean_paths = []
    pout = Popen([svn, 'status'] + status_args, stdout=PIPE).stdout
    for line in iter(pout.readline, ''):
        m = re.match(STATUS_LINE_REGEX, line)
        if m is not None:
            action = line[0]
            if action in ('?', 'I'): # unversioned or ignored
                clean_paths.append(m.group(1))
    for cp in clean_paths:
        if dry_run:
            out.write("Would remove %s\n" % cp)
        if force:
            if os.path.isdir(cp):
                shutil.rmtree(cp)
            elif os.path.isfile(cp):
                os.unlink(cp)
    return RET_OK
|
|
|
|
###########################################################################
|
|
# Main #
|
|
###########################################################################
|
|
def do_cmd(argv, realsvn, config, expand=True):
    """Dispatch one jsvn command line.

    Expands user aliases (callable, string, or list; '!'-prefixed
    aliases run an external shell command), applies the stock svn
    abbreviations, optionally pipes output through a pager for
    read-only subcommands, and routes the subcommand to a jsvn handler
    or to the native svn client.

    argv    -- command line without the program name
    realsvn -- path to the real svn binary ('' if not found)
    expand  -- False to suppress alias expansion (used for re-exec)
    """
    global using_color

    if len(argv) == 0:
        # no subcommand: let the native svn print its usage text
        Popen([realsvn]).wait()
        return

    if expand and (argv[0] in config['aliases']):
        # expand aliases
        orig_subcommand = argv[0]
        alias = config['aliases'][argv[0]]
        if hasattr(alias, '__call__'):
            # alias is a python function, call it
            alias(argv)
            return
        elif type(alias) == types.StringType:
            # NOTE: types.StringType/ListType are Python-2-only; this
            # script targets Python 2.
            argv = [alias] + argv[1:]
        elif type(alias) == types.ListType:
            argv = alias + argv[1:]
        else:
            sys.stderr.write('Unsupported type for alias "%s"\n' % alias)

        # after expanding the alias, check if it is an external
        # command to launch
        if argv[0].startswith('!'):
            # execute an external program
            argv[0] = argv[0][1:] # strip leading '!'
            # the original subcommand name becomes the first argument
            argv = [argv[0], orig_subcommand] + argv[1:]
            # NOTE(review): a list combined with shell=True runs only
            # argv[0] through the shell on POSIX (the rest become shell
            # positional parameters) -- confirm intended behavior
            Popen(argv, shell=True).wait()
            return

    # after processing user aliases, apply default Subversion aliases
    svn_aliases = {
        'praise': 'blame',
        'annotate': 'blame',
        'ann': 'blame',
        'cl': 'changelist',
        'co': 'checkout',
        'ci': 'commit',
        'cp': 'copy',
        'del': 'delete',
        'remove': 'delete',
        'rm': 'delete',
        'di': 'diff',
        '?': 'help',
        'h': 'help',
        'ls': 'list',
        'mv': 'move',
        'rename': 'move',
        'ren': 'move',
        'pdel': 'propdel',
        'pd': 'propdel',
        'pedit': 'propedit',
        'pe': 'propedit',
        'pget': 'propget',
        'pg': 'propget',
        'plist': 'proplist',
        'pl': 'proplist',
        'pset': 'propset',
        'ps': 'propset',
        'stat': 'status',
        'st': 'status',
        'sw': 'switch',
        'up': 'update',
    }
    if argv[0] in svn_aliases:
        argv[0] = svn_aliases[argv[0]]

    # Decide where output goes: straight to stdout, or through a pager
    # for interactive use of read-only subcommands.  Color only when
    # writing to a terminal.
    out = sys.stdout
    using_pager = False
    using_color = sys.stdout.isatty() and config['use_color']
    if sys.stdout.isatty() and config['use_pager']:
        if (len(argv) >= 1 and argv[0] in
                ('blame', 'cat', 'diff', 'help', 'list', 'log',
                 'propget', 'proplist')):
            # pager preference: config, then $PAGER, then less
            if config['pager'] != '':
                pager = config['pager']
            elif 'PAGER' in os.environ and os.environ['PAGER'] != '':
                pager = os.environ['PAGER']
            else:
                pager = 'less -FRX'
            pager_proc = Popen(pager, shell=True, stdin=PIPE)
            out = pager_proc.stdin
            using_pager = True

    if realsvn == '':
        sys.stderr.write("Error: 'svn' not found in path\n")
        # NOTE(review): other paths return None; callers don't appear
        # to check this value
        return 1

    # jsvn subcommand handlers; each returns RET_OK, RET_ERR, or
    # RET_REEXEC (fall through to the native svn client)
    handlers = {
        'add': add_h,
        'bisect': bisect_h,
        'branch': branch_h,
        'clean': clean_h,
        'externals': externals_h,
        'switch': switch_h,
        'merge': merge_h,
        'tag': tag_h,
        'diff': diff_h,
        'log': log_h,
        'root': root_h,
        'update': update_h,
        'url' : url_h,
        'watch-lock': watch_lock_h,
        'users': users_h,
        'binaries': binaries_h,
        'lockable': lockable_h,
        'status': status_h,
        'stash': stash_h,
        'revert': revert_h,
    }

    do_native_exec = True
    if argv[0] in handlers:
        r = handlers[argv[0]](argv, realsvn, out, config)
        if r == RET_OK or r == RET_ERR:
            # handled (successfully or not); don't run native svn
            do_native_exec = False
    elif argv[0].startswith('__'):
        # allow double-underscore commands to execute the native
        # subversion command (e.g. "__st")
        argv[0] = argv[0][2:]

    if do_native_exec:
        Popen([realsvn] + argv, stdout=out).wait()

    if using_pager:
        # closing the pipe signals EOF; wait for the pager to exit
        out.close()
        pager_proc.wait()
|
|
|
|
def main(argv):
    """Entry point: locate the real svn client, load config, dispatch.

    SIGINT is ignored at the Python level so the child svn process
    handles Ctrl-C; KeyboardInterrupt is swallowed for a quiet exit.
    Always returns 0.
    """
    # Determine the name of the real svn binary for this platform.
    svn_bin_name = 'svn.exe' if platform.system() == 'Windows' else 'svn'
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    realsvn = find_in_path(svn_bin_name)
    config = get_config(realsvn)
    if config['svn']:
        # config may override the binary discovered on PATH
        realsvn = config['svn']

    try:
        # set up execution environment for user-defined function aliases
        def do(cmd, expand=True):
            # accept a bare string as shorthand for a one-element list
            if type(cmd) == types.StringType:
                cmd = [cmd]
            do_cmd(cmd, realsvn, config, expand)

        config['do'] = do
        config['Popen'] = Popen
        config['PIPE'] = PIPE

        do_cmd(argv, realsvn, config)
    except KeyboardInterrupt:
        # Ctrl-C: exit quietly without a traceback
        pass

    return 0
|
|
|
|
if __name__ == "__main__":
    # Run the wrapper; IOError (e.g. a broken pipe when the pager
    # exits early) is silenced for a clean exit.
    exit_code = 0
    try:
        exit_code = main(sys.argv[1:])
    except IOError:
        pass
    sys.exit(exit_code)
|