Changeset b4944cd04b48…
Parent 154a3761de2e…
by Benjamin Pollack <benjamin@fogcreek.com>
Changes to 11 files · Browse files at b4944cd04b48 Showing diff from parent 154a3761de2e Diff from another changeset...
@@ -1,11 +1,9 @@ '''Base class for store implementations and store-related utility code.'''
-import sys
import os
import tempfile
import binascii
import bfutil
-import shutil
from mercurial import util, node, error, url as url_, hg
from mercurial.i18n import _
@@ -37,6 +35,10 @@ '''Put source file into the store under <filename>/<hash>.'''
raise NotImplementedError('abstract method')
+ def exists(self, hash):
+ '''Check to see if the store contains the given hash.'''
+ raise NotImplementedError('abstract method')
+
def get(self, files):
'''Get the specified big files from the store and write to local
files under repo.root. files is a list of (filename, hash)
|
@@ -1,15 +1,9 @@ '''High-level command functions: bfadd() et al., plus the cmdtable.'''
import os
-import re
-import errno
-import binascii
import shutil
-import httplib
-import posixpath
-import BaseHTTPServer
-from mercurial import util, commands, match as match_, hg, node, context, error
+from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _
import bfutil, basestore
@@ -156,6 +150,7 @@
hash = fctx.data().strip()
path = bfutil.find_file(rsrc, hash)
+ ### TODO: What if the file is not cached?
data = ''
with open(path, 'rb') as fd:
data = fd.read()
@@ -332,15 +327,18 @@
store = basestore._open_store(rsrc, rdst.path, put=True)
- at = 1
+ at = 0
for hash in files:
ui.progress(_('Uploading bfiles'), at, unit='bfile', total=len(files))
- at += 2
- source = bfutil.find_file(rsrc, hash, False)
+ if store.exists(hash):
+ at += 1
+ continue
+ source = bfutil.find_file(rsrc, hash)
if not source:
raise util.Abort(_('Missing bfile %s needs to be uploaded') % hash)
# XXX check for errors here
store.put(source, hash)
+ at += 1
ui.progress('Uploading bfiles', None)
def verify_bfiles(ui, repo, all=False, contents=False):
@@ -379,7 +377,7 @@ expectedhash = repo[None][bfutil.standin(bfile)].data().strip()
mode = os.stat(repo.wjoin(bfutil.standin(bfile))).st_mode
if not os.path.exists(repo.wjoin(bfile)) or expectedhash != bfutil.hashfile(repo.wjoin(bfile)):
- path = bfutil.find_file(repo, expectedhash, False)
+ path = bfutil.find_file(repo, expectedhash)
if path is None:
toget.append((bfile, expectedhash))
else:
@@ -455,6 +453,7 @@ toget = []
at = 0
updated = 0
+ removed = 0
printed = False
if bfiles:
ui.status(_('Getting changed bfiles\n'))
@@ -462,10 +461,15 @@
for bfile in bfiles:
at += 1
+ if os.path.exists(repo.wjoin(bfile)) and not os.path.exists(repo.wjoin(bfutil.standin(bfile))):
+ os.unlink(repo.wjoin(bfile))
+ removed += 1
+ bfdirstate.forget(bfutil.unixpath(bfile))
+ continue
expectedhash = repo[None][bfutil.standin(bfile)].data().strip()
mode = os.stat(repo.wjoin(bfutil.standin(bfile))).st_mode
if not os.path.exists(repo.wjoin(bfile)) or expectedhash != bfutil.hashfile(repo.wjoin(bfile)):
- path = bfutil.find_file(repo, expectedhash, False)
+ path = bfutil.find_file(repo, expectedhash)
if not path:
toget.append((bfile, expectedhash))
else:
@@ -491,7 +495,6 @@ updated += 1
bfdirstate.normal(bfutil.unixpath(filename))
- removed = 0
for bfile in bfdirstate:
if bfile not in bfiles:
if os.path.exists(repo.wjoin(bfile)):
|
|
@@ -43,8 +43,7 @@ method = getattr(repo, name)
#if not (isinstance(method, types.MethodType) and
# method.im_func is repo.__class__.commitctx.im_func):
- if (isinstance(method, types.FunctionType) and
- method.func_name == 'wrap'):
+ if isinstance(method, types.FunctionType) and method.func_name == 'wrap':
ui.warn(_('kbfiles: repo method %r appears to have already been '
'wrapped by another extension: '
'kbfiles may behave incorrectly\n')
@@ -101,13 +100,7 @@
m = copy.copy(match)
m._files = [tostandin(f) for f in m._files]
- orig_matchfn = m.matchfn
- def matchfn(f):
- if bfutil.is_standin(f):
- return orig_matchfn(bfutil.split_standin(f))
- else:
- return orig_matchfn(f) and not inctx(bfutil.standin(f), ctx2)
- m.matchfn = matchfn
+
# get ignored clean and unknown but remove them later if they were not asked for
try:
result = super(bfiles_repo, self).status(node1, node2, m, True, True, True, subrepos)
@@ -122,7 +115,7 @@ # was already computed using super's status.
bfdirstate = bfutil.open_bfdirstate(ui, self)
match._files = [f for f in match._files if f in bfdirstate]
- s = bfdirstate.status(match, [], True, True, True)
+ s = bfdirstate.status(match, [], listignored, listclean, listunknown)
(unsure, modified, added, removed, missing, unknown, ignored, clean) = s
if parentworking:
for bfile in unsure:
@@ -258,9 +251,29 @@ bfiles = bfutil.list_bfiles(repo)
match = copy.copy(match)
orig_matchfn = match.matchfn
- match._files = [f for f in match._files if f not in bfiles]
+
+ # Check both the list of bfiles and the list of standins because if a bfile was removed, it
+ # won't be in the list of bfiles at this point
match._files += sorted(standins)
+ actualfiles = []
+ for f in match._files:
+ fstandin = bfutil.standin(f)
+
+ # Ignore known bfiles and standins
+ if f in bfiles or fstandin in standins:
+ continue
+
+ # Append directory separator to avoid collisions
+ if not fstandin.endswith('/'):
+ fstandin += '/'
+
+ # Prevalidate matching standin directories
+ if any(st for st in match._files if st.startswith(fstandin)):
+ continue
+ actualfiles.append(f)
+ match._files = actualfiles
+
def matchfn(f):
if orig_matchfn(f):
return f not in bfiles
@@ -451,6 +464,10 @@ for f in remove:
if not after:
os.unlink(repo.wjoin(f))
+ currentdir = os.path.split(f)[0]
+ while currentdir and not os.listdir(repo.wjoin(currentdir)):
+ os.rmdir(repo.wjoin(currentdir))
+ currentdir = os.path.split(currentdir)[0]
bfdirstate.remove(bfutil.unixpath(f))
bfdirstate.write()
@@ -734,6 +751,15 @@ bfcommands.update_bfiles(repo.ui, repo)
return result
+# When we rebase a repository with remotely changed bfiles, we need
+# to explicitly do a clean update so that the entries in .kbf are
+# updated and the new bfiles are pulled
+def override_pull(orig, ui, repo, source="default", **opts):
+ result = orig(ui, repo, source, **opts)
+ if opts.get('rebase', False):
+ commands.update(repo.ui, repo, clean = True)
+ return result
+
def override_archive(orig, repo, dest, node, kind, decode=True, matchfn=None,
prefix=None, mtime=None, subrepos=None):
# No need to lock because we are only reading history and bfile caches
@@ -799,6 +825,7 @@ getdata = ctx[f].data
if bfutil.is_standin(f):
path = bfutil.find_file(repo, getdata().strip())
+ ### TODO: What if the file is not cached?
f = bfutil.split_standin(f)
def getdatafn():
@@ -992,6 +1019,7 @@ entry[1].extend(summaryopt)
entry = extensions.wrapcommand(commands.table, 'update', override_update)
+ entry = extensions.wrapcommand(commands.table, 'pull', override_pull)
entry = extensions.wrapfunction(filemerge, 'filemerge', override_filemerge)
entry = extensions.wrapfunction(cmdutil, 'copy', override_copy)
|
@@ -2,14 +2,12 @@
import os
import errno
-import binascii
-import tempfile
import inspect
import shutil
import stat
from mercurial import \
- util, dirstate, context, cmdutil, error, match as match_, node
+ util, dirstate, cmdutil, match as match_
from mercurial.i18n import _
short_name = '.kbf'
@@ -110,15 +108,13 @@def in_system_cache(ui, hash):
return os.path.exists(system_cache_path(ui, hash))
-def find_file(repo, hash, getfile=True):
+def find_file(repo, hash):
if in_cache(repo, hash):
+ repo.ui.note(_('Found %s in cache\n') % hash)
return cache_path(repo, hash)
if in_system_cache(repo.ui, hash):
+ repo.ui.note(_('Found %s in system cache\n') % hash)
return system_cache_path(repo.ui, hash)
- if getfile:
- (success, failure) = basetore._open_store(repo).get([(f, hash)])
- if (f, hash) in success:
- return cache_path(repo, hash)
return None
def open_bfdirstate(ui, repo):
@@ -128,7 +124,10 @@ '''
admin = repo.join(long_name)
opener = util.opener(admin)
- bfdirstate = dirstate.dirstate(opener, ui, repo.root)
+ if hasattr(repo.dirstate, '_validate'):
+ bfdirstate = dirstate.dirstate(opener, ui, repo.root, repo.dirstate._validate)
+ else:
+ bfdirstate = dirstate.dirstate(opener, ui, repo.root)
# If the bfiles dirstate does not exist, populate and create it. This
# ensures that we create it on the first meaningful bfiles operation in
@@ -248,8 +247,9 @@
def compose_standin_matcher(repo, rmatcher):
'''Return a matcher that accepts standins corresponding to the files
- accepted by rmatcher.'''
- smatcher = get_standin_matcher(repo)
+ accepted by rmatcher. Pass the list of files in the matcher as the
+ paths specified by the user.'''
+ smatcher = get_standin_matcher(repo, rmatcher.files())
isstandin = smatcher.matchfn
def composed_matchfn(f):
return isstandin(f) and rmatcher.matchfn(split_standin(f))
|
@@ -21,16 +21,19 @@ self.sendfile(source, hash)
self.ui.debug('put %s to remote store\n' % source)
+ def exists(self, hash):
+ return self._verify(hash)
+
def sendfile(self, filename, hash):
if self._verify(hash):
return
self.ui.debug('httpstore.sendfile(%s, %s)\n' % (filename, hash))
baseurl, authinfo = url_.getauthinfo(self.url)
+ fd = None
try:
- fd = open(filename, 'rb')
- request = urllib2.Request(bfutil.urljoin(baseurl, hash))
- request.add_data(fd.read())
+ fd = url_.httpsendfile(filename, 'rb')
+ request = urllib2.Request(bfutil.urljoin(baseurl, hash), fd)
try:
url = self.opener.open(request)
self.ui.note(_('[OK] %s/%s\n') % (self.rawurl, url.geturl()))
@@ -39,7 +42,7 @@ except Exception, e:
raise util.Abort(_('%s') % e)
finally:
- fd.close()
+ if fd: fd.close()
def _getfile(self, tmpfile, filename, hash):
(baseurl, authinfo) = url_.getauthinfo(self.url)
|
@@ -18,6 +18,9 @@ '''Any file that is put must already be in the system wide cache so do nothing.'''
return
+ def exists(self, hash):
+ return bfutil.in_system_cache(self.repo.ui, hash)
+
def _getfile(self, tmpfile, filename, hash):
if bfutil.in_system_cache(self.ui, hash):
return bfutil.system_cache_path(self.ui, hash)
|
@@ -19,6 +19,7 @@ hgt.hg(['init', '-q'])
hgt.writefile('normal1', 'foo')
os.mkdir('sub')
+os.mkdir('sub2')
hgt.writefile('sub/normal2', 'bar')
hgt.writefile('sub/normal3.txt', 'bar2')
hgt.writefile('sub/normal4.txt', 'bar3')
@@ -29,7 +30,9 @@hgt.writefile('big1', 'abc')
hgt.writefile('sub/big2', 'xyz')
hgt.writefile('sub/big3.txt', 'xyz')
-hgt.hg(['add', '-q', '--bf', 'big1', rejoin('sub/big2'), rejoin('sub/big3.txt')])
+hgt.writefile('sub/big4', 'xyz')
+hgt.writefile('sub2/big5', 'xyz')
+hgt.hg(['add', '-q', '--bf', 'big1', rejoin('sub/big2'), rejoin('sub/big3.txt'), rejoin('sub/big4'), rejoin('sub2/big5')])
hgt.hg(['commit', '-m', 'added bfiles'])
hgt.announce('remove sub/*.txt')
@@ -44,6 +47,8 @@hgt.asserttrue(os.path.exists('sub/normal2'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('big1'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('sub/big2'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub/big4'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub2/big5'), 'added file doesnt exist')
hgt.hg(['status'],
stdout=('R sub/big3.txt\n'
'R sub/normal3.txt\n'
@@ -62,6 +67,8 @@hgt.asserttrue(os.path.exists('sub/normal2'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('big1'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('sub/big2'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub/big4'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub2/big5'), 'added file doesnt exist')
hgt.hg(['up'],
stdout=('0 files updated, 0 files merged, 3 files removed, 0 files unresolved\n'
'Getting changed bfiles\n'
@@ -73,6 +80,8 @@hgt.asserttrue(os.path.exists('sub/normal2'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('big1'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('sub/big2'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub/big4'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub2/big5'), 'added file doesnt exist')
hgt.announce('remove single normal files and add')
hgt.hg(['remove', 'normal1', 'sub/normal2'])
@@ -86,6 +95,23 @@hgt.asserttrue(os.path.exists('sub/normal2'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('big1'), 'added file doesnt exist')
hgt.asserttrue(os.path.exists('sub/big2'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub/big4'), 'added file doesnt exist')
+hgt.asserttrue(os.path.exists('sub2/big5'), 'added file doesnt exist')
+hgt.hg(['status'])
+
+hgt.announce('remove single bfile and commit with full path')
+hgt.hg(['remove', 'sub/big4'])
+hgt.hg(['status'],stdout=('R sub/big4\n'))
+hgt.hg(['commit', '-m', 'removing big4', 'sub/big4'])
+hgt.assertfalse(os.path.exists('sub/big4'), 'removed file exists')
+hgt.hg(['status'])
+
+hgt.announce('remove single bfile and commit with partial path')
+hgt.hg(['remove', 'sub2/big5'])
+hgt.hg(['status'],stdout=('R sub2/big5\n'))
+hgt.assertfalse(os.path.exists("sub2"), 'removed directory structure exists')
+hgt.hg(['commit', '-m', 'removing big5', 'sub2'])
+hgt.assertfalse(os.path.exists('sub2/big5'), 'removed file exists')
hgt.hg(['status'])
hgt.announce('remove single bfiles and add')
|
@@ -75,20 +75,22 @@ if not opts.get('force') and not opts.get('new_branch') and None == prepush(repo, other, False, revs)[0]:
return
try:
+ push_size = 1
while len(outgoing) > 0:
ui.debug('start: %d to push\n' % len(outgoing))
- current_push_size = min(max_push_size, len(outgoing))
+ current_push_size = min(push_size, len(outgoing))
ui.debug('pushing: %d\n' % current_push_size)
# force the push, because we checked above that by the time the whole push is done, we'll have merged back to one head
remote_heads = repo.push(other, force=True, revs=outgoing[:current_push_size])
if remote_heads: # push succeeded
outgoing = outgoing[current_push_size:]
- current_push_size = max_push_size
ui.debug('pushed %d ok\n' % current_push_size)
+ if push_size < max_push_size:
+ push_size *= 2
else: # push failed; try again with a smaller size
- current_push_size /= 10
+ push_size /= 2
ui.debug('failed, trying %d\n' % current_push_size)
- if current_push_size == 0:
+ if push_size == 0:
raise UnpushableChangesetError
except UnpushableChangesetError:
ui.status(_('unable to push changeset %s\n') % outgoing[0])
|
@@ -160,7 +160,8 @@ '''))
return True
- source, hashbranch = parseurl(ui.expandpath('default-push', ui.expandpath('default')))
+ target = ui.config('paths', 'default-push') and ui.expandpath('default-push') or source
+ source, hashbranch = parseurl(source)
other = hg.repository(remoteui(repo, opts), source)
revs = addbranchrevs(repo, other, hashbranch)
ui.pushbuffer()
|
|
|
- # Copyright (C) 2010 Fog Creek Software. All rights reserved.
+# Copyright (C) 2011 Fog Creek Software. All rights reserved.
#
# To enable the "kiln" extension put these lines in your ~/.hgrc:
# [extensions]
# kiln = /path/to/kiln.py
#
# For help on the usage of "hg kiln" use:
# hg help kiln
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''provides command-line support for working with Kiln
This extension allows you to directly open up the Kiln page for your
repository, including the annotation, file view, outgoing, and other
pages. Additionally, it will attempt to guess which remote Kiln
repository you wish push to and pull from based on its related repositories.
This extension will also notify you when a Kiln server you access has an
updated version of the Kiln Client and Tools available.
To disable the check for a version 'X.Y.Z' and all lower versions, add the
following line in the [kiln] section of your hgrc:
ignoreversion = X.Y.Z
'''
import os
import re
import urllib
import urllib2
+import subprocess
import sys
from cookielib import MozillaCookieJar
from hashlib import md5
-from mercurial import extensions, commands, demandimport, hg, util, httprepo, match
+from mercurial import extensions, commands, demandimport, hg, util, httprepo, localrepo, match
+from mercurial import ui as hgui
from mercurial import url as hgurl
from mercurial.error import RepoError
from mercurial.i18n import _
from mercurial.node import nullrev
demandimport.disable()
try:
import json
except ImportError:
sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), '_custom'))
import json
try:
import webbrowser
def browse(url):
webbrowser.open(escape_reserved(url))
except ImportError:
if os.name == 'nt':
import win32api
def browse(url):
win32api.ShellExecute(0, 'open', escape_reserved(url), None, None, 0)
demandimport.enable()
-KILN_CAPABILITY_PREFIX = 'kiln-'
-KILN_CURRENT_VERSION = '1.0.0'
_did_version_check = False
class APIError(Exception):
    '''Exception raised when the Kiln API reports errors.

    Inspect self.errors to see the API errors thrown.
    '''
    def __init__(self, obj):
        '''takes a json object for debugging'''
        self.errors = ['%s: %s' % (err['codeError'], err['sError'])
                       for err in obj['errors']]

    def __str__(self):
        return '\n'.join(self.errors)
def urljoin(*components):
    '''Join URL components, ensuring exactly one slash between each pair.

    Only a single leading slash is stripped from each component, matching
    the behavior callers rely on.
    '''
    result = components[0]
    for part in components[1:]:
        separator = '' if result.endswith('/') else '/'
        if part.startswith('/'):
            part = part[1:]
        result = result + separator + part
    return result
def _baseurl(ui, path):
    '''Return the remote repository URL (auth credentials stripped) if it
    looks like a Kiln-hosted repository, otherwise None.'''
    remote = hg.repository(ui, path)
    url = hgurl.removeauth(remote.url())
    lowered = url.lower()
    if lowered.find('/kiln/') > 0 or lowered.find('kilnhg.com/') > 0:
        return url
    return None
def escape_reserved(path):
    '''Prefix '$' to path segments that collide with reserved Windows or
    ASP.NET names so the resulting URL is safe to open in a browser.

    Segments already starting with '$' are escaped again; any query string
    is preserved unchanged.
    '''
    reserved = re.compile(
        r'^(((com[1-9]|lpt[1-9]|con|prn|aux)(\..*)?)|web\.config' +
        r'|clock\$|app_data|app_code|app_browsers' +
        r'|app_globalresources|app_localresources|app_themes' +
        r'|app_webreferences|bin)$', re.IGNORECASE)
    pieces = path.split('?')
    query = '?' + pieces[1] if len(pieces) > 1 else ''
    escaped = []
    for segment in pieces[0].split('/'):
        if reserved.match(segment) or segment.startswith('$'):
            escaped.append('$' + segment)
        else:
            escaped.append(segment)
    return '/'.join(escaped) + query
def normalize_name(s):
    '''Lowercase a name and replace spaces with dashes (Kiln slug form).'''
    return '-'.join(s.lower().split(' '))
def call_api(url, data, post=False):
    '''returns the json object for the url and the data dictionary

    Uses HTTP POST if the post parameter is True and HTTP GET
    otherwise. Raises APIError on API errors.
    '''
    # doseq=True expands list values (e.g. revTails) into repeated parameters.
    data = urllib.urlencode(data, doseq=True)
    try:
        if post:
            fd = urllib2.urlopen(url, data)
        else:
            fd = urllib2.urlopen(url + '?' + data)
        obj = json.load(fd)
    except:
        # NOTE(review): bare except hides the real failure (network error,
        # HTTP error, malformed JSON) behind this generic message.
        raise util.Abort(_('Path guessing requires Fog Creek Kiln 2.0. If you'
                           ' are running Kiln 2.0 and continue to experience'
                           ' problems, please contact Fog Creek Software.'))
    # The API signals errors in-band with an 'errors' key in the response.
    if isinstance(obj, dict) and 'errors' in obj:
        raise APIError(obj)
    return obj
def login(ui, url):
    '''Prompt for credentials and return an API token from the Kiln server
    at *url*; aborts if authentication fails.'''
    ui.write(_('realm: %s\n') % url)
    username = ui.prompt('username:')
    password = ui.getpass()
    authurl = url + 'Api/1.0/Auth/Login'
    token = call_api(authurl, dict(sUser=username, sPassword=password))
    if not token:
        raise util.Abort(_('authorization failed'))
    return token
def get_domain(url):
    '''Return the cookie domain for a Kiln URL: the host with any port
    removed and ".local" appended to dotless (intranet) host names.'''
    marker = '://'
    remainder = url[url.find(marker) + len(marker):]
    # Note: mirrors the original find()-based slicing, including its
    # behavior when no '/' follows the host.
    domain = remainder[:remainder.find('/')]
    if ':' in domain:
        domain = domain.split(':', 1)[0]
    if '.' not in domain:
        domain += '.local'
    return domain
def _get_path(path):
if os.name == 'nt':
ret = os.path.expanduser('~\\_' + path)
else:
ret = os.path.expanduser('~/.' + path)
# Cygwin's Python does not always expanduser() properly...
if re.match(r'^[A-Za-z]:', ret) is not None and re.match(r'[A-Za-z]:\\', ret) is None:
ret = re.sub(r'([A-Za-z]):', r'\1:\\', ret)
return ret
-def _versioncheck(ui, repo, str):
+def _upgradecheck(ui, repo):
global _did_version_check
- m = re.match(KILN_CAPABILITY_PREFIX + '(?P<version>[0-9.]+).*', str)
- if _did_version_check or not m:
+ if _did_version_check or not ui.configbool('kiln', 'autoupdate', True):
return
_did_version_check = True
- version = m.group('version')
- server_version = [int(s) for s in version.split('.')]
- my_version = [int(s) for s in KILN_CURRENT_VERSION.split('.')]
- ignore_version = [int(s) for s in ui.config('kiln', 'ignoreversion', '0.0.0').split('.')]
- if server_version > my_version:
- url = urljoin(repo.url()[:repo.url().lower().index('/repo')], 'Tools')
- if server_version > ignore_version:
- if ui.promptchoice(_('You are currently running Kiln client tools version %s. '
- 'Version %s is available.\nUpgrade now? (y/n)') %
- (KILN_CURRENT_VERSION, version), ('&No', '&Yes'), default=0):
- browse(url)
- else:
- if os.name == 'nt':
- config_file = 'Mercurial.ini'
- else:
- config_file = '~/.hgrc'
- ui.write(_('''If you'd like Kiln to stop prompting you about version %s and below, '''
- '''add ignoreversion=%s to the [kiln] section of your %s\n''') % (version, version, config_file))
+ _upgrade(ui, repo)
+
+def _upgrade(ui, repo):
+ ext_dir = os.path.dirname(os.path.abspath(__file__))
+ ui.debug('kiln: checking for extensions upgrade for %s\n' % ext_dir)
+
+ try:
+ r = localrepo.localrepository(hgui.ui(), ext_dir)
+ except RepoError:
+ commands.init(hgui.ui(), dest=ext_dir)
+ r = localrepo.localrepository(hgui.ui(), ext_dir)
+
+ r.ui.setconfig('kiln', 'autoupdate', False)
+ r.ui.pushbuffer()
+ try:
+ source = 'https://developers.kilnhg.com/Repo/Kiln/Group/Kiln-Extensions'
+ if commands.incoming(r.ui, r, bundle=None, force=False, source=source) != 0:
+ # no incoming changesets, or an error. Don't try to upgrade.
+ ui.debug('kiln: no extensions upgrade available\n')
+ return
+ ui.write(_('updating Kiln Extensions at %s... ') % ext_dir)
+ # pull and update return falsy values on success
+ if commands.pull(r.ui, r, source=source) or commands.update(r.ui, r, clean=True):
+ url = urljoin(repo.url()[:repo.url().lower().index('/repo')], 'Tools')
+ ui.write(_('unable to update\nvisit %s to download the newest extensions\n') % url)
else:
- ui.write(_('You are currently running Kiln client tools version %s. '
- 'Version %s is available.\nVisit %s to download the new client tools.\n') %
- (KILN_CURRENT_VERSION, version, url))
- ui.write('\n')
+ ui.write(_('complete\n'))
+ except Exception, e:
+ ui.debug(_('kiln: error updating Kiln Extensions: %s\n') % e)
def is_dest_a_path(ui, dest):
    '''Return True if dest names a configured [paths] alias.'''
    for name, location in ui.configitems('paths'):
        if name == dest:
            return True
    return False
def is_dest_a_scheme(ui, dest):
    '''Return True if dest begins with a URL scheme Mercurial knows.'''
    destscheme = dest[:dest.find('://')]
    if not destscheme:
        return False
    for known in hg.schemes:
        if destscheme == known:
            return True
    return False
def create_match_list(matchlist):
    '''Format a list of repository URLs as an indented, newline-terminated
    block suitable for embedding in an error message.

    Uses str.join instead of repeated string concatenation, which is
    linear rather than quadratic in the number of matches.
    '''
    return ''.join(' ' + m + '\n' for m in matchlist)
def get_username(url):
    '''Extract the login name from a URL of the form
    scheme://user[:password]@host/..., or return '' when none exists.'''
    hostpart = re.sub(r'/.*', '', re.sub(r'https?://', '', url))
    if '@' not in hostpart:
        # Didn't find any login info...
        return ''
    # rfind in case the username is an email address
    username = hostpart[:hostpart.rfind('@')]
    if ':' in username:
        # Drop a trailing :password.
        username = hostpart[:hostpart.find(':')]
    return username
def get_dest(ui):
    '''Work out which path alias the current hg invocation targets.

    Re-parses sys.argv to recover the destination argument; falls back
    to 'default-push' for outgoing/push and 'default' otherwise.
    '''
    from mercurial.dispatch import _parse
    try:
        cmd_info = _parse(ui, sys.argv[1:])
        cmd = cmd_info[0]
        dest = cmd_info[2]
        if dest:
            dest = dest[0]
        elif cmd in ['outgoing', 'push']:
            dest = 'default-push'
        else:
            dest = 'default'
    except:
        # _parse can raise on unknown commands/options; best-effort default.
        dest = 'default'
    return ui.expandpath(dest)
def check_kilnapi_token(ui, url):
    '''Return the stored Kiln API token for this url's domain and the
    current user, or '' if none is stored.'''
    tokenpath = _get_path('hgkiln')
    if (not os.path.exists(tokenpath)) or os.path.isdir(tokenpath):
        return ''
    domain = get_domain(url)
    userhash = md5(get_username(get_dest(ui))).hexdigest()
    fp = open(tokenpath, 'r')
    ret = ""
    # Each line of the token file is 'domain userhash token\n'; the last
    # matching line wins.
    for line in fp:
        try:
            d, u, t = line.split(' ')
        except:
            # NOTE(review): fp is not closed if this Abort is raised.
            raise util.Abort(_('Authentication file %s is malformed.') % tokenpath)
        if d == domain and u == userhash:
            # Get rid of that newline character...
            ret = t[:-1]
    fp.close()
    return ret
def add_kilnapi_token(ui, url, fbToken):
    '''Append an API token record for this url's domain and the current
    user to the hgkiln token file. A falsy token is silently ignored.'''
    if not fbToken:
        return
    tokenpath = _get_path('hgkiln')
    if os.path.isdir(tokenpath):
        raise util.Abort(_('Authentication file %s exists, but is a directory.') % tokenpath)
    domain = get_domain(url)
    userhash = md5(get_username(get_dest(ui))).hexdigest()
    # Record format matches check_kilnapi_token: 'domain userhash token\n'.
    fp = open(tokenpath, 'a')
    fp.write(domain + ' ' + userhash + ' ' + fbToken + '\n')
    fp.close()
def delete_kilnapi_tokens():
    '''Remove the hgkiln token file, logging the user out of all Kiln
    API sessions. A missing file (or a directory) is left alone.'''
    # deletes the hgkiln file
    tokenpath = _get_path('hgkiln')
    if not os.path.isdir(tokenpath) and os.path.exists(tokenpath):
        os.remove(tokenpath)
def check_kilnauth_token(ui, url):
    '''Return the fbToken cookie stored by the kilnauth extension for this
    url's domain, if any.

    Returns '' when the cookie store is missing/unreadable, but falls
    through to an implicit None when no matching cookie is found.
    '''
    cookiepath = _get_path('hgcookies')
    if (not os.path.exists(cookiepath)) or (not os.path.isdir(cookiepath)):
        return ''
    # kilnauth keeps one cookie file per user, named by md5 of the username.
    cookiepath = os.path.join(cookiepath, md5(get_username(get_dest(ui))).hexdigest())
    try:
        if not os.path.exists(cookiepath):
            return ''
        cj = MozillaCookieJar(cookiepath)
    except IOError, e:
        return ''
    domain = get_domain(url)
    cj.load(ignore_discard=True, ignore_expires=True)
    for cookie in cj:
        if domain == cookie.domain:
            if cookie.name == 'fbToken':
                return cookie.value
def remember_path(ui, repo, path, value):
    '''appends the path to the working copy's hgrc and backs up the original'''
    paths = dict(ui.configitems('paths'))
    # This should never happen.
    if path in paths: return
    # ConfigParser only cares about these three characters.
    if re.search(r'[:=\s]', path): return
    # audit_path guards against writing outside the repository's .hg dir.
    audit_path = getattr(repo.opener, 'audit_path',
                         util.path_auditor(repo.root))
    audit_path('hgrc')
    audit_path('hgrc.backup')
    base = repo.opener.base
    # Back up hgrc so unremember_path() can restore it afterwards.
    util.copyfile(os.path.join(base, 'hgrc'),
                  os.path.join(base, 'hgrc.backup'))
    ui.setconfig('paths', path, value)
    try:
        fp = repo.opener('hgrc', 'a', text=True)
        # Mercurial assumes Unix newlines by default and so do we.
        fp.write('\n[paths]\n%s = %s\n' % (path, value))
        fp.close()
    except IOError, e:
        return
def unremember_path(ui, repo):
    '''restores the working copy's hgrc'''
    audit_path = getattr(repo.opener, 'audit_path',
                         util.path_auditor(repo.root))
    audit_path('hgrc')
    audit_path('hgrc.backup')
    base = repo.opener.base
    # Restore the backup written by remember_path(); hgrc.backup is kept.
    if os.path.exists(os.path.join(base, 'hgrc')):
        util.copyfile(os.path.join(base, 'hgrc.backup'),
                      os.path.join(base, 'hgrc'))
def guess_kilnpath(orig, ui, repo, dest=None, **opts):
    '''Wrapper for push/pull/incoming/outgoing that resolves a partial
    repository slug (any unique piece of project/group/repo) into a full
    Kiln URL before delegating to the wrapped command.'''
    if not dest:
        return orig(ui, repo, **opts)
    # Real paths, configured aliases and scheme-qualified URLs pass through.
    if os.path.exists(dest) or is_dest_a_path(ui, dest) or is_dest_a_scheme(ui, dest):
        return orig(ui, repo, dest, **opts)
    else:
        targets = get_targets(repo);
        matches = []
        prefixmatches = []
        for target in targets:
            # target is [scheme-url, project-slug, group-slug, repo-slug, aliases].
            url = '%s/%s/%s/%s' % (target[0], target[1], target[2], target[3])
            ndest = normalize_name(dest)
            ntarget = [normalize_name(t) for t in target[1:4]]
            aliases = [normalize_name(s) for s in target[4]]
            # Exact match on a single slug component or alias.
            if ndest.count('/') == 0 and \
               (ntarget[0] == ndest or \
                ntarget[1] == ndest or \
                ntarget[2] == ndest or \
                ndest in aliases):
                matches.append(url)
            # NOTE(review): 'and' binds tighter than 'or' here, so the second
            # join comparison is tested even when ndest has no '/' — likely
            # missing parentheses compared to the branch above. Confirm intent.
            elif ndest.count('/') == 1 and \
                '/'.join(ntarget[0:2]) == ndest or \
                '/'.join(ntarget[1:3]) == ndest:
                matches.append(url)
            elif ndest.count('/') == 2 and \
                '/'.join(ntarget[0:3]) == ndest:
                matches.append(url)
            # Prefix matches are collected separately for the error message.
            if (ntarget[0].startswith(ndest) or \
                ntarget[1].startswith(ndest) or \
                ntarget[2].startswith(ndest) or \
                '/'.join(ntarget[0:2]).startswith(ndest) or \
                '/'.join(ntarget[1:3]).startswith(ndest) or \
                '/'.join(ntarget[0:3]).startswith(ndest)):
                prefixmatches.append(url)
        if len(matches) == 0:
            if len(prefixmatches) == 0:
                # if there are no matches at all, let's just let mercurial handle it.
                return orig(ui, repo, dest, **opts)
            else:
                urllist = create_match_list(prefixmatches)
                raise util.Abort(_('%s did not exactly match any part of the repository slug:\n\n%s') % (dest, urllist))
        elif len(matches) > 1:
            urllist = create_match_list(matches)
            raise util.Abort(_('%s matches more than one Kiln repository:\n\n%s') % (dest, urllist))
        # Unique match -- perform the operation
        try:
            # Temporarily record the alias so the wrapped command sees it,
            # then restore the original hgrc.
            remember_path(ui, repo, dest, matches[0])
            return orig(ui, repo, matches[0], **opts)
        finally:
            unremember_path(ui, repo)
def get_tails(repo):
    '''Return the hex ids of all root changesets (those whose parents are
    both null); aborts on an empty repository.'''
    tails = []
    for rev in xrange(repo['tip'].rev() + 1):
        ctx = repo[rev]
        if ctx.p1().rev() == nullrev and ctx.p2().rev() == nullrev:
            tails.append(ctx.hex())
    if not tails:
        raise util.Abort(_('Path guessing is only enabled for non-empty repositories.'))
    return tails
def get_targets(repo):
    '''Ask each configured kiln_scheme server for repositories related to
    this one. Returns a list of
    [scheme-url, project-slug, group-slug, repo-slug, aliases] entries.'''
    targets = []
    kilnschemes = repo.ui.configitems('kiln_scheme')
    for scheme in kilnschemes:
        url = scheme[1]
        # Derive the server base URL from the configured scheme URL.
        if url.lower().find('/kiln/') != -1:
            baseurl = url[:url.lower().find('/kiln/') + len("/kiln/")]
        elif url.lower().find('kilnhg.com/') != -1:
            baseurl = url[:url.lower().find('kilnhg.com/') + len("kilnhg.com/")]
        else:
            continue
        tails = get_tails(repo)
        # Try the stored API token, then the kilnauth cookie, then prompt.
        token = check_kilnapi_token(repo.ui, baseurl)
        if not token:
            token = check_kilnauth_token(repo.ui, baseurl)
            add_kilnapi_token(repo.ui, baseurl, token)
        if not token:
            token = login(repo.ui, baseurl)
            add_kilnapi_token(repo.ui, baseurl, token)
        # We have a token at this point
        params = dict(revTails=tails, token=token)
        apiurl = baseurl + 'Api/1.0/Repo/Related'
        related_repos = call_api(apiurl, params)
        targets.extend([[url,
                         related_repo['sProjectSlug'],
                         related_repo['sGroupSlug'],
                         related_repo['sSlug'],
                         related_repo.get('rgAliases', [])] for related_repo in related_repos])
    return targets
def display_targets(repo):
    '''Print every Kiln push/pull target available for this repository,
    including any aliases.'''
    # Fetch targets before writing anything: get_targets() may prompt.
    targets = get_targets(repo)
    repo.ui.write(_('The following Kiln targets are available for this repository:\n\n'))
    for target in targets:
        aliases = target[4]
        if aliases:
            suffix = 'es' if len(aliases) > 1 else ''
            alias_text = _(' (alias%s: %s)') % (suffix, ', '.join(aliases))
        else:
            alias_text = ''
        repo.ui.write(' %s/%s/%s/%s%s\n' % (target[0], target[1], target[2], target[3], alias_text))
def dummy_command(ui, repo, dest=None, **opts):
    '''dummy command to pass to guess_path() for hg kiln

    Returns the repository URL if dest has been successfully path
    guessed, None otherwise.
    '''
    if opts['path'] != dest and dest:
        return dest
    return None
def kiln(ui, repo, *pats, **opts):
    '''show the relevant page of the repository in Kiln

    This command allows you to navigate straight to the Kiln page for a
    repository, including directly to settings, file annotation, and
    file & changeset viewing.

    Typing "hg kiln" by itself will take you directly to the
    repository history in kiln. Specify any other options to override
    this default. The --rev, --annotate, --file, and --filehistory options
    can be used together.

    To display a list of valid targets, type hg kiln --targets. To
    push or pull from one of these targets, use any unique identifier
    from this list as the parameter to the push/pull command.
    '''
    try:
        url = _baseurl(ui, ui.expandpath(opts['path'] or 'default', opts['path'] or 'default-push'))
    except RepoError:
        # Not a configured path; try slug guessing before giving up.
        url = guess_kilnpath(dummy_command, ui, repo, dest=opts['path'], **opts)
        if not url:
            raise
    if not url:
        raise util.Abort(_('this does not appear to be a Kiln-hosted repository\n'))
    # default tracks whether any option handled the request; if none did,
    # fall through to opening the repository history page.
    default = True
    def files(key):
        # Expand each pattern in opts[key] against the working manifest.
        allpaths = []
        for f in opts[key]:
            paths = [path for path in repo['.'].manifest().iterkeys() if re.search(match._globre(f) + '$', path)]
            if not paths:
                ui.warn(_('cannot find %s') % f)
            allpaths += paths
        return allpaths
    if opts['rev']:
        default = False
        for ctx in (repo[rev] for rev in opts['rev']):
            browse(urljoin(url, 'History', ctx.hex()))
    if opts['annotate']:
        default = False
        for f in files('annotate'):
            browse(urljoin(url, 'File', f) + '?view=annotate')
    if opts['file']:
        default = False
        for f in files('file'):
            browse(urljoin(url, 'File', f))
    if opts['filehistory']:
        default = False
        for f in files('filehistory'):
            browse(urljoin(url, 'FileHistory', f) + '?rev=tip')
    if opts['outgoing']:
        default = False
        browse(urljoin(url, 'Outgoing'))
    if opts['settings']:
        default = False
        browse(urljoin(url, 'Settings'))
    if opts['targets']:
        default = False
        display_targets(repo)
    if opts['logout']:
        default = False
        delete_kilnapi_tokens()
    if default or opts['changes']:
        browse(url)
def uisetup(ui):
    '''Hook the Kiln path-guessing logic into every remote-facing command.'''
    for cmd in ('outgoing', 'push', 'pull', 'incoming'):
        extensions.wrapcommand(commands.table, cmd, guess_kilnpath)
def reposetup(ui, repo):
if issubclass(repo.__class__, httprepo.httprepository):
- for cap in repo.capabilities:
- if cap.startswith(KILN_CAPABILITY_PREFIX):
- _versioncheck(ui, repo, cap)
+ _upgradecheck(ui, repo)
cmdtable = {
'kiln':
(kiln,
[('a', 'annotate', [], _('annotate the file provided')),
('c', 'changes', None, _('view the history of this repository; this is the default')),
('f', 'file', [], _('view the file contents')),
('l', 'filehistory', [], _('view the history of the file')),
('o', 'outgoing', None, _('view the repository\'s outgoing tab')),
('s', 'settings', None, _('view the repository\'s settings tab')),
- ('p', 'path', '', _('override the default URL to use for Kiln')),
+ ('p', 'path', '', _('select which Kiln branch of the repository to use')),
('r', 'rev', [], _('view the specified changeset in Kiln')),
('t', 'targets', None, _('view the repository\'s targets')),
('', 'logout', None, _('log out of Kiln sessions'))],
_('hg kiln [-p url] [-r rev|-a file|-f file|-c|-o|-s|-t|--logout]'))
}
|
@@ -36,7 +36,9 @@ import re
from urllib2 import Request
from cookielib import MozillaCookieJar, Cookie
+import shutil
import sys
+import tempfile
try:
from hashlib import md5
@@ -49,6 +51,37 @@
current_user = None
+class CookieJar(MozillaCookieJar, object):
+ def __init__(self, filename, *args, **kwargs):
+ self.__original_path = filename
+ tf = tempfile.NamedTemporaryFile(delete=False)
+ self.__temporary_path = tf.name
+ tf.close()
+ if os.path.exists(filename):
+ shutil.copyfile(filename, self.__temporary_path)
+ return super(CookieJar, self).__init__(self.__temporary_path, *args, **kwargs)
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ os.unlink(self.__temporary_path)
+ self.__temporary_path = None
+
+ def __del__(self):
+ try:
+ if self.__temporary_path:
+ os.unlink(self.__temporary_path)
+ except (OSError, IOError):
+ pass
+
+ def save(self, *args, **kwargs):
+ super(CookieJar, self).save(*args, **kwargs)
+ try:
+ shutil.copyfile(self.__temporary_path, self.__original_path)
+ except IOError:
+ pass
+
def get_cookiejar(ui):
global current_user
if os.name == 'nt':
@@ -69,7 +102,7 @@ cookie_path = re.sub(r'([A-Za-z]):', r'\1:\\', cookie_path)
try:
- cj = MozillaCookieJar(cookie_path)
+ cj = CookieJar(cookie_path)
if not os.path.exists(cookie_path):
cj.save()
if os.name == 'posix':
|
Loading...