# Copyright (C) 2009-2011 by Fog Creek Software. All rights reserved.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
from functools import wraps
import hashlib
import os
import urllib2
from flask import Flask, Response, request
from mercurial import hgweb, util, context
from mercurial.error import LockHeld, RepoLookupError
from werkzeug.exceptions import NotFound, BadRequest
import settings
import simplejson
import Image
import cStringIO
from bugzscout import report_exception
from encoders import EmittableEncoder
from formatter import format_diffs, format_file
from repositories import Repository, RepositoryNotSubsetException, CreatesNewHeadsException, filetuple, hexdecode, determinedisplaysize
from webtasks import asyncpost, queue_repo_index, queue_repo_create, queue_repo_strip
import bfiles
import syncstatus
import urlutil
# Flask application object; every route decorator below registers on it.
app = Flask(__name__)
def jsonify(obj):
    """Encode obj as a JSON Response; Responses and strings pass through as-is."""
    if isinstance(obj, (Response, basestring)):
        return obj
    return Response(enc.encode(obj), mimetype='application/json')
def route(url, methods=['GET'], as_json=True):
    """Decorator factory: register a Flask route and, when as_json is set,
    JSON-encode whatever the wrapped view returns."""
    def wrapper(f):
        @app.route(url, methods=methods)
        @wraps(f)
        def inner(*args, **kwargs):
            result = f(*args, **kwargs)
            return jsonify(result) if as_json else result
        return inner
    return wrapper
def get(url, as_json=True):
    """Shorthand for a GET-only route (route() already defaults to GET)."""
    return route(url, as_json=as_json)
def post(url, as_json=True):
    """Shorthand for a POST-only route."""
    return route(url, ['POST'], as_json)
def delete(url):
    """Shorthand for a DELETE route; the view's result is not JSON-wrapped."""
    methods = ['DELETE']
    return app.route(url, methods=methods)
def error(message, code):
    """Build the standard error payload returned to the website."""
    return dict(type='error', message=message, code=code)
# Shared encoder instance used by jsonify(); defined after jsonify(), which
# is fine because the name is looked up at call time.
enc = EmittableEncoder()
@get('/repo/<uuid>')
def repo_get(uuid=None):
    """Return one repository by uuid, or (hosted only) list every repository.

    Raises NotFound for an unknown uuid, BadRequest when a listing is
    requested on a non-hosted install.
    """
    if uuid:
        r = Repository(uuid)
        if r.exists():
            return r
        raise NotFound
    if not settings.HOSTED:
        raise BadRequest
    root = settings.KILN_REPOSITORY_ROOT
    # Construct each Repository once instead of once for the existence
    # check and again for the result.
    repos = [r for r in (Repository(folder) for folder in os.listdir(root))
             if r.exists()]
    # Hosted installs also shard repositories two levels deep under
    # two-character prefix directories.
    for p1 in os.listdir(root):
        if len(p1) == 2:
            for p2 in os.listdir(os.path.join(root, p1)):
                parent = os.path.join(root, p1, p2)
                repos.extend(r for r in (Repository(folder) for folder in os.listdir(parent))
                             if r.exists())
    return repos
@post('/repo')
def repo_create():
q = request.form
try:
uuid = q['uuid']
pingback = q['pingback']
site = urlutil.siteurl(request)
meta = q.get('meta', None)
parent = q.get('parent', None)
except Exception, e:
raise
return BadRequest(e)
queue_repo_create(uuid, pingback, site, meta=meta, parent=parent)
return 'OK'
@post('/repo/<uuid>')
def update_meta(uuid):
    """Replace an existing repository's metadata from the 'meta' form field
    (a JSON blob); an absent/empty field clears the metadata."""
    q = request.form
    try:
        meta = simplejson.loads(q['meta']) if q.get('meta') else {}
    except ValueError:
        # simplejson signals malformed input with ValueError (JSONDecodeError);
        # the original bare except also swallowed KeyboardInterrupt et al.
        raise BadRequest
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    r.meta = meta
    return r
@delete('/repo/<uuid>')
def repo_delete(uuid):
    """Delete a repository and drop it from sync tracking; returns 204.

    On hosted installs the caller must present the shared secret in the
    'magic_word' query argument.
    """
    # This can only ever be called manually, so it's okay that this key is
    # never used on the website side. If we do ever add repository purging
    # via heartbeat or whatever, this will obviously need to change.
    if settings.HOSTED and request.args.get('magic_word') != settings.WHITE_RABBIT_OBJECT:
        raise BadRequest
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    r.delete()
    syncstatus.remove_repo(r)
    return Response('', status=204)
@post('/repo/<uuid>/commit')
def commit(uuid):
    """Commit a single uploaded file on top of changeset `parent`.

    Form fields: author, parent, date, message, and a hex-encoded path;
    the file body arrives as the 'file' upload. Returns an error payload
    when the upload is too large, the repo is locked, or the commit would
    create a new head; raises NotFound for unknown repos/parents.
    """
    q = request.form
    author = q['author']
    parent = q['parent']
    date = q['date']
    message = q['message']
    path = hexdecode(q['path'])
    upload = request.files['file']
    if upload.content_length > settings.KILN_MAX_COMMIT_FILE_SIZE:
        return error('The uploaded file is too large.', 'too_large')
    data = upload.read()
    if hasattr(upload, 'close'):
        upload.close()
    # Callback handed to memctx: supplies the new file contents at commit time.
    def _writefile(repo, mctx, path):
        return context.memfilectx(path, data)
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    repo = r.repo
    l = None
    try:
        l = repo.lock()
    except LockHeld:
        # NOTE(review): l is still None when lock() raised, so this release
        # is a no-op kept for safety.
        if l: l.release()
        return error('The repository is locked.', 'repo_locked')
    try:
        try:
            ctx = repo[parent]
            # Refuse commits that would fork history off a non-head parent.
            if ctx.children():
                return error('Commit creates new head!', 'not_head')
        except RepoLookupError:
            raise NotFound
        mctx = context.memctx(repo, [parent, None], message, [path], _writefile, user=author, date=date)
        mctx.commit()
    except Exception, e:
        report_exception(e)
        raise
    finally:
        # The lock is held across the whole commit attempt; always release it.
        if l: l.release()
    return Response('OK')
@post('/repo/stripped')
def strip():
    """Queue a strip of `rev` from repository `uuid`, pinging back when done.

    Form fields: uuid, parent, pingback, rev, url, ixperson; optional meta.
    Raises NotFound when the parent repository does not exist.
    """
    q = request.form
    uuid = q['uuid']
    parent = q['parent']
    pingback = q['pingback']
    rev = q['rev']
    url = q['url']
    ixPerson = q['ixperson']
    meta = q.get('meta', '')
    # (The original read q['parent'] a second time here; once is enough.)
    if not Repository(parent).exists():
        raise NotFound
    queue_repo_strip(pingback, uuid, parent, rev, meta, url, ixPerson)
    return Response('OK')
@get('/repo/<uuid>/manifest/<rev>')
def manifest(uuid, rev='tip'):
    """Return the file manifest of the repository at revision `rev`."""
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    if not r.hasrevision(rev):
        raise BadRequest
    return dict(type='manifest', manifest=r.manifest(rev))
@get('/repo/<uuid>/size')
def size(uuid):
    """Return the repository size; missing repos report 0 instead of 404.

    (Hack around a dumb bug in ourdot's Kiln install.)
    """
    r = Repository(uuid)
    reposize = r.size() if r.exists() else 0
    return {'type': 'reposize', 'size': reposize}
@get('/repo/<uuid>/commontag')
def common_tags(uuid):
    """
    This function takes a list of checkins within a repository and
    will return the nearest common child which has a tag.

    Query args: revs (comma-separated, required), num_tags (default 1).
    """
    r = Repository(uuid)
    if 'revs' not in request.args or not r.exists():
        raise BadRequest
    revs = request.args['revs'].split(',')
    # Default to the single nearest tag unless the caller asks for more.
    num_tags = int(request.args.get('num_tags', 1))
    tags = r.commontags(revs, num_tags)
    return {'type': 'tags', 'tags': tags}
@post('/repo/<uuid>/tag/<rev>')
def create_tag(uuid, rev='tip'):
    """Tag revision `rev`; form fields: tag, ixPerson, url, username, force."""
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    form = request.form
    try:
        tag = form['tag']
        ixPerson = form['ixPerson']
        url = form['url']
        username = form['username']
    except KeyError:
        raise BadRequest
    # Any supplied value other than (case-insensitive) 'false' forces the tag.
    force = form.get('force', 'false').lower() != 'false'
    try:
        r.tag(rev, tag, url, ixPerson, username, force)
    except ValueError:
        raise BadRequest
    return {'type': 'tag', 'tag': tag, 'rev': rev}
@get('/repo/<uuid>/tag')
def get_tags(uuid):
    """List every tag in the repository."""
    r = Repository(uuid)
    if r.exists():
        return dict(type='tags', tags=r.tags())
    raise NotFound
@get('/repo/<uuid>/changesbetweentags')
def betweentags(uuid):
    """Return the changesets between two tags (query args tag1, tag2).

    Pass includelow=true to also include the lower tag's own changeset.
    """
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    try:
        tag1 = request.args["tag1"]
        tag2 = request.args["tag2"]
    except KeyError:
        raise BadRequest
    includelow = request.args.get('includelow', 'false').lower() == 'true'
    try:
        changesetlist = r.changesbetweentags(tag1, tag2, includelow)
        return {'type': 'changesets', 'changesets': changesetlist}
    except Exception:
        # Bad tag names and lookup failures all surface as 400; the original
        # bare except would also have trapped KeyboardInterrupt/SystemExit.
        raise BadRequest
@post('/repo/meta')
def set_meta():
    '''Takes a JSON dictionary of repo uuid => repo metadata, at the key
    'meta', and updates the metadata for those repos. Returns a dictionary
    of uuid => boolean, with True for repos that were found and False for
    repos that do not exist.'''
    meta = simplejson.loads(request.form['meta'])
    exists = {}
    for uuid in meta:
        r = Repository(uuid)
        found = r.exists()
        exists[uuid] = found
        if found:
            # Each value is itself a JSON-encoded metadata blob.
            r.meta = simplejson.loads(meta[uuid])
    return exists
@get('/repo/<uuid>/file/<rev>/')
@get('/repo/<uuid>/file/<rev>/<path:path>')
def get_file(uuid, path='', rev='tip'):
    """Serve file contents at path@rev, or a directory listing for directories.

    Query flags: binaries (raw bytes), images (resized PNG rendering),
    no_truncate (disable size cap), no_contents (metadata only).
    """
    r = Repository(uuid)
    args = request.args
    binaries = int(args.get('binaries', 0))
    images = int(args.get('images', 0))
    can_truncate = not int(args.get('no_truncate', 0))
    no_contents = int(args.get('no_contents', 0))
    path = hexdecode(path)
    if not r.exists():
        raise NotFound
    if not r.hasrevision(rev):
        raise BadRequest
    if not r.hasfile(path, rev):
        return directorylisting(r, path, rev)
    return filecontents(r, path, rev, binaries, images, can_truncate, no_contents)
@get('/repo/<uuid>/file/<rev1>/<rev2>/<path:path>')
def get_subtracted_image(uuid, path='', rev1='tip', rev2='tip'):
    """Render a PNG visualizing the per-pixel difference of an image file
    between revisions rev1 and rev2.

    NOTE(review): when the file is missing from either revision this falls
    through and returns None -- confirm callers guard against that.
    """
    r = Repository(uuid)
    path = hexdecode(path)
    if not r.exists():
        raise NotFound
    if not r.hasrevision(rev1) or not r.hasrevision(rev2):
        raise BadRequest
    if r.hasfile(path, rev1) and r.hasfile(path, rev2):
        #open the old and new versions of the image in RGB mode, and resize them so that the largest dimension is 300px.
        oldcontents = Image.open(cStringIO.StringIO(r.filecontents(path, rev1, raw=1)))
        oldcontents = resizeimage(oldcontents, displaySize=tuple(determinedisplaysize(oldcontents.size, max=(500, 500)))).convert("RGB")
        # NOTE(review): the new image is sized from oldcontents.size (already
        # resized above) rather than its own size -- presumably both revisions
        # share dimensions; verify.
        newcontents = resizeimage(Image.open(cStringIO.StringIO(r.filecontents(path, rev2, raw=1))), displaySize=tuple(determinedisplaysize(oldcontents.size, max=(500, 500)))).convert("RGB")
        sub = subtractimages(oldcontents, newcontents)
        #im = Image.new("RGB", (oldcontents.size[0]*3, oldcontents.size[1]))
        #im.paste(oldcontents, (0,0, sub.size[0], sub.size[1]))
        #im.paste(sub, (sub.size[0],0, sub.size[0]*2, sub.size[1]))
        #im.paste(newcontents, (sub.size[0]*2,0,sub.size[0]*3,sub.size[1]))
        im = sub
        output = cStringIO.StringIO()
        im.save(output, "PNG")
        return Response(output.getvalue())
def _classify_binary_file(repo, path, rev):
    # Decide whether a binary file is an image and pick the placeholder text
    # shown in place of its raw bytes.
    try:
        Image.open(cStringIO.StringIO(repo.filecontents(path, rev, raw=True)))
        return 'image', '(Image file)'
    except IOError:
        return 'binary', '(Binary file)'

def filecontents(repo, path, rev, binaries, images, can_truncate, no_contents):
    """Return the contents of path@rev.

    With binaries set, returns a raw Response (optionally rendered as a
    resized PNG when images is set); otherwise returns a JSON-able dict
    with filetype/truncation metadata. Text contents are capped at 200kB
    unless can_truncate is false. The image/binary classification was
    previously duplicated inline; it now lives in _classify_binary_file().
    """
    truncated = False
    ft = filetuple(path)
    if repo.isbfile(path) and not binaries:
        # Big files are never inlined as text; classify and use a placeholder.
        filetype, contents = _classify_binary_file(repo, path, rev)
    elif no_contents:
        # Caller wants metadata only; report as truncated text.
        contents = ''
        truncated = True
        filetype = 'text'
    else:
        contents = repo.filecontents(path, rev, raw=binaries)
        if util.binary(contents):
            if not binaries:
                filetype, contents = _classify_binary_file(repo, path, rev)
            # (when binaries is set, the raw Response below is returned and
            # filetype is never consulted)
        else:
            filetype = 'text'
            truncate_length = 200000
            if len(contents) > truncate_length and can_truncate:
                truncated = True
                contents = contents[:truncate_length]
    if binaries:
        if images:
            # Best effort: render a resized PNG; fall back to raw bytes.
            try:
                imfile = cStringIO.StringIO()
                resizeimage(Image.open(cStringIO.StringIO(contents))).save(imfile, "PNG")
                contents = imfile.getvalue()
            except IOError:
                pass
        return Response(contents)
    else:
        return {'type': 'file',
                'path': ft['path'],
                'bytepath': ft['bytepath'],
                'name': ft['name'],
                'filetype': filetype,
                'truncated': truncated,
                'contents': contents,
                'formatted_contents': format_file(path, contents) if not truncated else None}
def resizeimage(image, displaySize=None):
    """Resize `image` to displaySize (default: determinedisplaysize of its
    own size); return the image itself when it is already the right size.

    Fixed: compared against None with `==` instead of `is`.
    """
    if displaySize is None:
        displaySize = tuple(determinedisplaysize(image.size))
    if image.size == displaySize:
        return image
    return image.resize(displaySize)
def subtractimages(oldimage, newimage):
    """Return an RGB image of per-channel absolute differences between two
    images, with changed pixels boosted via leahhighlight()."""
    im = Image.new("RGB", oldimage.size)
    pix = im.load()
    npix = newimage.load()
    opix = oldimage.load()
    # NOTE(review): width comes from oldimage but height from newimage; the
    # caller resizes both to the same display size, so this works -- confirm
    # if that invariant ever changes.
    for x in xrange(oldimage.size[0]):
        for y in xrange(newimage.size[1]):
            pix[x, y] = abs(npix[x, y][0] - opix[x, y][0]), abs(npix[x, y][1] - opix[x, y][1]), abs(npix[x, y][2] - opix[x, y][2])
            pix[x, y] = leahhighlight(pix[x,y])
    return im
def andrewdifference(pix):
    """Run each of the first three channels of `pix` through the f() curve."""
    red, green, blue = f(pix[0]), f(pix[1]), f(pix[2])
    return (red, green, blue)
def f(x):
    """Square-root brightness curve: map a 0..256 channel value through
    sqrt(x/256) scaled back to 0..256, truncated to an int."""
    scaled = x / 256.0
    return int((scaled ** 0.5) * 256)
def leahhighlight(pix):
    """Boost pixels whose every channel is at least 18 so genuine changes
    stand out; the green channel is amplified twice as hard."""
    r, g, b = pix[0], pix[1], pix[2]
    if min(r, g, b) >= 18:
        return (5 * r, 10 * g, 5 * b)
    return pix
def directorylisting(repo, path, rev):
    """Return {'type': 'files', 'files': [...]} for a directory at `rev`,
    raising NotFound when the path does not exist at that revision.

    Fixed: compared against None with `==` instead of `is`.
    """
    files = repo.directorylisting(path, rev)
    if files is None:
        raise NotFound
    return {'type': 'files', 'files': files}
@get('/repo/<uuid>/annotate/<rev>/<path:path>')
def annotate(uuid, path, rev):
    """Annotate a file, or just a window of lines around ?line=N (&count=M)."""
    r = Repository(uuid)
    path = hexdecode(path)
    if not r.exists() or not r.hasfile(path, rev):
        raise NotFound
    if util.binary(r.filecontents(path, rev)):
        return error('Unable to annotate binary files', 'annotate_binary')
    line = request.args.get('line')
    if line:
        count = int(request.args.get('count', 4))
        return linehistory(r, path, rev, int(line), count)
    return filehistory(r, path, rev, int(request.args.get('count', 0)))
def linehistory(r, path, rev, line, count):
    """Changesets touching `count` lines around `line` of path@rev."""
    changesets = r.annotateline(path, rev, line, count)
    return dict(type='changesets', changesets=changesets)
def filehistory(r, path, rev, count):
    """Full annotation of path@rev; `count` is forwarded to Repository.annotate."""
    annotation = r.annotate(path, rev, count=count)
    return dict(type='annotation', annotation=annotation)
@get('/repo/<uuid>/branches')
def branches(uuid):
    """Return the repository's branch list."""
    r = Repository(uuid)
    if r.exists():
        return r.branches()
    raise NotFound
@post('/repo/<uuid>/changeset') # For many changesets, e.g. reviews.
@get('/repo/<uuid>/changeset/<revs>')
@get('/repo/<uuid>/changeset/<revs>/<filename>')
def changesets(uuid, revs=None, filename=None):
    """Return changeset info for 'rev', 'rev1,rev2,...' or 'rev1:rev2' specs,
    optionally restricted to a single (hex-encoded) filename.

    POST supplies revs/filename in the form body instead of the URL.
    """
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    if request.method == 'POST':
        revs = request.form['revs']
        filename = request.form.get('filename', None)
    if filename:
        filename = hexdecode(filename)
    changedfiles = request.values.get('changedfiles')
    revs = revs.split(':')
    try:
        if len(revs) == 1:
            # did you instead give us an enumeration of individual changesets?
            revs = revs[0].split(',')
            if len(revs) == 1:
                # Only one changeset, allow for file changesets
                if filename:
                    return dict(r.filechangeset(filename, revs[0]), type='filechangeset')
                else:
                    return dict(r.changeset(revs[0], changedfiles), type='changeset')
            else:
                # multiple changesets
                return {'type': 'changesets',
                        'changesets': r.changesets(revs, changedfiles)}
        elif len(revs) == 2:
            if filename:
                limit = int(request.values.get('limit', 0))
                return {'type': 'filechangesets',
                        'filechangesets': r.filechangesets(filename, revs[0], revs[1], limit)}
            else:
                return {'type': 'changesets',
                        'changesets': r.changesetrange(revs[0], revs[1], changedfiles)}
    except Exception:
        # Lookup failures become 400s; the original bare except would also
        # have trapped SystemExit/KeyboardInterrupt.
        raise BadRequest
@get('/repo/<uuid>/diff/<revs>')
@get('/repo/<uuid>/diff/<revs>/<filename>')
def diff(uuid, revs, filename=None):
    """Return formatted diffs for 'rev' or 'rev1:rev2', optionally one file."""
    r = Repository(uuid)
    if filename:
        filename = hexdecode(filename)
    if not r.exists():
        raise NotFound
    revs = revs.split(':')
    for rev in revs:
        if not r.hasrevision(rev):
            raise BadRequest
    # Set maxsize to 80 kB or as requested, unless it's a single file,
    # in which case serve 200kb. The value of 80 kB was
    # lovingly determined by trial and error. If you change it,
    # please remember at least to do the former.
    default_kb = 200 if filename else 80
    maxsize = int(request.args.get('maxsize') or default_kb * 1000)
    ignorews = request.args.get('ignorews', 'False').lower() == 'true'
    opts = {'filename': filename, 'maxsize': maxsize, 'ignorews': ignorews}
    if len(revs) > 1:
        opts['rev2'] = revs[1]
    udiff, bytecount = r.diff(revs[0], **opts)
    format_diffs(udiff)
    if not filename:
        return {'type': 'diffs',
                'truncated': bytecount - maxsize > 0,
                'diffs': udiff}
    if udiff:
        return udiff[0]
    return {'type': 'diff'}
@get('/repo/<uuid>/outgoing/<uuid2>')
def outgoing_get(uuid, uuid2):
    """Report what pushing uuid -> uuid2 would do (new heads, changesets)."""
    source = Repository(uuid)
    target = Repository(uuid2)
    nochangesets = int(request.args.get('nochangesets', 0))
    if not source.exists():
        raise NotFound
    if not target.exists():
        raise BadRequest
    if not source.isrelated(target):
        return error('repositories are not related', 'notrelated')
    newheads = source.pushwouldmakeheads(target)
    changesets = [] if nochangesets else source.outgoing(target)
    return {'type': 'outgoing', 'newheads': newheads, 'changesets': changesets}
@post('/repo/<uuid>/outgoing/<uuid2>')
def push_repo(uuid, uuid2):
r1 = Repository(uuid)
r2 = Repository(uuid2)
ixPerson = request.form['ixPerson']
url = request.form['website']
if not r1.exists():
raise NotFound
if not r2.exists():
raise BadRequest
if not r1.isrelated(r2):
return error('repositories are not related', 'notrelated')
if not r1.outgoing(r2):
return error("repositories were already sync'd", 'alreadysyncd')
try:
return {'type': 'push', 'success': r1.push(r2, url, pusher=ixPerson)}
except RepositoryNotSubsetException, e:
return error(str(e), 'notstrictsubset')
except CreatesNewHeadsException, e:
return error(str(e), 'newheads')
@post('/sync')
def sync():
    """Synchronize repositories with a remote Kiln node (hosted installs only).

    Without a 'repo' form field, fans out one async sync request per repo
    that needs syncing. With one, pulls that repo's metadata/changes from
    the remote (or fetches a single big file when 'bfile' is supplied) and
    chains the sync to configured peers. Failures are collected and
    reported in the response rather than aborting the whole sync.
    """
    if not settings.HOSTED:
        raise BadRequest
    remote = request.form["remote"]
    if 'repo' not in request.form:
        # We don't have a specific repo, so we'll trigger a sync to every repo that needs it.
        repos = syncstatus.need_sync(remote)
        for repo in repos:
            asyncpost(request.base_url, dict(remote=remote, repo=repo))
        return dict(type='sync', success=True, count=len(repos))
    # Fetch the repo's metadata from the remote node.
    resp = urllib2.urlopen(urlutil.urljoin(remote, "repo/%s" % request.form['repo']))
    repo = simplejson.loads(resp.read())
    failures = []
    relink = False
    r = Repository(repo['uuid'], suppresshooks=True)
    if not r.exists():
        # First time we've seen this repo locally: create it, and relink
        # hardlinks after the initial pull below.
        r.create(repo['meta'])
        relink = True
    r.meta = repo['meta']
    if 'bfile' in request.form:
        # Big-file sync: fetch the single blob if we don't already have it.
        try:
            sha = request.form['bfile']
            if bfiles.ishash(sha) and not bfiles.instore(sha):
                resp = urllib2.urlopen(urlutil.urljoin(remote, 'repo', r.uuid, 'bfile', sha))
                bfiles.storebfile(resp, sha)
        except Exception, e:
            failures.append({'repo': repo['uuid'], 'exception': e})
            report_exception(e)
    else:
        remoteurl = urlutil.urljoin(remote, 'repo', r.uuid)
        try:
            r.pull(remoteurl)
            if settings.DO_INDEXING:
                queue_repo_index(repo['uuid'])
            if settings.HOSTED:
                syncstatus.update_status(r)
            if relink:
                r.relink()
            # Chain the sync along
            r.sync(site=urlutil.siteurl(request), peers=dict(r.ui.configitems('post_peers')))
        except LockHeld, e:
            # No need to report locked repos. They're expected.
            failures.append({'repo': repo['uuid'], 'exception': e})
        except Exception, e:
            failures.append({'repo': repo['uuid'], 'exception': e})
            report_exception(e, "uuid=%s, r.repo['tip'].rev()=%s, request.form=%s\n"
                % (repo['uuid'], str(r.repo['tip'].rev()), str(request.form)))
    d = {'type': 'sync', 'success': not failures}
    if failures:
        d['failures'] = failures
    return d
@get('/version')
def version():
    """Report the backend version alongside the Mercurial library version."""
    return dict(version=settings.KILN_BACKEND_VERSION, hg_version=util.version())
@app.route('/repo/<uuid>/bfile', methods=['GET', 'POST'])
@app.route('/repo/<uuid>/bfile/<sha>', methods=['GET', 'POST'])
def bfilehandle(uuid, sha=None):
    """Store, fetch, or enumerate big files (bfiles) for a repository.

    GET without sha lists all stored bfiles; GET with sha streams one.
    POST uploads the file at form field 'name', then best-effort chains it
    to configured peers. HEAD with sha responds with the stored file's
    SHA1 in the Content-SHA1 header.
    """
    repo = Repository(uuid)
    if not sha:
        if request.method == 'GET':
            return Response(simplejson.dumps(bfiles.listbfiles()))
        else:
            raise BadRequest
    if request.method == 'GET':
        try:
            return Response(bfiles.bfilecontents(sha))
        except IOError:
            raise NotFound
    # bfiles uses PUT to upload files but django read the entire file into memory
    # use POST instead so that we can access the file with a generator
    # NOTE: This may no longer be necessary with flask, but it's the way it works
    # so there's no reason to change it back right now.
    elif request.method == 'POST':
        try:
            if bfiles.instore(sha):
                return Response(status=200)
            elif bfiles.storebfile(request.files['name'], sha):
                try:
                    repo.sync(site=urlutil.siteurl(request),
                              bfile=sha,
                              peers=dict(repo.ui.configitems('peers')))
                finally:
                    # NOTE(review): returning from finally swallows any
                    # exception raised by repo.sync -- presumably deliberate,
                    # since the upload itself already succeeded (201).
                    return Response(status=201)
            else:
                #SHA1 is checked by storebfile
                raise BadRequest('SHA1 of file does not match SHA1 given.')
        except Exception, e:
            report_exception(e)
            raise BadRequest
    elif request.method == 'HEAD':
        if bfiles.instore(sha):
            # Hash the stored file in 32 kB chunks to compute its SHA1.
            m = hashlib.sha1()
            with bfiles.bfilecontents(sha) as fd:
                while True:
                    data = fd.read(32768)
                    if not data:
                        break
                    m.update(data)
            response = Response()
            response.headers['Content-SHA1'] = m.hexdigest()
            return response
        else:
            raise NotFound
    else:
        raise BadRequest
@app.route('/repo/<uuid>/serve', methods=['GET', 'POST'])
def serve(uuid):
    """Serve the repository over the Mercurial wire protocol via hgweb."""
    r = Repository(uuid, suppressoutput=False)
    if not r.exists():
        raise NotFound
    repo = r.repo
    if 'ixPerson' in request.args:
        # Thread the acting user and site context through to the hg hooks.
        ui = repo.ui
        ui.setconfig('kiln', 'ixperson', request.args['ixPerson'])
        ui.setconfig('kiln', 'url', request.args['website'])
        ui.setconfig('kiln', 'site', urlutil.siteurl(request))
        ui.setconfig('kiln', 'token', request.args.get('token', ''))
    # if we're about to push, run recover. Don't do this for pull,
    # because it locks the repo (even if only for a second), and it's
    # obviously better if we don't have to wait for a push to finish
    # to pull
    if request.args['cmd'] == 'unbundle':
        r.recover()
    request.environ['REPO_NAME'] = request.environ['PATH_INFO'].strip('/')
    return hgweb.hgweb(repo.root, baseui=repo.ui)
@get('/repo/<uuid>/heads')
def get_heads(uuid):
    """Return (rev number, hex id) pairs for every head of the repository."""
    r = Repository(uuid)
    if not r.exists():
        raise NotFound
    repo = r.repo
    heads = []
    for head in repo.heads():
        ctx = repo[head]
        heads.append((ctx.rev(), ctx.hex()))
    return {'heads': heads}