Changeset c5ec70d1ed42…
Parent fca1d86acdc9…
by Benjamin Pollack <benjamin@fogcreek.com>
Changes to 42 files · Browse files at c5ec70d1ed42 Showing diff from parent fca1d86acdc9 Diff from another changeset...
|
@@ -0,0 +1,18 @@ + syntax: glob
+*.pyc
+*.pyo
+*.swp
+*.db
+*.sqlite3
+*.orig
+kiln/dist/*
+TAGS
+\#*\#
+local_settings.py
+kiln/build/*
+installer/Output
+out.txt
+*~
+_ReSharper.*
+obj
+bin
|
@@ -1,6 +1,6 @@ OVERVIEW
-This is a stand-alone server for Mercurial repositories, that provides
+This is a stand-alone server for Mercurial repositories that provides
Mercurial data in the form of JSON requests. This allows for much
more efficient polling of repository data from long-running
applications, such as websites, IDEs, and so on.
|
@@ -1,5 +1,13 @@ param([string] $repopath = "..")
+function Get-Batchfile ($file) {
+ $cmd = "`"$file`" & set"
+ cmd /c $cmd | Foreach-Object {
+ $p, $v = $_.split('=')
+ Set-Item -path env:$p -value $v
+ }
+}
+
function Get-ScriptDirectory
{
$Invocation = (Get-Variable MyInvocation -Scope 1).Value
@@ -10,9 +18,18 @@
pushd $path
pushd kiln
+if (test-path 'c:\pythonve\kiln25')
+{
+ Get-Batchfile('c:\pythonve\kiln25\scripts\activate.bat')
+}
python setup.py py2exe
+if (test-path 'c:\pythonve\kiln25')
+{
+ Get-Batchfile('c:\pythonve\kiln25\scripts\deactivate.bat')
+}
hg -R $repopath archive -t zip dist\source.zip
popd
+c:\Windows\Microsoft.NET\Framework\v3.5\msbuild.exe /p:Configuration=Release installer\RepoDirectoryMigrator\RepoDirectoryMigrator.sln
$iscc = "C:\Program Files (x86)\Inno Setup 5\ISCC.exe"
if (-not (Test-Path $iscc))
{
|
|
@@ -1,5 +1,5 @@ #define MyAppName "Kiln Storage Service"
-#define MyAppVerName "Kiln Storage Service 1.0"
+#define MyAppVerName "Kiln Storage Service 2.5"
#define MyAppPublisher "Fog Creek Software"
#define MyAppURL "http://www.fogcreek.com/kiln/"
@@ -33,16 +33,21 @@WelcomeLabel2=This will install [name/ver] on your computer.
[Files]
+
+Source: RepoDirectoryMigrator\RepoDirectoryMigrator\bin\x86\Release\RepoDirectoryMigrator.exe; DestDir: {tmp}; Flags: ignoreversion
+Source: ctags.exe; DestDir: {app}; Flags: ignoreversion
Source: ..\kiln\dist\library.zip; DestDir: {app}; Flags: ignoreversion
-Source: ..\kiln\dist\w9xpopen.exe; DestDir: {app}; Flags: ignoreversion
Source: ..\kiln\dist\backend.exe; DestDir: {app}; Flags: ignoreversion
+Source: ..\kiln\redis-server.exe; DestDir: {app}; Flags: ignoreversion
Source: ..\kiln\dist\source.zip; DestDir: {app}; Flags: ignoreversion
+Source: ..\kiln\dist\opengrok.jar; DestDir: {app}\opengrok; Flags: ignoreversion
+Source: ..\kiln\dist\lib\*; DestDir: {app}\opengrok\lib; Flags: recursesubdirs replacesameversion; Excludes: .hg*,*~
+Source: ..\kiln\client.crt; DestDir: {app}; Flags: ignoreversion
+Source: ..\kiln\client.key; DestDir: {app}; Flags: ignoreversion
[Icons]
Name: {group}\{cm:UninstallProgram,{#MyAppName}}; Filename: {uninstallexe}
-[Run]
-Filename: {app}\backend.exe; Parameters: --startup auto install; StatusMsg: Registering Kiln Storage Service; Flags: runhidden
[UninstallRun]
Filename: {app}\backend.exe; Parameters: stop; StatusMsg: Stopping Kiln Storage Service; Flags: runhidden
Filename: {app}\backend.exe; Parameters: remove; StatusMsg: Removing Kiln Storage Service; Flags: runhidden
@@ -56,12 +61,40 @@ StorageLocation: String;
Port: Cardinal;
+ JavaVersion: String;
+
+ StoppedOldService: Boolean;
+
const
REG_KEY = 'Software\Fog Creek Software\Kiln';
+ OG_KEY = 'Software\Fog Creek Software\Kiln\OpenGrok';
+ DAEMON_KEY = 'Software\Fog Creek Software\Kiln\Daemon';
+ JAR = 'Jar';
BACKEND_IP = 'KilnBackendIP';
BACKEND_PORT = 'KilnBackendPort';
REPOSITORY_ROOT = 'KilnRepositoryRoot';
DELIBERATELY_PUBLIC = 'KilnDeliberatelyPublic';
+ MINIREDIS_DB = 'MiniredisDB';
+ DATA_DIR = 'DataDir';
+
+ INDEX_THREADS = 'IndexThreads';
+ QUEUE_THREADS = 'QueueThreads';
+ NINDEX_THREADS = 1;
+ NQUEUE_THREADS = 1;
+
+ DAEMON_HOST = 'host';
+ DAEMON_PORT = 'port';
+ DAEMON_DB = 'db';
+ DAEMON_SSL_KEY = 'ssl_key';
+ DAEMON_SSL_CERT = 'ssl_cert';
+
+ JAVA_KEY = 'Software\JavaSoft\Java Runtime Environment';
+ JAVA_VERSION = 'CurrentVersion';
+
+ JAVA = 'Java';
+ CONFIG_UPDATE = 'ConfigUpdate';
+ JAVA_HOME = 'JavaHome';
+ CTAGS = 'CTags';
procedure InitializeWizard;
var
@@ -72,6 +105,7 @@ nextPageParent: Integer;
param: String;
begin
+ StoppedOldService := False;
LocalOnly := False;
for idx := 0 to ParamCount do
begin
@@ -133,8 +167,12 @@procedure FinishInstall;
var
ip: String;
+ ogStorageLocation: String;
+ MiniredisDBLocation: String;
ResultCode: Integer;
deliberatelyPublic: Cardinal;
+ JavaLoc: String;
+ ret: Boolean;
begin
if (CompareStr(StorageLocation, '') = 0) then StorageLocation := StorageLocationPage.Values[0];
if Port = 0 then Port := StrToInt(PortNumberPage.Values[0]);
@@ -149,25 +187,70 @@ deliberatelyPublic := 1;
end;
+ ogStorageLocation := StorageLocation + '\opengrokdata';
+ MiniredisDBLocation := StorageLocation + '\miniredis.db';
if not DirExists(StorageLocation) then CreateDir(StorageLocation);
+ if not DirExists(ogStorageLocation) then CreateDir(ogStorageLocation);
+
+ if IsWin64 then ret := RegQueryStringValue(HKLM64, JAVA_KEY + '\' + JavaVersion, JAVA_HOME, JavaLoc)
+ else ret := RegQueryStringValue(HKEY_LOCAL_MACHINE, JAVA_KEY + '\' + JavaVersion, JAVA_HOME, JavaLoc);
+
+ JavaLoc := JavaLoc + '\bin\java.exe';
RegWriteStringValue(HKEY_LOCAL_MACHINE, REG_KEY, REPOSITORY_ROOT, StorageLocation);
RegWriteDWordValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_PORT, Port);
RegWriteStringValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_IP, ip);
RegWriteDWordValue(HKEY_LOCAL_MACHINE, REG_KEY, DELIBERATELY_PUBLIC, deliberatelyPublic);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, REG_KEY, MINIREDIS_DB, MiniredisDBLocation);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, JAR, ExpandConstant('{app}\opengrok\opengrok.jar'));
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, DATA_DIR, ogStorageLocation);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, CONFIG_UPDATE, 'localhost:2424');
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, JAVA, JavaLoc);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, CTAGS, ExpandConstant('{app}\ctags.exe'));
+
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_HOST, 'localhost');
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_PORT, Port + 1);
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_DB, 0);
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, INDEX_THREADS, NINDEX_THREADS);
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, QUEUE_THREADS, NQUEUE_THREADS);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_KEY, ExpandConstant('{app}\client.key'));
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_CERT, ExpandConstant('{app}\client.crt'));
+
+ if Exec(ExpandConstant('{tmp}\RepoDirectoryMigrator.exe'), '', '', SW_HIDE, ewWaitUntilTerminated, ResultCode) then begin
+ if ResultCode <> 0 then RaiseException('Failed to migrate repositories to new directory structure!');
+ end;
+
+ if Exec(ExpandConstant('{app}\backend.exe'), '--startup auto install', '', SW_HIDE, ewWaitUntilTerminated, ResultCode) then begin
+ if ResultCode <> 0 then RaiseException('Failed to install service!');
+ end;
if Exec(ExpandConstant('{app}\backend.exe'), 'start', '', SW_HIDE, ewWaitUntilTerminated, ResultCode) then begin
if ResultCode <> 0 then RaiseException('Failed to start service!');
end;
end;
+procedure DeinitializeSetup();
+var
+ BackendPath: String;
+ ResultCode: Integer;
+begin
+ if StoppedOldService then begin
+ BackendPath := ExpandConstant('{app}\backend.exe');
+ Exec(BackendPath, 'start', '', SW_HIDE, ewNoWait, ResultCode);
+ end;
+end;
+
procedure HaltBackend;
var
BackendPath: String;
ResultCode: Integer;
begin
BackendPath := ExpandConstant('{app}\backend.exe');
- if FileExists(BackendPath) then Exec(BackendPath, 'stop', '', SW_HIDE, ewWaitUntilTerminated, ResultCode);
+ if FileExists(BackendPath) then begin
+ StoppedOldService := True;
+ Exec(BackendPath, 'stop', '', SW_HIDE, ewWaitUntilTerminated, ResultCode);
+ end;
+ Sleep(3000)
end;
procedure CurStepChanged(CurStep: TSetupStep);
@@ -183,5 +266,40 @@ RegDeleteValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_PORT);
RegDeleteValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_IP);
RegDeleteValue(HKEY_LOCAL_MACHINE, REG_KEY, DELIBERATELY_PUBLIC);
+
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, JAR);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, DATA_DIR);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, CONFIG_UPDATE);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, JAVA);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, CTAGS);
+
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_HOST);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_PORT);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_DB);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, INDEX_THREADS);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, QUEUE_THREADS);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_KEY);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_CERT);
end;
end;
+
+function NextButtonClick(CurPageID: Integer) : Boolean;
+var
+ version: String;
+ ret: Boolean;
+begin
+ if CurPageID = wpWelcome then
+ begin
+ if IsWin64 then ret := RegQueryStringValue(HKLM64, JAVA_KEY, JAVA_VERSION, version)
+ else ret := RegQueryStringValue(HKEY_LOCAL_MACHINE, JAVA_KEY, JAVA_VERSION, version);
+
+ if ret then JavaVersion := version
+ else
+ begin
+ MsgBox('The Kiln Storage Service requires the Java Runtime Environment (JRE) be installed. Please install the JRE for your platform from the Oracle website.',
+ mbInformation, MB_OK);
+ Abort();
+ end;
+ end;
+ Result := True
+end;
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,48 @@ + <Configuration>
+ <SettingsComponent>
+ <string />
+ <integer />
+ <boolean>
+ <setting name="SolutionAnalysisEnabled">False</setting>
+ </boolean>
+ </SettingsComponent>
+ <RecentFiles>
+ <RecentFiles>
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="398" fromTop="14" />
+ </RecentFiles>
+ <RecentEdits>
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="92" fromTop="3" />
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="228" fromTop="9" />
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="366" fromTop="14" />
+ </RecentEdits>
+ </RecentFiles>
+ <NAntValidationSettings>
+ <NAntPath value="" />
+ </NAntValidationSettings>
+ <UnitTestRunner>
+ <Providers />
+ </UnitTestRunner>
+ <UnitTestRunnerNUnit>
+ <NUnitInstallDir IsNull="False">
+ </NUnitInstallDir>
+ <UseAddins>Never</UseAddins>
+ </UnitTestRunnerNUnit>
+ <CompletionStatisticsManager>
+ <ItemStatistics item="Default">
+ <Item value="using" priority="0" />
+ <Item value="Microsoft" priority="0" />
+ <Item value="Win32" priority="0" />
+ <Item value="var" priority="2" />
+ <Item value="Registry`0" priority="0" />
+ <Item value="rk" priority="0" />
+ <Item value="const" priority="1" />
+ <Item value="string" priority="0" />
+ <Item value="RegistryKey`0" priority="0" />
+ <Item value="Environment`0" priority="0" />
+ <Item value="root" priority="0" />
+ </ItemStatistics>
+ <ItemStatistics item="Qualified:Microsoft.Win32.RegistryKey">
+ <Item value="GetValue`0" priority="1" />
+ </ItemStatistics>
+ </CompletionStatisticsManager>
+</Configuration>
\ No newline at end of file |
|
@@ -0,0 +1,20 @@ + 
+Microsoft Visual Studio Solution File, Format Version 10.00
+# Visual Studio 2008
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RepoDirectoryMigrator", "RepoDirectoryMigrator\RepoDirectoryMigrator.csproj", "{AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|x86 = Debug|x86
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Debug|x86.ActiveCfg = Debug|x86
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Debug|x86.Build.0 = Debug|x86
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Release|x86.ActiveCfg = Release|x86
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Release|x86.Build.0 = Release|x86
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
|
|
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,30 @@ + using System;
+using System.IO;
+using Microsoft.Win32;
+
+namespace RepoDirectoryMigrator
+{
+ class Program
+ {
+ static void Main(string[] args)
+ {
+ var repoRoot = (string)Registry.GetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Fog Creek Software\Kiln", "KilnRepositoryRoot", null);
+ if (string.IsNullOrEmpty(repoRoot))
+ {
+ Console.Error.WriteLine("KEY NOT FOUND!");
+ Environment.Exit(1);
+ }
+ var repositories = Directory.GetDirectories(repoRoot, "????????-????-????-????-????????????");
+ foreach (var path in repositories)
+ {
+ var repo = Path.GetFileName(path);
+ var part1 = Path.Combine(repoRoot, repo.Substring(0, 2));
+ var part2 = Path.Combine(part1, repo.Substring(2, 2));
+ Directory.CreateDirectory(part1);
+ Directory.CreateDirectory(part2);
+ Directory.Move(path, Path.Combine(part2, repo));
+ }
+ Console.Error.WriteLine("SUCCESS!");
+ }
+ }
+}
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,36 @@ + using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("RepoDirectoryMigrator")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Microsoft")]
+[assembly: AssemblyProduct("RepoDirectoryMigrator")]
+[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("47cb10cb-cc59-438e-b866-e7b6eebcbab0")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,66 @@ + <?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="3.5" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+ <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+ <ProductVersion>9.0.30729</ProductVersion>
+ <SchemaVersion>2.0</SchemaVersion>
+ <ProjectGuid>{AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}</ProjectGuid>
+ <OutputType>Exe</OutputType>
+ <AppDesignerFolder>Properties</AppDesignerFolder>
+ <RootNamespace>RepoDirectoryMigrator</RootNamespace>
+ <AssemblyName>RepoDirectoryMigrator</AssemblyName>
+ <TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
+ <FileAlignment>512</FileAlignment>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+ <DebugSymbols>true</DebugSymbols>
+ <DebugType>full</DebugType>
+ <Optimize>false</Optimize>
+ <OutputPath>bin\Debug\</OutputPath>
+ <DefineConstants>DEBUG;TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+ <DebugType>pdbonly</DebugType>
+ <Optimize>true</Optimize>
+ <OutputPath>bin\Release\</OutputPath>
+ <DefineConstants>TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x86' ">
+ <DebugSymbols>true</DebugSymbols>
+ <OutputPath>bin\x86\Debug\</OutputPath>
+ <DefineConstants>DEBUG;TRACE</DefineConstants>
+ <DebugType>full</DebugType>
+ <PlatformTarget>x86</PlatformTarget>
+ <ErrorReport>prompt</ErrorReport>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x86' ">
+ <OutputPath>bin\x86\Release\</OutputPath>
+ <DefineConstants>TRACE</DefineConstants>
+ <Optimize>true</Optimize>
+ <DebugType>pdbonly</DebugType>
+ <PlatformTarget>x86</PlatformTarget>
+ <ErrorReport>prompt</ErrorReport>
+ </PropertyGroup>
+ <ItemGroup>
+ <Reference Include="System" />
+ <Reference Include="System.Data" />
+ <Reference Include="System.Xml" />
+ </ItemGroup>
+ <ItemGroup>
+ <Compile Include="Program.cs" />
+ <Compile Include="Properties\AssemblyInfo.cs" />
+ </ItemGroup>
+ <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+ <!-- To modify your build process, add your task inside one of the targets below and uncomment it.
+ Other similar extension points exist, see Microsoft.Common.targets.
+ <Target Name="BeforeBuild">
+ </Target>
+ <Target Name="AfterBuild">
+ </Target>
+ -->
+</Project>
\ No newline at end of file |
|
|
@@ -1,43 +1,20 @@ #!/usr/bin/env python
+import site
+site.addsitedir('/home/kiln/virtualenv/kiln25/lib/python2.6/site-packages')
+
import os
import sys
-import urllib
-import urllib2
-
-from django.core.handlers.wsgi import WSGIHandler
OUR_ROOT = os.path.abspath(os.path.dirname(__file__))
os.environ['HGENCODING'] = 'utf8'
+os.environ['TEMP'] = '/home/kiln/data/tmp'
paths = (OUR_ROOT, os.path.join(OUR_ROOT, 'kiln'))
for path in paths:
if path not in sys.path:
sys.path.append(path)
- os.environ['DJANGO_SETTINGS_MODULE'] = 'kiln.settings'
-class KilnWSGIHandler(WSGIHandler):
- def report_exception(self, e):
- def get_stack_trace():
- import traceback
- return '\n'.join(traceback.format_exception(*sys.exc_info()))
+from kiln.api import handlers
+from kiln.versionmiddleware import VersionMiddleware
+from kiln.errorloggingmiddleware import ErrorLoggingMiddleware
- bug = {'ScoutUserName': 'BugzScout',
- 'ScoutProject': 'Kiln',
- 'ScoutArea': 'Backend',
- 'Description': str(e),
- 'Extra': get_stack_trace()}
-
- try:
- urllib2.urlopen('http://our.fogbugz.com/scoutSubmit.asp', urllib.urlencode(bug))
- except:
- pass
-
- def __call__(self, environ, start_response):
- if 'kiln.tempdir' in environ:
- os.environ['TMPDIR'] = environ['kiln.tempdir']
- try:
- return super(KilnWSGIHandler, self).__call__(environ, start_response)
- except Exception, e:
- self.report_exception(e)
- raise
-
-application = KilnWSGIHandler()
+application = ErrorLoggingMiddleware(VersionMiddleware(handlers.app))
|
|
@@ -0,0 +1,12 @@ + # legacy imports
+from redis.client import Redis, ConnectionPool
+from redis.exceptions import RedisError, ConnectionError, AuthenticationError
+from redis.exceptions import ResponseError, InvalidResponse, InvalidData
+
+__version__ = '2.0.0'
+
+__all__ = [
+ 'Redis', 'ConnectionPool',
+ 'RedisError', 'ConnectionError', 'ResponseError', 'AuthenticationError'
+ 'InvalidResponse', 'InvalidData',
+ ]
|
|
@@ -46,11 +46,15 @@ import string
import os
import shutil
-from mercurial import commands, extensions, util, bdiff
+from mercurial import bdiff, commands, extensions, store, util
from mercurial.context import filectx
from mercurial.node import nullrev
from mercurial.i18n import _
-from mercurial.store import hybridencode
+
+CACHEPATH = 'annotations/'
+
+def hybridencode(f):
+ return store._hybridencode(f, lambda path: store._auxencode(path, True))
class annotationcache(object):
''' Provides access to the cache of file annotations.
@@ -61,7 +65,7 @@ access. A cache file is line-oriented where each line is an
n-tuple of strings separated by the separator character ':'.
- If the file has any ancestor with a different name, then we
+ If the file has any ancestor with a different name, then we
append .f or .n depending on whether or not we followed the
annotation history to these ancestors. Otherwise a generic
cache is created which works for either case.
@@ -70,8 +74,8 @@ def __init__(self, repo, follow = True):
''' Create a new annotations cache for the given repository '''
self.followflag = follow and 'f' or 'n'
+ self._opener = repo.opener
self.cachepath = repo.join("annotations")
- self.opener = util.opener(self.cachepath)
self.sepchar = ':'
# fdcache caches information about existing files:
@@ -81,6 +85,9 @@ # fdcache[path] does not exist if the file state is unknown
self.fdcache = {}
+ def opener(self, path, *args, **kwargs):
+ return self._opener(CACHEPATH + path, *args, **kwargs)
+
def makepath(self, filectx):
''' Computes the path to the cache for the given file revision. '''
relpath = os.path.join('data', filectx.path())
|
|
@@ -0,0 +1,44 @@ + # Copyright (C) 2008-2010 Fog Creek Software. All rights reserved.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2, incorporated herein by reference.
+
+from mercurial import ui
+import traceback
+from bugzscout import report_error
+
+class emptyui(ui.ui):
+ def __init__(self, src=None, suppressoutput=True):
+ super(emptyui, self).__init__(src)
+ if isinstance(src, emptyui):
+ self.suppressoutput = src.suppressoutput
+ else:
+ self.suppressoutput = suppressoutput
+
+ if self.suppressoutput:
+ self.pushbuffer()
+
+ # Wrap the ui's write functions because writing to stdout causes an exception.
+ # Save the output using a buffer and create a bug from it later (essentially
+ # catch the error then report it).
+ def write_err(self, *args, **opts):
+ return self.write(*args, **opts)
+
+ def write(self, *args, **opts):
+ super(emptyui, self).write(*args, **opts)
+ if self.suppressoutput:
+ if len(self._buffers) == 1:
+ super(emptyui, self).write('\n'.join(traceback.format_stack()) + '\n')
+
+ def __del__(self):
+ if self.suppressoutput:
+ buffer = self.popbuffer()
+ if buffer:
+ report_error('Mercurial output error.', buffer)
+ try:
+ super(emptyui, self).__del__()
+ except AttributeError:
+ pass
+
+ def readconfig(self, *args, **kwargs):
+ pass
|
|
@@ -3,6 +3,8 @@ # This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
+import difflib
+import re
from pygments import highlight
from pygments.lexers import get_lexer_for_filename, guess_lexer_for_filename, TextLexer
from pygments.formatters import HtmlFormatter
@@ -13,8 +15,14 @@ 'vbs': 'vb',
'fbp5': 'xml',
'xul': 'xml',
+ 'ipp': 'cpp',
'jsm': 'js'}
+LINE_MAX = 20000
+
+def ensurenewline(s):
+ return s if s.endswith('\n') else s + '\n'
+
def tweak(filename):
"""change filename to a known extension, if applicable"""
(filename, extension) = filename.split('/')[-1].rsplit('.', 1)
@@ -25,50 +33,174 @@ """select an appropriate lexer based on the filename"""
try:
if content:
- return guess_lexer_for_filename(tweak(filename), content, stripnl=False)
+ l = guess_lexer_for_filename(tweak(filename), content, stripnl=False)
else:
- return get_lexer_for_filename(tweak(filename), stripnl=False)
+ l = get_lexer_for_filename(tweak(filename), stripnl=False)
except:
- return TextLexer(stripnl=False)
+ l = TextLexer(stripnl=False)
+ l.add_filter('whitespace', spaces=True, wstokentype=False)
+ return l
-def highlighted(lex, code):
- return highlight(code, lex, HtmlFormatter(nowrap=True))
+class IntralineHtmlFormatter(HtmlFormatter):
+ in_change = False
+ ranges = []
+
+ def __init__(self, ranges=None, *args, **kw):
+ if ranges:
+ self.ranges = ranges
+ HtmlFormatter.__init__(self, *args, **kw)
+
+ def _split_change_markers(self, tokensource):
+ '''Pre-process the token stream before it is formatted, to mark the tokens that should be highlighted for intraline diffs.'''
+ ranges = self.ranges or []
+ pos = 0
+ for ttype, value in tokensource:
+ for value in value.splitlines(True):
+ l = len(value)
+ range = None
+ rr = [r for r in ranges if (r[0] <= pos <= r[1]) or (pos <= r[0] <= r[1] <= pos + l) or (r[0] <= pos + l <= r[1])]
+ if not rr:
+ yield ttype, value
+ pos += l
+ continue
+ last = None
+ for r in rr:
+ if r[0] <= pos:
+ # r starts at or before token
+ if r[1] <= pos + l:
+ # range covers prefix of token
+ self.in_change = True
+ i = r[1] - pos
+ yield ttype, value[:i]
+ self.in_change = False
+ else:
+ # range covers whole token
+ self.in_change = True
+ yield ttype, value
+ self.in_change = False
+ else:
+ # r starts in the middle of the token
+ i = last[1] - pos if last else 0
+ j = r[0] - pos
+ yield ttype, value[i:j]
+ if r[1] <= pos + l:
+ # range covers middle chunk
+ self.in_change = True
+ i = r[0] - pos
+ j = r[1] - pos
+ yield ttype, value[i:j]
+ self.in_change = False
+ else:
+ # range covers suffix of token
+ self.in_change = True
+ i = r[0] - pos
+ yield ttype, value[i:]
+ self.in_change = False
+ last = r
+ if last[1] <= pos + l:
+ i = last[1] - pos
+ yield ttype, value[i:]
+ pos += l
+
+ def _format_lines(self, tokensource):
+ return super(IntralineHtmlFormatter, self)._format_lines(self._split_change_markers(tokensource))
+
+ def _get_css_class(self, ttype):
+ return super(IntralineHtmlFormatter, self)._get_css_class(ttype) + (' ch' if self.in_change else '')
+
+def highlighted(lex, code, ranges=None):
+ return highlight(code, lex, IntralineHtmlFormatter(ranges, nowrap=True))
+
+def highlight_patch(lex, lines, ranges=None):
+ lines = [(line[0], ensurenewline(line[1:LINE_MAX])) for line in lines]
+ for x in xrange(0, len(lines)):
+ if lines[x][0] == '\\':
+ lines[x] = (lines[x][0], '\n')
+ patch = ''.join(l[1] for l in lines)
+ patch = highlighted(lex, patch, ranges).splitlines(True)
+ for x in xrange(0, min(len(patch), len(lines))):
+ if lines[x][0] == '\\':
+ lines[x] = (lines[x][0], ' No newline at end of file\n')
+ else:
+ lines[x] = (lines[x][0], patch[x])
+ return ''.join(line[0] + line[1] for line in lines)
+
+# returns a list of ranges (a, b), marking that characters a:b in the patch are changed.
+def intraline_diff(patch):
+ removed_lines = []
+ added_lines = []
+ ranges = []
+ l = 0
+
+ for line in patch + [' ']:
+ if line[0] == '-':
+ removed_lines.append(line[1:])
+ elif line[0] == '+':
+ added_lines.append(line[1:])
+ else:
+ if added_lines or removed_lines:
+ rtotal = sum(len(s) for s in removed_lines)
+ atotal = sum(len(s) for s in added_lines)
+
+ # split the diff text into whole words and individual non-word characters
+ removed_words = [w for w in re.split(r'(\w+|\W)', ''.join(removed_lines)) if w]
+ added_words = [w for w in re.split(r'(\w+|\W)', ''.join(added_lines)) if w]
+ removed, added = l, l + rtotal
+ seq = difflib.SequenceMatcher()
+ seq.set_seqs(removed_words, added_words)
+
+ # find the matching words of each string, using the ranges in each opcode.
+ # 'equal' action is for non-changed text; otherwise, mark the range as changed.
+ for (action, r1, r2, a1, a2) in seq.get_opcodes():
+ ac = ''.join(added_words[a1:a2])
+ rc = ''.join(removed_words[r1:r2])
+ a = len(ac)
+ r = len(rc)
+ added += a
+ removed += r
+ if action == 'equal':
+ continue
+ if a != 0:
+ ranges.append((added - a, added))
+ if r != 0:
+ ranges.append((removed - r, removed))
+
+ l += atotal + rtotal
+ removed_lines = []
+ added_lines = []
+ l += len(line) - 1
+ return sorted(ranges)
+
+def format(filename, diff):
+ if not diff:
+ return None
+ formatted = []
+ patch = []
+
+ if not isinstance(diff, unicode):
+ diff_asc = diff
+ else:
+ diff_asc = diff.encode('utf-8')
+ diff_asc = diff_asc.replace('\r', '')
+ lines = diff_asc.splitlines(True)
+ if isinstance(diff, unicode):
+ lines = [l.decode('utf-8') for l in lines]
+
+ lex = lexer(filename)
+ for line in lines:
+ if line.startswith(u'@@'):
+ if patch: formatted.extend(highlight_patch(lex, patch, intraline_diff(patch)))
+ formatted.append(line)
+ patch = []
+ else:
+ patch.append(line)
+ if patch: formatted.extend(highlight_patch(lex, patch, intraline_diff(patch)))
+ return ''.join(formatted)
def format_diffs(diffs):
- def highlight_patch(lex, lines):
- lines = [(line[0], line[1:]) for line in lines]
- for x in xrange(0, len(lines)):
- if lines[x][0] == '\\':
- lines[x] = (lines[x][0], '\n')
- patch = ''.join(l[1] for l in lines)
- patch = highlighted(lex, patch).splitlines(True)
- for x in xrange(0, min(len(patch), len(lines))):
- if lines[x][0] == '\\':
- lines[x] = (lines[x][0], ' No newline at end of file\n')
- else:
- lines[x] = (lines[x][0], patch[x])
- return ''.join(line[0] + line[1] for line in lines)
-
- def format(filename, diff):
- if not diff:
- return None
- formatted = []
- patch = []
- diff = diff.replace('\r', '')
- lines = diff.splitlines(True)
- lex = lexer(filename)
- for line in lines:
- if line.startswith('@@'):
- if patch: formatted.extend(highlight_patch(lex, patch))
- formatted.append(line)
- patch = []
- else:
- patch.append(line)
- if patch: formatted.extend(highlight_patch(lex, patch))
- return ''.join(formatted)
-
for d in diffs:
d['formatted_diff'] = format(d['file']['name'], d['diff'])
def format_file(filename, contents):
- return highlighted(lexer(filename), contents.replace('\r', ''))
+ lines = [line[:LINE_MAX] for line in contents.replace('\r', '').split('\n')]
+ return highlighted(lexer(filename), '\n'.join(lines))
|
|
|
@@ -1,205 +1,377 @@ - # Copyright (C) 2009-2010 by Fog Creek Software. All rights reserved.
+# Copyright (C) 2009-2011 by Fog Creek Software. All rights reserved.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
+from functools import wraps
+import hashlib
+import os
+import urllib2
-import base64
-import os
-import sys
-import urllib
-import urllib2
-from threading import Thread
+from flask import Flask, Response, request
+from mercurial import hgweb, util, context
+from mercurial.error import LockHeld, RepoLookupError
+from werkzeug.exceptions import NotFound, BadRequest
+import settings
+import simplejson
-from django.conf import settings
-from django.utils import simplejson
-from mercurial import ui, util
-from piston.emitters import Emitter
-from piston.handler import AnonymousBaseHandler, typemapper
-from piston.utils import rc
+import Image
+import cStringIO
+from bugzscout import report_exception
+from encoders import EmittableEncoder
+from formatter import format_diffs, format_file
+from repositories import Repository, RepositoryNotSubsetException, CreatesNewHeadsException, filetuple, hexdecode, determinedisplaysize
+from webtasks import asyncpost, queue_repo_index, queue_repo_create, queue_repo_strip
+import bfiles
+import syncstatus
import urlutil
-from formatter import format_diffs, format_file
-from repositories import Repository, RepositoryNotSubsetException, CreatesNewHeadsException
-from repositories import filetuple, hexdecode
-class fakerequest(object):
- pass
-fakerequest.GET = {}
+app = Flask(__name__)
+
+def jsonify(obj):
+ if isinstance(obj, Response) or isinstance(obj, basestring):
+ return obj
+ return Response(enc.encode(obj), mimetype='application/json')
+
+def route(url, methods=['GET'], as_json=True):
+ def wrapper(f):
+ @app.route(url, methods=methods)
+ @wraps(f)
+ def inner(*args, **kwargs):
+ r = f(*args, **kwargs)
+ if as_json:
+ r = jsonify(r)
+ return r
+ return inner
+ return wrapper
+
+def get(url, as_json=True):
+ return route(url, methods=['GET'], as_json=as_json)
+
+def post(url, as_json=True):
+ return route(url, methods=['POST'], as_json=as_json)
+
+def delete(url):
+ return app.route(url, methods=['DELETE'])
def error(message, code):
return {'type': 'error', 'message': message, 'code': code}
-def reportexception(e):
- if settings.DEBUG:
- return
+enc = EmittableEncoder()
- def gettraceback():
- import traceback
- return '\n'.join(traceback.format_exception(*(sys.exc_info())))
-
- traceback = gettraceback()
- bug = {'ScoutUserName': settings.FOGBUGZ_USERNAME,
- 'ScoutProject': settings.FOGBUGZ_PROJECT,
- 'ScoutArea': settings.FOGBUGZ_AREA,
- 'Description': 'Backend exception: %s' % e,
- 'Extra': traceback}
-
- if settings.HOSTED:
- try:
- urllib2.urlopen(settings.FOGBUGZ_URL, urllib.urlencode(bug))
- except:
- pass
- else:
- from filelogmiddleware import _log_error
- _log_error(bug)
-
-class PingbackThread(Thread):
- def __init__(self, handler, method, pingback, request, args, kwargs):
- super(PingbackThread, self).__init__()
- self.handler = handler
- self.method = method
- self.pingback = pingback
- self.request = request
- self.args = args
- self.kwargs = kwargs
-
- def run(self):
- r = self.method(self.handler, *self.args, **self.kwargs)
- emitter, mime = Emitter.get('json')
- srl = emitter(r, typemapper, self.handler, self.handler.fields, True)
- json = srl.render(fakerequest)
-
- success = False
- attempts = 3
- while attempts and not success:
- try:
- attempts -= 1
- urllib2.urlopen(self.pingback, urllib.urlencode({'data': json.encode('utf8')}))
- success = True
- except urllib2.URLError, e:
- if attempts == 0:
- reportexception(e)
-
-def ping_wrapper(method):
- def f(self, *args, **kwargs):
- q = args[0].POST
- if 'pingback' in q:
- t = PingbackThread(self, method, q['pingback'], args[0], args, kwargs)
- try:
- t.start()
- except Exception, e:
- print e
- return rc.ALL_OK
- else:
- return method(self, *args, **kwargs)
- return f
-
-def with_pingbacks(cls):
- """pingback wrapper
-
- This is a decorator that makes any given handler function run
- asynchronously if provided with a pingback parameter in the
- web request."""
-
- for m in ('create', 'read', 'delete', 'update'):
- if m in cls.__dict__.keys():
- method = getattr(cls, m)
- method = ping_wrapper(method)
- setattr(cls, m, method)
- return cls
-
-@with_pingbacks
-class RepositoryHandler(AnonymousBaseHandler):
- allowed_methods = ('GET', 'POST', 'DELETE',)
- model = Repository
- fields = ('uuid', 'parent',)
-
- def read(self, request, uuid=None):
- if uuid:
- r = Repository(uuid)
- if r.exists():
- return r
- return rc.NOT_FOUND
- if not settings.HOSTED:
- return rc.BAD_REQUEST
- return [Repository(folder)
- for folder in os.listdir(settings.KILN_REPOSITORY_ROOT)
- if Repository(folder).exists()]
-
- def create(self, request):
- q = request.POST
- uuid = q['uuid']
- meta = simplejson.loads(q['meta']) if q.get('meta') else {}
- if q.get('parent'):
- r = Repository(q['parent']).cloneto(uuid, meta)
- else:
- r = Repository(uuid)
- r.create(meta)
- return r
-
- def delete(self, request, uuid):
+@get('/repo/<uuid>')
+def repo_get(uuid=None):
+ if uuid:
r = Repository(uuid)
if r.exists():
- r.delete()
- return rc.DELETED
- return rc.NOT_FOUND
+ return r
+ raise NotFound
+ if not settings.HOSTED:
+ raise BadRequest
+ repos = [Repository(folder)
+ for folder in os.listdir(settings.KILN_REPOSITORY_ROOT)
+ if Repository(folder).exists()]
+ for p1 in os.listdir(settings.KILN_REPOSITORY_ROOT):
+ if len(p1) == 2:
+ for p2 in os.listdir(os.path.join(settings.KILN_REPOSITORY_ROOT, p1)):
+ parent = os.path.join(settings.KILN_REPOSITORY_ROOT, p1, p2)
+ repos.extend(Repository(folder) for folder in os.listdir(parent) if Repository(folder).exists())
+ return repos
-class ManifestHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
+@post('/repo')
+def repo_create():
+ q = request.form
+ try:
+ uuid = q['uuid']
+ pingback = q['pingback']
+ site = urlutil.siteurl(request)
+ meta = q.get('meta', None)
+ parent = q.get('parent', None)
+ except Exception, e:
+ raise
+ return BadRequest(e)
+ queue_repo_create(uuid, pingback, site, meta=meta, parent=parent)
+ return 'OK'
- def read(self, request, uuid, rev='tip'):
+@post('/repo/<uuid>')
+def update_meta(uuid):
+ q = request.form
+ try:
+ meta = simplejson.loads(q['meta']) if q.get('meta') else {}
+ except:
+ raise BadRequest
+
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ r.meta = meta
+ return r
+
+@delete('/repo/<uuid>')
+def repo_delete(uuid):
+ # This can only ever be called manually, so it's okay that
+ # this key is never used on the website side. If we do ever
+ # add repository purging via heartbeat or whatever, this
+ # will obviously need to change
+ if settings.HOSTED:
+ if request.args.get('magic_word') != settings.WHITE_RABBIT_OBJECT:
+ raise BadRequest
+ r = Repository(uuid)
+ if r.exists():
+ r.delete()
+ syncstatus.remove_repo(r)
+ return Response('', status=204)
+ raise NotFound
+
+@post('/repo/<uuid>/commit')
+def commit(uuid):
+ q = request.form
+ author = q['author']
+ parent = q['parent']
+ date = q['date']
+ message = q['message']
+ path = hexdecode(q['path'])
+ upload = request.files['file']
+ if upload.content_length > settings.KILN_MAX_COMMIT_FILE_SIZE:
+ return error('The uploaded file is too large.', 'too_large')
+ data = upload.read()
+ if hasattr(upload, 'close'):
+ upload.close()
+
+ def _writefile(repo, mctx, path):
+ return context.memfilectx(path, data)
+
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+
+ repo = r.repo
+ l = None
+ try:
+ l = repo.lock()
+ except LockHeld:
+ if l: l.release()
+ return error('The repository is locked.', 'repo_locked')
+ try:
+ try:
+ ctx = repo[parent]
+ if ctx.children():
+ return error('Commit creates new head!', 'not_head')
+ except RepoLookupError:
+ raise NotFound
+ mctx = context.memctx(repo, [parent, None], message, [path], _writefile, user=author, date=date)
+ mctx.commit()
+ except Exception, e:
+ report_exception(e)
+ raise
+ finally:
+ if l: l.release()
+
+ return Response('OK')
+
+@post('/repo/stripped')
+def strip():
+ q = request.form
+ uuid = q['uuid']
+ parent = q['parent']
+ pingback = q['pingback']
+ rev = q['rev']
+ url = q['url']
+ ixPerson = q['ixperson']
+ meta = q.get('meta', '')
+ parent = q['parent']
+ if not Repository(parent).exists():
+ raise NotFound
+ queue_repo_strip(pingback, uuid, parent, rev, meta, url, ixPerson)
+ return Response('OK')
+
+@get('/repo/<uuid>/manifest/<rev>')
+def manifest(uuid, rev='tip'):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ if not r.hasrevision(rev):
+ raise BadRequest
+ return {'type': 'manifest', 'manifest': r.manifest(rev)}
+
+@get('/repo/<uuid>/size')
+def size(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ # raise NotFound
+ # Hack around a dumb bug in ourdot's Kiln install
+ return {'type': 'reposize', 'size': 0}
+ return {'type': 'reposize', 'size': r.size()}
+
+@get('/repo/<uuid>/commontag')
+def common_tags(uuid):
+ """
+ This function takes a list of checkins within a repository and
+ will return the nearest common child which has a tag.
+ """
+ r = Repository(uuid)
+ if not 'revs' in request.args or not r.exists():
+ raise BadRequest
+ else:
+ revs = request.args['revs'].split(",");
+
+ if not 'num_tags' in request.args:
+ num_tags = 1
+ else:
+ num_tags = int(request.args['num_tags'])
+ tags = r.commontags(revs, num_tags);
+
+ return {'type': 'tags', 'tags': tags};
+
+@post('/repo/<uuid>/tag/<rev>')
+def create_tag(uuid, rev='tip'):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ try:
+ tag = request.form['tag']
+ ixPerson = request.form['ixPerson']
+ url = request.form['url']
+ username = request.form['username']
+ except KeyError:
+ raise BadRequest
+ force = False
+ if 'force' in request.form and request.form['force'].lower() != 'false':
+ force = True
+ try:
+ r.tag(rev, tag, url, ixPerson, username, force)
+ except ValueError:
+ raise BadRequest
+
+ return {'type': 'tag', 'tag': tag, 'rev': rev}
+
+@get('/repo/<uuid>/tag')
+def get_tags(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ return {'type': 'tags', 'tags': r.tags()}
+
+@get('/repo/<uuid>/changesbetweentags')
+def betweentags(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ try:
+ tag1 = request.args["tag1"]
+ tag2 = request.args["tag2"]
+ except KeyError:
+ raise BadRequest
+
+ try:
+ changesetlist = r.changesbetweentags(tag1, tag2, request.args.get('includelow', 'false').lower() == 'true')
+ return {'type': 'changesets', 'changesets': changesetlist}
+ except:
+ raise BadRequest
+
+@post('/repo/meta')
+def set_meta():
+ '''Takes a JSON dictionary of repo uuid => repo metadata, at the key
+ 'meta', and updates the metadata for those repos. Returns a dictionary
+ of uuid => boolean, with True for repos that were found and False for
+ repos that do not exist.'''
+ exists = {}
+ meta = simplejson.loads(request.form['meta'])
+ for uuid in meta:
r = Repository(uuid)
if not r.exists():
- return rc.NOT_FOUND
- if not r.hasrevision(rev):
- return rc.BAD_REQUEST
- return {'type': 'manifest', 'manifest': r.manifest(rev)}
+ exists[uuid] = False
+ continue
+ exists[uuid] = True
+ r.meta = simplejson.loads(meta[uuid])
+ return exists
-class SizeHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
+@get('/repo/<uuid>/file/<rev>/')
+@get('/repo/<uuid>/file/<rev>/<path:path>')
+def get_file(uuid, path='', rev='tip'):
+ r = Repository(uuid)
+ binaries = int(request.args.get('binaries', 0))
+ images = int(request.args.get('images', 0))
+ can_truncate = not int(request.args.get('no_truncate', 0))
+ no_contents = int(request.args.get('no_contents', 0))
+ path = hexdecode(path)
+ if not r.exists():
+ raise NotFound
+ if not r.hasrevision(rev):
+ raise BadRequest
+ if r.hasfile(path, rev):
+ return filecontents(r, path, rev, binaries, images, can_truncate, no_contents)
+ else:
+ return directorylisting(r, path, rev)
- def read(self, request, uuid):
- r = Repository(uuid)
- if not r.exists():
- # return rc.NOT_FOUND
- # Hack around a dumb bug in ourdot's Kiln install
- return {'type': 'reposize', 'size': 0}
- return {'type': 'reposize', 'size': r.size()}
+@get('/repo/<uuid>/file/<rev1>/<rev2>/<path:path>')
+def get_subtracted_image(uuid, path='', rev1='tip', rev2='tip'):
+ r = Repository(uuid)
+ path = hexdecode(path)
+ if not r.exists():
+ raise NotFound
+ if not r.hasrevision(rev1) or not r.hasrevision(rev2):
+ raise BadRequest
+ if r.hasfile(path, rev1) and r.hasfile(path, rev2):
+ #open the old and new versions of the image in RGB mode, and resize them so that the largest dimension is 300px.
+ oldcontents = Image.open(cStringIO.StringIO(r.filecontents(path, rev1, raw=1)))
+ oldcontents = resizeimage(oldcontents, displaySize=tuple(determinedisplaysize(oldcontents.size, max=(500, 500)))).convert("RGB")
+ newcontents = resizeimage(Image.open(cStringIO.StringIO(r.filecontents(path, rev2, raw=1))), displaySize=tuple(determinedisplaysize(oldcontents.size, max=(500, 500)))).convert("RGB")
+ sub = subtractimages(oldcontents, newcontents)
+
+ #im = Image.new("RGB", (oldcontents.size[0]*3, oldcontents.size[1]))
+ #im.paste(oldcontents, (0,0, sub.size[0], sub.size[1]))
+ #im.paste(sub, (sub.size[0],0, sub.size[0]*2, sub.size[1]))
+ #im.paste(newcontents, (sub.size[0]*2,0,sub.size[0]*3,sub.size[1]))
+ im = sub
+
+ output = cStringIO.StringIO()
+ im.save(output, "PNG")
+ return Response(output.getvalue())
-class FileHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
-
- def read(self, request, uuid, path, rev='tip'):
- r = Repository(uuid)
- binaries = int(request.GET.get('binaries', 0))
- can_truncate = not int(request.GET.get('no_truncate', 0))
- path = hexdecode(path)
- if not r.exists():
- return rc.NOT_FOUND
- if not r.hasrevision(rev):
- return rc.BAD_REQUEST
- if r.hasfile(path, rev):
- return self.filecontents(r, path, rev, binaries, can_truncate)
- else:
- return self.directorylisting(r, path, rev)
-
- def filecontents(self, repo, path, rev, binaries, can_truncate):
- truncated = False
- contents = repo.filecontents(path, rev)
- ft = filetuple(path)
+def filecontents(repo, path, rev, binaries, images, can_truncate, no_contents):
+ truncated = False
+ ft = filetuple(path)
+ if repo.isbfile(path) and not binaries:
+ try:
+ Image.open(cStringIO.StringIO(repo.filecontents(path, rev, raw=True)))
+ filetype = 'image'
+ contents = '(Image file)'
+ except IOError:
+ filetype = 'binary'
+ contents = '(Binary file)'
+ elif no_contents:
+ contents = ''
+ truncated = True
+ filetype = 'text'
+ else:
+ contents = repo.filecontents(path, rev, raw=binaries)
if util.binary(contents):
- if binaries:
- filetype = "base64"
- contents = base64.b64encode(contents)
- else:
- filetype = 'binary'
- contents = '(Binary file)'
+ if not binaries:
+ try:
+ Image.open(cStringIO.StringIO(repo.filecontents(path, rev, raw=True)))
+ filetype = 'image'
+ contents = '(Image file)'
+ except IOError:
+ filetype = 'binary'
+ contents = '(Binary file)'
else:
filetype = 'text'
- if len(contents) > 300000 and can_truncate:
+ truncate_length = 200000
+ if len(contents) > truncate_length and can_truncate:
truncated = True
- contents = contents[:300000]
+ contents = contents[:truncate_length]
+ if binaries:
+ if images:
+ try:
+ imfile = cStringIO.StringIO()
+ resizeimage(Image.open(cStringIO.StringIO(contents))).save(imfile, "PNG")
+ contents = imfile.getvalue()
+ except IOError:
+ pass
+ return Response(contents)
+ else:
return {'type': 'file',
'path': ft['path'],
'bytepath': ft['bytepath'],
@@ -207,209 +379,327 @@ 'filetype': filetype,
'truncated': truncated,
'contents': contents,
- 'formatted_contents': format_file(path, contents) if can_truncate else None}
+ 'formatted_contents': format_file(path, contents) if not truncated else None}
- def directorylisting(self, repo, path, rev):
- files = repo.directorylisting(path, rev)
- if files == None:
- return rc.NOT_FOUND
+def resizeimage(image, displaySize=None):
+ if displaySize == None:
+ displaySize = tuple(determinedisplaysize(image.size))
+ if image.size == displaySize:
+ return image
+ else:
+ return image.resize(displaySize)
+
+def subtractimages(oldimage, newimage):
+ im = Image.new("RGB", oldimage.size)
+ pix = im.load()
+ npix = newimage.load()
+ opix = oldimage.load()
+ for x in xrange(oldimage.size[0]):
+ for y in xrange(newimage.size[1]):
+ pix[x, y] = abs(npix[x, y][0] - opix[x, y][0]), abs(npix[x, y][1] - opix[x, y][1]), abs(npix[x, y][2] - opix[x, y][2])
+ pix[x, y] = leahhighlight(pix[x,y])
+ return im
+
+def andrewdifference(pix):
+ pix = f(pix[0]),f(pix[1]),f(pix[2])
+ return pix
+
+def f(x):
+ return int(256*pow((float(x)/256),.5))
+
+def leahhighlight(pix):
+ if pix[0] >= 18 and pix[1] >= 18 and pix[2] >= 18:
+ pix = 5* pix[0],10* pix[1],5* pix[2]
+ return pix
+
+def directorylisting(repo, path, rev):
+ files = repo.directorylisting(path, rev)
+ if files == None:
+ raise NotFound
+ else:
+ return {'type': 'files', 'files': files}
+
+@get('/repo/<uuid>/annotate/<rev>/<path:path>')
+def annotate(uuid, path, rev):
+ r = Repository(uuid)
+ path = hexdecode(path)
+ if not r.exists() or not r.hasfile(path, rev):
+ raise NotFound
+ contents = r.filecontents(path, rev)
+ if util.binary(contents):
+ return error('Unable to annotate binary files', 'annotate_binary')
+
+ if request.args.get('line'):
+ return linehistory(r, path, rev,
+ int(request.args['line']), int(request.args.get('count', 4)))
+ else:
+ return filehistory(r, path, rev, int(request.args.get('count', 0)))
+
+def linehistory(r, path, rev, line, count):
+ return {'type': 'changesets', 'changesets': r.annotateline(path, rev, line, count)}
+
+def filehistory(r, path, rev, count):
+ return {'type': 'annotation', 'annotation': r.annotate(path, rev, count=count)}
+
+@get('/repo/<uuid>/branches')
+def branches(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ return r.branches()
+
+@post('/repo/<uuid>/changeset') # For many changesets, e.g. reviews.
+@get('/repo/<uuid>/changeset/<revs>')
+@get('/repo/<uuid>/changeset/<revs>/<filename>')
+def changesets(uuid, revs=None, filename=None):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+
+ if request.method == 'POST':
+ revs = request.form['revs']
+ filename = request.form.get('filename', None)
+
+ if filename:
+ filename = hexdecode(filename)
+
+ changedfiles = request.values.get('changedfiles')
+
+ revs = revs.split(':')
+ try:
+ if len(revs) == 1:
+ # did you instead give us an enumeration of individual changesets?
+ revs = revs[0].split(',')
+ if len(revs) == 1:
+ # Only one changeset, allow for file changesets
+ if filename:
+ return dict(r.filechangeset(filename, revs[0]), type='filechangeset')
+ else:
+ return dict(r.changeset(revs[0], changedfiles), type='changeset')
+ else:
+ # multiple changesets
+ return {'type': 'changesets',
+ 'changesets': r.changesets(revs, changedfiles)}
+ elif len(revs) == 2:
+ if filename:
+ limit = int(request.values.get('limit', 0))
+ return {'type': 'filechangesets',
+ 'filechangesets': r.filechangesets(filename, revs[0], revs[1], limit)}
+ else:
+ return {'type': 'changesets',
+ 'changesets': r.changesetrange(revs[0], revs[1], changedfiles)}
+ except:
+ raise BadRequest
+
+@get('/repo/<uuid>/diff/<revs>')
+@get('/repo/<uuid>/diff/<revs>/<filename>')
+def diff(uuid, revs, filename=None):
+ r = Repository(uuid)
+ if filename:
+ filename = hexdecode(filename)
+
+ if not r.exists():
+ raise NotFound
+ revs = revs.split(':')
+ for rev in revs:
+ if not r.hasrevision(rev):
+ raise BadRequest
+
+ # Set maxsize to 80 kB or as requested, unless it's a single file,
+ # in which case serve 200kb. The value of 80 kB was
+ # lovingly determined by trial and error. If you change it,
+ # please remember at least to do the former.
+ maxsize = int(request.args.get('maxsize') or (200 if filename else 80) * 1000)
+
+ ignorews = request.args.get('ignorews', 'False').lower() == 'true'
+
+ opts = dict(filename=filename, maxsize=maxsize, ignorews=ignorews)
+ if len(revs) > 1:
+ opts['rev2'] = revs[1]
+ udiff, bytecount = r.diff(revs[0], **opts)
+ format_diffs(udiff)
+
+ if filename:
+ if udiff:
+ return udiff[0]
+ return {'type': 'diff'}
+ else:
+ return {'type': 'diffs',
+ 'truncated': bytecount - maxsize > 0,
+ 'diffs': udiff}
+
+@get('/repo/<uuid>/outgoing/<uuid2>')
+def outgoing_get(uuid, uuid2):
+ r1 = Repository(uuid)
+ r2 = Repository(uuid2)
+ nochangesets = int(request.args.get('nochangesets', 0))
+ if not r1.exists():
+ raise NotFound
+ if not r2.exists():
+ raise BadRequest
+ if not r1.isrelated(r2):
+ return error('repositories are not related', 'notrelated')
+ return {'type': 'outgoing', 'newheads': r1.pushwouldmakeheads(r2), 'changesets': [] if nochangesets else r1.outgoing(r2)}
+
+@post('/repo/<uuid>/outgoing/<uuid2>')
+def push_repo(uuid, uuid2):
+ r1 = Repository(uuid)
+ r2 = Repository(uuid2)
+ ixPerson = request.form['ixPerson']
+ url = request.form['website']
+ if not r1.exists():
+ raise NotFound
+ if not r2.exists():
+ raise BadRequest
+ if not r1.isrelated(r2):
+ return error('repositories are not related', 'notrelated')
+ if not r1.outgoing(r2):
+ return error("repositories were already sync'd", 'alreadysyncd')
+ try:
+ return {'type': 'push', 'success': r1.push(r2, url, pusher=ixPerson)}
+ except RepositoryNotSubsetException, e:
+ return error(str(e), 'notstrictsubset')
+ except CreatesNewHeadsException, e:
+ return error(str(e), 'newheads')
+
+@post('/sync')
+def sync():
+ if not settings.HOSTED:
+ raise BadRequest
+ remote = request.form["remote"]
+ if 'repo' not in request.form:
+ # We don't have a specific repo, so we'll trigger a sync to every repo that needs it.
+ repos = syncstatus.need_sync(remote)
+ for repo in repos:
+ asyncpost(request.base_url, dict(remote=remote, repo=repo))
+ return dict(type='sync', success=True, count=len(repos))
+ resp = urllib2.urlopen(urlutil.urljoin(remote, "repo/%s" % request.form['repo']))
+ repo = simplejson.loads(resp.read())
+ failures = []
+ relink = False
+ r = Repository(repo['uuid'], suppresshooks=True)
+ if not r.exists():
+ r.create(repo['meta'])
+ relink = True
+ r.meta = repo['meta']
+ if 'bfile' in request.form:
+ try:
+ sha = request.form['bfile']
+ if bfiles.ishash(sha) and not bfiles.instore(sha):
+ resp = urllib2.urlopen(urlutil.urljoin(remote, 'repo', r.uuid, 'bfile', sha))
+ bfiles.storebfile(resp, sha)
+ except Exception, e:
+ failures.append({'repo': repo['uuid'], 'exception': e})
+ report_exception(e)
+ else:
+ remoteurl = urlutil.urljoin(remote, 'repo', r.uuid)
+ try:
+ r.pull(remoteurl)
+ if settings.DO_INDEXING:
+ queue_repo_index(repo['uuid'])
+ if settings.HOSTED:
+ syncstatus.update_status(r)
+ if relink:
+ r.relink()
+ # Chain the sync along
+ r.sync(site=urlutil.siteurl(request), peers=dict(r.ui.configitems('post_peers')))
+ except LockHeld, e:
+ # No need to report locked repos. They're expected.
+ failures.append({'repo': repo['uuid'], 'exception': e})
+ except Exception, e:
+ failures.append({'repo': repo['uuid'], 'exception': e})
+ report_exception(e, "uuid=%s, r.repo['tip'].rev()=%s, request.form=%s\n"
+ % (repo['uuid'], str(r.repo['tip'].rev()), str(request.form)))
+ d = {'type': 'sync', 'success': not failures}
+ if failures:
+ d['failures'] = failures
+ return d
+
+@get('/version')
+def version():
+ return {'version': settings.KILN_BACKEND_VERSION, 'hg_version': util.version()}
+
+@app.route('/repo/<uuid>/bfile', methods=['GET', 'POST'])
+@app.route('/repo/<uuid>/bfile/<sha>', methods=['GET', 'POST'])
+def bfilehandle(uuid, sha=None):
+ repo = Repository(uuid)
+ if not sha:
+ if request.method == 'GET':
+ return Response(simplejson.dumps(bfiles.listbfiles()))
else:
- return {'type': 'files', 'files': files}
+ raise BadRequest
-class AnnotationHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
+ if request.method == 'GET':
+ try:
+ return Response(bfiles.bfilecontents(sha))
+ except IOError:
+ raise NotFound
- def read(self, request, uuid, path, rev):
- r = Repository(uuid)
- path = hexdecode(path)
- if not r.exists() or not r.hasfile(path, rev):
- return rc.NOT_FOUND
- contents = r.filecontents(path, rev)
- if util.binary(contents):
- return error('Unable to annotate binary files', 'annotate_binary')
+ # bfiles uses PUT to upload files but django read the entire file into memory
+ # use POST instead so that we can access the file with a generator
+ # NOTE: This may no longer be necessary with flask, but it's the way it works
+ # so there's no reason to change it back right now.
+ elif request.method == 'POST':
+ try:
+ if bfiles.instore(sha):
+ return Response(status=200)
+ elif bfiles.storebfile(request.files['name'], sha):
+ try:
+ repo.sync(site=urlutil.siteurl(request),
+ bfile=sha,
+ peers=dict(repo.ui.configitems('peers')))
+ finally:
+ return Response(status=201)
+ else:
+ #SHA1 is checked by storebfile
+ raise BadRequest('SHA1 of file does not match SHA1 given.')
+ except Exception, e:
+ report_exception(e)
+ raise BadRequest
- if request.GET.get('line'):
- return self.linehistory(r, path, rev,
- int(request.GET['line']), int(request.GET.get('count', 4)))
+ elif request.method == 'HEAD':
+ if bfiles.instore(sha):
+ m = hashlib.sha1()
+ with bfiles.bfilecontents(sha) as fd:
+ while True:
+ data = fd.read(32768)
+ if not data:
+ break
+ m.update(data)
+ response = Response()
+ response.headers['Content-SHA1'] = m.hexdigest()
+ return response
else:
- return self.filehistory(r, path, rev)
+ raise NotFound
+ else:
+ raise BadRequest
- def linehistory(self, r, path, rev, line, count):
- return {'type': 'changesets', 'changesets': r.annotateline(path, rev, line, count)}
+@app.route('/repo/<uuid>/serve', methods=['GET', 'POST'])
+def serve(uuid):
+ r = Repository(uuid, suppressoutput=False)
+ if not r.exists():
+ raise NotFound
+ repo = r.repo
+ if 'ixPerson' in request.args:
+ repo.ui.setconfig('kiln', 'ixperson', request.args['ixPerson'])
+ repo.ui.setconfig('kiln', 'url', request.args['website'])
+ repo.ui.setconfig('kiln', 'site', urlutil.siteurl(request))
+ repo.ui.setconfig('kiln', 'token', request.args.get('token', ''))
+ # if we're about to push, run recover. Don't do this for pull,
+ # because it locks the repo (even if only for a second), and it's
+ # obviously better if we don't have to wait for a push to finish
+ # to pull
+ if request.args['cmd'] == 'unbundle':
+ r.recover()
+ request.environ['REPO_NAME'] = request.environ['PATH_INFO'].strip('/')
+ return hgweb.hgweb(repo.root, baseui=repo.ui)
- def filehistory(self, r, path, rev):
- return {'type': 'annotation', 'annotation': r.annotate(path, rev)}
-
-class ChangesetHandler(AnonymousBaseHandler):
- allowed_methods = ('GET', 'POST')
-
- def create(self, request, uuid):
- revs = request.POST["revs"]
- filename = request.POST.get("filename")
- return self.read(request, uuid, revs, filename)
-
- def read(self, request, uuid, revs, filename=None):
- r = Repository(uuid)
- if filename:
- filename = hexdecode(filename)
-
- if not r.exists():
- return rc.NOT_FOUND
-
- changedfiles = request.REQUEST.get('changedfiles')
-
- revs = revs.split(':')
- try:
- if len(revs) == 1:
- # did you instead give us an enumeration of individual changesets?
- revs = revs[0].split(',')
- if len(revs) == 1:
- # Only one changeset, allow for file changesets
- if filename:
- return dict(r.filechangeset(filename, revs[0]), type='filechangeset')
- else:
- return dict(r.changeset(revs[0], changedfiles), type='changeset')
- else:
- # multiple changesets
- return {'type': 'changesets',
- 'changesets': r.changesets(revs, changedfiles)}
- elif len(revs) == 2:
- if filename:
- limit = int(request.REQUEST.get('limit', 0))
- return {'type': 'filechangesets',
- 'filechangesets': r.filechangesets(filename, revs[0], revs[1], limit)}
- else:
- return {'type': 'changesets',
- 'changesets': r.changesetrange(revs[0], revs[1], changedfiles)}
- except:
- return rc.BAD_REQUEST
-
-class DiffHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
-
- def read(self, request, uuid, revs, filename=None):
- r = Repository(uuid)
- if filename:
- filename = hexdecode(filename)
-
- if not r.exists():
- return rc.NOT_FOUND
- revs = revs.split(':')
- for rev in revs:
- if not r.hasrevision(rev):
- return rc.BAD_REQUEST
-
- # Set maxsize to 100 kB or as requested, unless it's a single file,
- # in which case serve the whole thing. The value of 80 kB was
- # lovingly determined by trial and error. If you change it,
- # please remember at least to do the former.
- maxsize = request.GET.get('maxsize') if not filename else None
- if maxsize:
- maxsize = int(maxsize)
- elif not filename:
- maxsize = 80 * 1000
-
- if len(revs) == 1:
- udiff, bytecount, increment = r.diff(revs[0], filename=filename, maxsize=maxsize)
- format_diffs(udiff)
- else:
- udiff, bytecount, increment = r.diff(revs[0], rev2=revs[1], filename=filename, maxsize=maxsize)
- format_diffs(udiff)
-
- if filename:
- return udiff[0] if udiff else rc.NOT_FOUND
- else:
- return {'type': 'diffs',
- 'truncated': bytecount - maxsize > 0,
- 'increment': increment,
- 'diffs': udiff}
-
-class AutopullHandler(AnonymousBaseHandler):
- allowed_methods = ('POST',)
-
- def create(self, request, uuid, uuid2):
- r1 = Repository(uuid)
- r2 = Repository(uuid2)
- url = request.POST['website']
- if not r1.exists():
- return rc.NOT_FOUND
- if not r2.exists():
- return rc.BAD_REQUEST
- if not r1.isrelated(r2):
- return error('repositories are not related', 'notrelated')
- if not r2.outgoing(r1):
- return error("repositories were already sync'd", 'alreadysyncd')
- return {'type': 'push', 'success': r1.autopull(r2, url)}
-
-@with_pingbacks
-class OutgoingHandler(AnonymousBaseHandler):
- allowed_methods = ('GET', 'POST')
-
- def read(self, request, uuid, uuid2):
- r1 = Repository(uuid)
- r2 = Repository(uuid2)
- nochangesets = int(request.GET.get('nochangesets', 0))
- if not r1.exists():
- return rc.NOT_FOUND
- if not r2.exists():
- return rc.BAD_REQUEST
- if not r1.isrelated(r2):
- return error('repositories are not related', 'notrelated')
- return {'type': 'outgoing', 'newheads': r1.pushwouldmakeheads(r2), 'changesets': [] if nochangesets else r1.outgoing(r2)}
-
- def create(self, request, uuid, uuid2):
- r1 = Repository(uuid)
- r2 = Repository(uuid2)
- ixPerson = request.POST.get('ixPerson')
- url = request.POST['website']
- if not r1.exists():
- return rc.NOT_FOUND
- if not r2.exists():
- return rc.BAD_REQUEST
- if not r1.isrelated(r2):
- return error('repositories are not related', 'notrelated')
- if not r1.outgoing(r2):
- return error("repositories were already sync'd", 'alreadysyncd')
- try:
- return {'type': 'push', 'success': r1.push(r2, url, pusher=ixPerson)}
- except RepositoryNotSubsetException, e:
- return error(str(e), 'notstrictsubset')
- except CreatesNewHeadsException, e:
- return error(str(e), 'newheads')
-
-class SynchronizeHandler(AnonymousBaseHandler):
- allowed_methods = ('POST',)
-
- def create(self, request):
- if not settings.HOSTED:
- return rc.BAD_REQUEST
- remote = request.POST["remote"]
- if 'repo' in request.POST:
- resp = urllib2.urlopen(urlutil.urljoin(remote, "repo/%s/" % request.POST['repo']))
- repos = [simplejson.loads(resp.read())]
- else:
- resp = urllib2.urlopen(urlutil.urljoin(remote, "repo/"))
- repos = simplejson.loads(resp.read())
- u = ui.ui()
- u.setconfig('ui', 'quiet', 'True')
- failures = []
- for repo in repos:
- r = Repository(repo['uuid'], u)
- remoteurl = urlutil.urljoin(remote, "repo/%s/serve" % r.uuid)
- if not r.exists():
- r.create(repo['meta'])
- try:
- r.pull(remoteurl)
- except Exception, e:
- failures.append({'repo': repo['uuid'], 'exception': e})
- reportexception(e)
- d = {'type': 'sync', 'success': not failures}
- if failures:
- d['failures'] = failures
- return d
-
-class VersionHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
-
- def read(self, request):
- return {'version': settings.KILN_BACKEND_VERSION}
+@get('/repo/<uuid>/heads')
+def get_heads(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ def _revtuple(rev):
+ "Return a (rev num, rev id) tuple from a changeset context"
+ return (rev.rev(), rev.hex())
+ return {'heads': [_revtuple(r.repo[head]) for head in r.repo.heads()]}
|
|
@@ -0,0 +1,62 @@ + from flask import request, render_template
+import settings
+from handlers import get, post, jsonify
+from redis import Redis
+from redis.cli import RedisCli
+
+queue_data = dict(
+ lists = [
+ 'kiln:queue',
+ 'kiln:queue:high',
+ 'kiln:queue:low',
+ 'kiln:cancelations',
+ 'kiln:queue:running',
+ 'opengrok:index:running',
+ 'opengrok:cancelations',
+ ],
+ zsets = [
+ 'opengrok:index',
+ ],
+ keys = [
+ 'updaterepo:*:repo',
+ 'updaterepo:*:__failcount',
+ 'httppost:*:url',
+ 'httppost:*:__failcount',
+ ],
+)
+
+def _get_redis():
+ return Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB)
+
+@get('/queuestats/', as_json=False)
+def queuestats():
+ r = _get_redis()
+ data = {}
+ for l in queue_data['lists']:
+ data[l] = r.llen(l)
+ for z in queue_data['zsets']:
+ data[z] = r.zcard(z)
+ for k in queue_data['keys']:
+ data[k] = len(r.keys(k))
+ if request.headers.get('X-Requested-With', '').lower() == 'xmlhttprequest':
+ return jsonify(data)
+ return render_template('queuestats.html', data=data)
+
+@post('/queuestats/redis/cli/')
+def cli():
+ cmd = request.form['cmd']
+ try:
+ r = RedisCli(settings.REDIS_HOST, settings.REDIS_PORT).onecmd(cmd)
+ except Exception, e:
+ r = '*** Unknown exception: %s' % e
+ if r is None:
+ r = ''
+ elif isinstance(r, list):
+ r = '\n'.join(r)
+ d = dict(response=r)
+ if isinstance(r, basestring) and (r.startswith('Error') or r.startswith('***')):
+ d['type'] = 'error'
+ else:
+ d['type'] = 'success'
+ return d
+
|
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
|
|
@@ -0,0 +1,129 @@ + /*
+ * jQuery Color Animations
+ * Copyright 2007 John Resig
+ * Released under the MIT and GPL licenses.
+ */
+
+(function(jQuery){
+
+ // We override the animation for all of these color styles
+ jQuery.each(['backgroundColor', 'borderBottomColor', 'borderLeftColor', 'borderRightColor', 'borderTopColor', 'color', 'outlineColor'], function(i,attr){
+ jQuery.fx.step[attr] = function(fx){
+ if ( !fx.colorInit ) {
+ fx.start = getColor( fx.elem, attr );
+ fx.end = getRGB( fx.end );
+ fx.colorInit = true;
+ }
+
+ fx.elem.style[attr] = "rgb(" + [
+ Math.max(Math.min( parseInt((fx.pos * (fx.end[0] - fx.start[0])) + fx.start[0]), 255), 0),
+ Math.max(Math.min( parseInt((fx.pos * (fx.end[1] - fx.start[1])) + fx.start[1]), 255), 0),
+ Math.max(Math.min( parseInt((fx.pos * (fx.end[2] - fx.start[2])) + fx.start[2]), 255), 0)
+ ].join(",") + ")";
+ }
+ });
+
+ // Color Conversion functions from highlightFade
+ // By Blair Mitchelmore
+ // http://jquery.offput.ca/highlightFade/
+
+ // Parse strings looking for color tuples [255,255,255]
+ function getRGB(color) {
+ var result;
+
+ // Check if we're already dealing with an array of colors
+ if ( color && color.constructor == Array && color.length == 3 )
+ return color;
+
+ // Look for rgb(num,num,num)
+ if (result = /rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(color))
+ return [parseInt(result[1]), parseInt(result[2]), parseInt(result[3])];
+
+ // Look for rgb(num%,num%,num%)
+ if (result = /rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(color))
+ return [parseFloat(result[1])*2.55, parseFloat(result[2])*2.55, parseFloat(result[3])*2.55];
+
+ // Look for #a0b1c2
+ if (result = /#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(color))
+ return [parseInt(result[1],16), parseInt(result[2],16), parseInt(result[3],16)];
+
+ // Look for #fff
+ if (result = /#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(color))
+ return [parseInt(result[1]+result[1],16), parseInt(result[2]+result[2],16), parseInt(result[3]+result[3],16)];
+
+ // Look for rgba(0, 0, 0, 0) == transparent in Safari 3
+ if (result = /rgba\(0, 0, 0, 0\)/.exec(color))
+ return colors['transparent'];
+
+ // Otherwise, we're most likely dealing with a named color
+ return colors[jQuery.trim(color).toLowerCase()];
+ }
+
+ function getColor(elem, attr) {
+ var color;
+
+ do {
+ color = jQuery.curCSS(elem, attr);
+
+ // Keep going until we find an element that has color, or we hit the body
+ if ( color != '' && color != 'transparent' || jQuery.nodeName(elem, "body") )
+ break;
+
+ attr = "backgroundColor";
+ } while ( elem = elem.parentNode );
+
+ return getRGB(color);
+ };
+
+ // Some named colors to work with
+ // From Interface by Stefan Petre
+ // http://interface.eyecon.ro/
+
+ var colors = {
+ aqua:[0,255,255],
+ azure:[240,255,255],
+ beige:[245,245,220],
+ black:[0,0,0],
+ blue:[0,0,255],
+ brown:[165,42,42],
+ cyan:[0,255,255],
+ darkblue:[0,0,139],
+ darkcyan:[0,139,139],
+ darkgrey:[169,169,169],
+ darkgreen:[0,100,0],
+ darkkhaki:[189,183,107],
+ darkmagenta:[139,0,139],
+ darkolivegreen:[85,107,47],
+ darkorange:[255,140,0],
+ darkorchid:[153,50,204],
+ darkred:[139,0,0],
+ darksalmon:[233,150,122],
+ darkviolet:[148,0,211],
+ fuchsia:[255,0,255],
+ gold:[255,215,0],
+ green:[0,128,0],
+ indigo:[75,0,130],
+ khaki:[240,230,140],
+ lightblue:[173,216,230],
+ lightcyan:[224,255,255],
+ lightgreen:[144,238,144],
+ lightgrey:[211,211,211],
+ lightpink:[255,182,193],
+ lightyellow:[255,255,224],
+ lime:[0,255,0],
+ magenta:[255,0,255],
+ maroon:[128,0,0],
+ navy:[0,0,128],
+ olive:[128,128,0],
+ orange:[255,165,0],
+ pink:[255,192,203],
+ purple:[128,0,128],
+ violet:[128,0,128],
+ red:[255,0,0],
+ silver:[192,192,192],
+ white:[255,255,255],
+ yellow:[255,255,0],
+ transparent: [255,255,255]
+ };
+
+})(jQuery);
|
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
|
@@ -0,0 +1,12 @@ + <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" >
+ <head>
+ <meta http-equiv="X-UA-Compatible" content="IE=8" />
+ <title>{% block title %}{% endblock title %}</title>
+ <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js"></script>
+ {% block extra_head %}{% endblock extra_head %}
+ </head>
+ <body>
+ {% block content %}{% endblock content %}
+ </body>
+</html>
|
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
|
@@ -0,0 +1,7 @@ + #!/bin/sh
+
+if [ ! -e ~/miniredis.pid ]; then
+ nohup python miniredis.py -p 56784 -d ~/miniredis.db -l ~/miniredis.out --pid ~/miniredis.pid &
+else
+ echo MiniRedis is running, or crashed
+fi
|
Loading...