Changeset c5ec70d1ed42…
Parent fca1d86acdc9…
by Benjamin Pollack <benjamin@fogcreek.com>
Changes to 42 files · Browse files at c5ec70d1ed42 Showing diff from parent fca1d86acdc9 Diff from another changeset...
|
@@ -0,0 +1,18 @@ + syntax: glob
+*.pyc
+*.pyo
+*.swp
+*.db
+*.sqlite3
+*.orig
+kiln/dist/*
+TAGS
+\#*\#
+local_settings.py
+kiln/build/*
+installer/Output
+out.txt
+*~
+_ReSharper.*
+obj
+bin
|
@@ -1,6 +1,6 @@ OVERVIEW
-This is a stand-alone server for Mercurial repositories, that provides
+This is a stand-alone server for Mercurial repositories that provides
Mercurial data in the form of JSON requests. This allows for much
more efficient polling of repository data from long-running
applications, such as websites, IDEs, and so on.
|
@@ -1,5 +1,13 @@ param([string] $repopath = "..")
+function Get-Batchfile ($file) {
+ $cmd = "`"$file`" & set"
+ cmd /c $cmd | Foreach-Object {
+ $p, $v = $_.split('=')
+ Set-Item -path env:$p -value $v
+ }
+}
+
function Get-ScriptDirectory
{
$Invocation = (Get-Variable MyInvocation -Scope 1).Value
@@ -10,9 +18,18 @@
pushd $path
pushd kiln
+if (test-path 'c:\pythonve\kiln25')
+{
+ Get-Batchfile('c:\pythonve\kiln25\scripts\activate.bat')
+}
python setup.py py2exe
+if (test-path 'c:\pythonve\kiln25')
+{
+ Get-Batchfile('c:\pythonve\kiln25\scripts\deactivate.bat')
+}
hg -R $repopath archive -t zip dist\source.zip
popd
+c:\Windows\Microsoft.NET\Framework\v3.5\msbuild.exe /p:Configuration=Release installer\RepoDirectoryMigrator\RepoDirectoryMigrator.sln
$iscc = "C:\Program Files (x86)\Inno Setup 5\ISCC.exe"
if (-not (Test-Path $iscc))
{
|
|
@@ -1,5 +1,5 @@ #define MyAppName "Kiln Storage Service"
-#define MyAppVerName "Kiln Storage Service 1.0"
+#define MyAppVerName "Kiln Storage Service 2.5"
#define MyAppPublisher "Fog Creek Software"
#define MyAppURL "http://www.fogcreek.com/kiln/"
@@ -33,16 +33,21 @@WelcomeLabel2=This will install [name/ver] on your computer.
[Files]
+
+Source: RepoDirectoryMigrator\RepoDirectoryMigrator\bin\x86\Release\RepoDirectoryMigrator.exe; DestDir: {tmp}; Flags: ignoreversion
+Source: ctags.exe; DestDir: {app}; Flags: ignoreversion
Source: ..\kiln\dist\library.zip; DestDir: {app}; Flags: ignoreversion
-Source: ..\kiln\dist\w9xpopen.exe; DestDir: {app}; Flags: ignoreversion
Source: ..\kiln\dist\backend.exe; DestDir: {app}; Flags: ignoreversion
+Source: ..\kiln\redis-server.exe; DestDir: {app}; Flags: ignoreversion
Source: ..\kiln\dist\source.zip; DestDir: {app}; Flags: ignoreversion
+Source: ..\kiln\dist\opengrok.jar; DestDir: {app}\opengrok; Flags: ignoreversion
+Source: ..\kiln\dist\lib\*; DestDir: {app}\opengrok\lib; Flags: recursesubdirs replacesameversion; Excludes: .hg*,*~
+Source: ..\kiln\client.crt; DestDir: {app}; Flags: ignoreversion
+Source: ..\kiln\client.key; DestDir: {app}; Flags: ignoreversion
[Icons]
Name: {group}\{cm:UninstallProgram,{#MyAppName}}; Filename: {uninstallexe}
-[Run]
-Filename: {app}\backend.exe; Parameters: --startup auto install; StatusMsg: Registering Kiln Storage Service; Flags: runhidden
[UninstallRun]
Filename: {app}\backend.exe; Parameters: stop; StatusMsg: Stopping Kiln Storage Service; Flags: runhidden
Filename: {app}\backend.exe; Parameters: remove; StatusMsg: Removing Kiln Storage Service; Flags: runhidden
@@ -56,12 +61,40 @@ StorageLocation: String;
Port: Cardinal;
+ JavaVersion: String;
+
+ StoppedOldService: Boolean;
+
const
REG_KEY = 'Software\Fog Creek Software\Kiln';
+ OG_KEY = 'Software\Fog Creek Software\Kiln\OpenGrok';
+ DAEMON_KEY = 'Software\Fog Creek Software\Kiln\Daemon';
+ JAR = 'Jar';
BACKEND_IP = 'KilnBackendIP';
BACKEND_PORT = 'KilnBackendPort';
REPOSITORY_ROOT = 'KilnRepositoryRoot';
DELIBERATELY_PUBLIC = 'KilnDeliberatelyPublic';
+ MINIREDIS_DB = 'MiniredisDB';
+ DATA_DIR = 'DataDir';
+
+ INDEX_THREADS = 'IndexThreads';
+ QUEUE_THREADS = 'QueueThreads';
+ NINDEX_THREADS = 1;
+ NQUEUE_THREADS = 1;
+
+ DAEMON_HOST = 'host';
+ DAEMON_PORT = 'port';
+ DAEMON_DB = 'db';
+ DAEMON_SSL_KEY = 'ssl_key';
+ DAEMON_SSL_CERT = 'ssl_cert';
+
+ JAVA_KEY = 'Software\JavaSoft\Java Runtime Environment';
+ JAVA_VERSION = 'CurrentVersion';
+
+ JAVA = 'Java';
+ CONFIG_UPDATE = 'ConfigUpdate';
+ JAVA_HOME = 'JavaHome';
+ CTAGS = 'CTags';
procedure InitializeWizard;
var
@@ -72,6 +105,7 @@ nextPageParent: Integer;
param: String;
begin
+ StoppedOldService := False;
LocalOnly := False;
for idx := 0 to ParamCount do
begin
@@ -133,8 +167,12 @@procedure FinishInstall;
var
ip: String;
+ ogStorageLocation: String;
+ MiniredisDBLocation: String;
ResultCode: Integer;
deliberatelyPublic: Cardinal;
+ JavaLoc: String;
+ ret: Boolean;
begin
if (CompareStr(StorageLocation, '') = 0) then StorageLocation := StorageLocationPage.Values[0];
if Port = 0 then Port := StrToInt(PortNumberPage.Values[0]);
@@ -149,25 +187,70 @@ deliberatelyPublic := 1;
end;
+ ogStorageLocation := StorageLocation + '\opengrokdata';
+ MiniredisDBLocation := StorageLocation + '\miniredis.db';
if not DirExists(StorageLocation) then CreateDir(StorageLocation);
+ if not DirExists(ogStorageLocation) then CreateDir(ogStorageLocation);
+
+ if IsWin64 then ret := RegQueryStringValue(HKLM64, JAVA_KEY + '\' + JavaVersion, JAVA_HOME, JavaLoc)
+ else ret := RegQueryStringValue(HKEY_LOCAL_MACHINE, JAVA_KEY + '\' + JavaVersion, JAVA_HOME, JavaLoc);
+
+ JavaLoc := JavaLoc + '\bin\java.exe'
RegWriteStringValue(HKEY_LOCAL_MACHINE, REG_KEY, REPOSITORY_ROOT, StorageLocation);
RegWriteDWordValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_PORT, Port);
RegWriteStringValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_IP, ip);
RegWriteDWordValue(HKEY_LOCAL_MACHINE, REG_KEY, DELIBERATELY_PUBLIC, deliberatelyPublic);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, REG_KEY, MINIREDIS_DB, MiniredisDBLocation);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, JAR, ExpandConstant('{app}\opengrok\opengrok.jar'));
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, DATA_DIR, ogStorageLocation);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, CONFIG_UPDATE, 'localhost:2424');
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, JAVA, JavaLoc);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, OG_KEY, CTAGS, ExpandConstant('{app}\ctags.exe'));
+
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_HOST, 'localhost');
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_PORT, Port + 1);
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_DB, 0);
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, INDEX_THREADS, NINDEX_THREADS);
+ RegWriteDWordValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, QUEUE_THREADS, NQUEUE_THREADS);
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_KEY, ExpandConstant('{app}\client.key'));
+ RegWriteStringValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_CERT, ExpandConstant('{app}\client.crt'));
+
+ if Exec(ExpandConstant('{tmp}\RepoDirectoryMigrator.exe'), '', '', SW_HIDE, ewWaitUntilTerminated, ResultCode) then begin
+ if ResultCode <> 0 then RaiseException('Failed to migrate repositories to new directory structure!');
+ end;
+
+ if Exec(ExpandConstant('{app}\backend.exe'), '--startup auto install', '', SW_HIDE, ewWaitUntilTerminated, ResultCode) then begin
+ if ResultCode <> 0 then RaiseException('Failed to install service!');
+ end;
if Exec(ExpandConstant('{app}\backend.exe'), 'start', '', SW_HIDE, ewWaitUntilTerminated, ResultCode) then begin
if ResultCode <> 0 then RaiseException('Failed to start service!');
end;
end;
+procedure DeinitializeSetup();
+var
+ BackendPath: String;
+ ResultCode: Integer;
+begin
+ if StoppedOldService then begin
+ BackendPath := ExpandConstant('{app}\backend.exe');
+ Exec(BackendPath, 'start', '', SW_HIDE, ewNoWait, ResultCode);
+ end;
+end;
+
procedure HaltBackend;
var
BackendPath: String;
ResultCode: Integer;
begin
BackendPath := ExpandConstant('{app}\backend.exe');
- if FileExists(BackendPath) then Exec(BackendPath, 'stop', '', SW_HIDE, ewWaitUntilTerminated, ResultCode);
+ if FileExists(BackendPath) then begin
+ StoppedOldService := True;
+ Exec(BackendPath, 'stop', '', SW_HIDE, ewWaitUntilTerminated, ResultCode);
+ end;
+ Sleep(3000)
end;
procedure CurStepChanged(CurStep: TSetupStep);
@@ -183,5 +266,40 @@ RegDeleteValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_PORT);
RegDeleteValue(HKEY_LOCAL_MACHINE, REG_KEY, BACKEND_IP);
RegDeleteValue(HKEY_LOCAL_MACHINE, REG_KEY, DELIBERATELY_PUBLIC);
+
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, JAR);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, DATA_DIR);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, CONFIG_UPDATE);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, JAVA);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, OG_KEY, CTAGS);
+
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_HOST);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_PORT);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_DB);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, INDEX_THREADS);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, QUEUE_THREADS);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_KEY);
+ RegDeleteValue(HKEY_LOCAL_MACHINE, DAEMON_KEY, DAEMON_SSL_CERT);
end;
end;
+
+function NextButtonClick(CurPageID: Integer) : Boolean;
+var
+ version: String;
+ ret: Boolean;
+begin
+ if CurPageID = wpWelcome then
+ begin
+ if IsWin64 then ret := RegQueryStringValue(HKLM64, JAVA_KEY, JAVA_VERSION, version)
+ else ret := RegQueryStringValue(HKEY_LOCAL_MACHINE, JAVA_KEY, JAVA_VERSION, version);
+
+ if ret then JavaVersion := version
+ else
+ begin
+ MsgBox('The Kiln Storage Service requires the Java Runtime Environment (JRE) be installed. Please install the JRE for your platform from the Oracle website.',
+ mbInformation, MB_OK);
+ Abort();
+ end;
+ end;
+ Result := True
+end;
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,48 @@ + <Configuration>
+ <SettingsComponent>
+ <string />
+ <integer />
+ <boolean>
+ <setting name="SolutionAnalysisEnabled">False</setting>
+ </boolean>
+ </SettingsComponent>
+ <RecentFiles>
+ <RecentFiles>
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="398" fromTop="14" />
+ </RecentFiles>
+ <RecentEdits>
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="92" fromTop="3" />
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="228" fromTop="9" />
+ <File id="AFC5BBEB-4CA4-4AEA-8449-95B66478AC29/f:Program.cs" caret="366" fromTop="14" />
+ </RecentEdits>
+ </RecentFiles>
+ <NAntValidationSettings>
+ <NAntPath value="" />
+ </NAntValidationSettings>
+ <UnitTestRunner>
+ <Providers />
+ </UnitTestRunner>
+ <UnitTestRunnerNUnit>
+ <NUnitInstallDir IsNull="False">
+ </NUnitInstallDir>
+ <UseAddins>Never</UseAddins>
+ </UnitTestRunnerNUnit>
+ <CompletionStatisticsManager>
+ <ItemStatistics item="Default">
+ <Item value="using" priority="0" />
+ <Item value="Microsoft" priority="0" />
+ <Item value="Win32" priority="0" />
+ <Item value="var" priority="2" />
+ <Item value="Registry`0" priority="0" />
+ <Item value="rk" priority="0" />
+ <Item value="const" priority="1" />
+ <Item value="string" priority="0" />
+ <Item value="RegistryKey`0" priority="0" />
+ <Item value="Environment`0" priority="0" />
+ <Item value="root" priority="0" />
+ </ItemStatistics>
+ <ItemStatistics item="Qualified:Microsoft.Win32.RegistryKey">
+ <Item value="GetValue`0" priority="1" />
+ </ItemStatistics>
+ </CompletionStatisticsManager>
+</Configuration>
\ No newline at end of file |
|
@@ -0,0 +1,20 @@ + 
+Microsoft Visual Studio Solution File, Format Version 10.00
+# Visual Studio 2008
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RepoDirectoryMigrator", "RepoDirectoryMigrator\RepoDirectoryMigrator.csproj", "{AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|x86 = Debug|x86
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Debug|x86.ActiveCfg = Debug|x86
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Debug|x86.Build.0 = Debug|x86
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Release|x86.ActiveCfg = Release|x86
+ {AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}.Release|x86.Build.0 = Release|x86
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
|
|
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,30 @@ + using System;
+using System.IO;
+using Microsoft.Win32;
+
+namespace RepoDirectoryMigrator
+{
+ class Program
+ {
+ static void Main(string[] args)
+ {
+ var repoRoot = (string)Registry.GetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Fog Creek Software\Kiln", "KilnRepositoryRoot", null);
+ if (string.IsNullOrEmpty(repoRoot))
+ {
+ Console.Error.WriteLine("KEY NOT FOUND!");
+ Environment.Exit(1);
+ }
+ var repositories = Directory.GetDirectories(repoRoot, "????????-????-????-????-????????????");
+ foreach (var path in repositories)
+ {
+ var repo = Path.GetFileName(path);
+ var part1 = Path.Combine(repoRoot, repo.Substring(0, 2));
+ var part2 = Path.Combine(part1, repo.Substring(2, 2));
+ Directory.CreateDirectory(part1);
+ Directory.CreateDirectory(part2);
+ Directory.Move(path, Path.Combine(part2, repo));
+ }
+ Console.Error.WriteLine("SUCCESS!");
+ }
+ }
+}
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,36 @@ + using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("RepoDirectoryMigrator")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Microsoft")]
+[assembly: AssemblyProduct("RepoDirectoryMigrator")]
+[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("47cb10cb-cc59-438e-b866-e7b6eebcbab0")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
|
Change 1 of 1
|
||
---|---|---|
|
@@ -0,0 +1,66 @@ + <?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="3.5" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+ <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+ <ProductVersion>9.0.30729</ProductVersion>
+ <SchemaVersion>2.0</SchemaVersion>
+ <ProjectGuid>{AFC5BBEB-4CA4-4AEA-8449-95B66478AC29}</ProjectGuid>
+ <OutputType>Exe</OutputType>
+ <AppDesignerFolder>Properties</AppDesignerFolder>
+ <RootNamespace>RepoDirectoryMigrator</RootNamespace>
+ <AssemblyName>RepoDirectoryMigrator</AssemblyName>
+ <TargetFrameworkVersion>v2.0</TargetFrameworkVersion>
+ <FileAlignment>512</FileAlignment>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+ <DebugSymbols>true</DebugSymbols>
+ <DebugType>full</DebugType>
+ <Optimize>false</Optimize>
+ <OutputPath>bin\Debug\</OutputPath>
+ <DefineConstants>DEBUG;TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+ <DebugType>pdbonly</DebugType>
+ <Optimize>true</Optimize>
+ <OutputPath>bin\Release\</OutputPath>
+ <DefineConstants>TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x86' ">
+ <DebugSymbols>true</DebugSymbols>
+ <OutputPath>bin\x86\Debug\</OutputPath>
+ <DefineConstants>DEBUG;TRACE</DefineConstants>
+ <DebugType>full</DebugType>
+ <PlatformTarget>x86</PlatformTarget>
+ <ErrorReport>prompt</ErrorReport>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x86' ">
+ <OutputPath>bin\x86\Release\</OutputPath>
+ <DefineConstants>TRACE</DefineConstants>
+ <Optimize>true</Optimize>
+ <DebugType>pdbonly</DebugType>
+ <PlatformTarget>x86</PlatformTarget>
+ <ErrorReport>prompt</ErrorReport>
+ </PropertyGroup>
+ <ItemGroup>
+ <Reference Include="System" />
+ <Reference Include="System.Data" />
+ <Reference Include="System.Xml" />
+ </ItemGroup>
+ <ItemGroup>
+ <Compile Include="Program.cs" />
+ <Compile Include="Properties\AssemblyInfo.cs" />
+ </ItemGroup>
+ <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+ <!-- To modify your build process, add your task inside one of the targets below and uncomment it.
+ Other similar extension points exist, see Microsoft.Common.targets.
+ <Target Name="BeforeBuild">
+ </Target>
+ <Target Name="AfterBuild">
+ </Target>
+ -->
+</Project>
\ No newline at end of file |
|
|
@@ -1,43 +1,20 @@ #!/usr/bin/env python
+import site
+site.addsitedir('/home/kiln/virtualenv/kiln25/lib/python2.6/site-packages')
+
import os
import sys
-import urllib
-import urllib2
-
-from django.core.handlers.wsgi import WSGIHandler
OUR_ROOT = os.path.abspath(os.path.dirname(__file__))
os.environ['HGENCODING'] = 'utf8'
+os.environ['TEMP'] = '/home/kiln/data/tmp'
paths = (OUR_ROOT, os.path.join(OUR_ROOT, 'kiln'))
for path in paths:
if path not in sys.path:
sys.path.append(path)
- os.environ['DJANGO_SETTINGS_MODULE'] = 'kiln.settings'
-class KilnWSGIHandler(WSGIHandler):
- def report_exception(self, e):
- def get_stack_trace():
- import traceback
- return '\n'.join(traceback.format_exception(*sys.exc_info()))
+from kiln.api import handlers
+from kiln.versionmiddleware import VersionMiddleware
+from kiln.errorloggingmiddleware import ErrorLoggingMiddleware
- bug = {'ScoutUserName': 'BugzScout',
- 'ScoutProject': 'Kiln',
- 'ScoutArea': 'Backend',
- 'Description': str(e),
- 'Extra': get_stack_trace()}
-
- try:
- urllib2.urlopen('http://our.fogbugz.com/scoutSubmit.asp', urllib.urlencode(bug))
- except:
- pass
-
- def __call__(self, environ, start_response):
- if 'kiln.tempdir' in environ:
- os.environ['TMPDIR'] = environ['kiln.tempdir']
- try:
- return super(KilnWSGIHandler, self).__call__(environ, start_response)
- except Exception, e:
- self.report_exception(e)
- raise
-
-application = KilnWSGIHandler()
+application = ErrorLoggingMiddleware(VersionMiddleware(handlers.app))
|
|
@@ -0,0 +1,12 @@ + # legacy imports
+from redis.client import Redis, ConnectionPool
+from redis.exceptions import RedisError, ConnectionError, AuthenticationError
+from redis.exceptions import ResponseError, InvalidResponse, InvalidData
+
+__version__ = '2.0.0'
+
+__all__ = [
+ 'Redis', 'ConnectionPool',
+ 'RedisError', 'ConnectionError', 'ResponseError', 'AuthenticationError'
+ 'InvalidResponse', 'InvalidData',
+ ]
|
|
@@ -46,11 +46,15 @@ import string
import os
import shutil
-from mercurial import commands, extensions, util, bdiff
+from mercurial import bdiff, commands, extensions, store, util
from mercurial.context import filectx
from mercurial.node import nullrev
from mercurial.i18n import _
-from mercurial.store import hybridencode
+
+CACHEPATH = 'annotations/'
+
+def hybridencode(f):
+ return store._hybridencode(f, lambda path: store._auxencode(path, True))
class annotationcache(object):
''' Provides access to the cache of file annotations.
@@ -61,7 +65,7 @@ access. A cache file is line-oriented where each line is an
n-tuple of strings separated by the separator character ':'.
- If the file has any ancestor with a different name, then we
+ If the file has any ancestor with a different name, then we
append .f or .n depending on whether or not we followed the
annotation history to these ancestors. Otherwise a generic
cache is created which works for either case.
@@ -70,8 +74,8 @@ def __init__(self, repo, follow = True):
''' Create a new annotations cache for the given repository '''
self.followflag = follow and 'f' or 'n'
+ self._opener = repo.opener
self.cachepath = repo.join("annotations")
- self.opener = util.opener(self.cachepath)
self.sepchar = ':'
# fdcache caches information about existing files:
@@ -81,6 +85,9 @@ # fdcache[path] does not exist if the file state is unknown
self.fdcache = {}
+ def opener(self, path, *args, **kwargs):
+ return self._opener(CACHEPATH + path, *args, **kwargs)
+
def makepath(self, filectx):
''' Computes the path to the cache for the given file revision. '''
relpath = os.path.join('data', filectx.path())
|
|
@@ -0,0 +1,44 @@ + # Copyright (C) 2008-2010 Fog Creek Software. All rights reserved.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2, incorporated herein by reference.
+
+from mercurial import ui
+import traceback
+from bugzscout import report_error
+
+class emptyui(ui.ui):
+ def __init__(self, src=None, suppressoutput=True):
+ super(emptyui, self).__init__(src)
+ if isinstance(src, emptyui):
+ self.suppressoutput = src.suppressoutput
+ else:
+ self.suppressoutput = suppressoutput
+
+ if self.suppressoutput:
+ self.pushbuffer()
+
+ # Wrap the ui's write functions because writing to stdout causes an exception.
+ # Save the output using a buffer and create a bug from it later (essentially
+ # catch the error then report it).
+ def write_err(self, *args, **opts):
+ return self.write(*args, **opts)
+
+ def write(self, *args, **opts):
+ super(emptyui, self).write(*args, **opts)
+ if self.suppressoutput:
+ if len(self._buffers) == 1:
+ super(emptyui, self).write('\n'.join(traceback.format_stack()) + '\n')
+
+ def __del__(self):
+ if self.suppressoutput:
+ buffer = self.popbuffer()
+ if buffer:
+ report_error('Mercurial output error.', buffer)
+ try:
+ super(emptyui, self).__del__()
+ except AttributeError:
+ pass
+
+ def readconfig(self, *args, **kwargs):
+ pass
|
|
@@ -3,6 +3,8 @@ # This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
+import difflib
+import re
from pygments import highlight
from pygments.lexers import get_lexer_for_filename, guess_lexer_for_filename, TextLexer
from pygments.formatters import HtmlFormatter
@@ -13,8 +15,14 @@ 'vbs': 'vb',
'fbp5': 'xml',
'xul': 'xml',
+ 'ipp': 'cpp',
'jsm': 'js'}
+LINE_MAX = 20000
+
+def ensurenewline(s):
+ return s if s.endswith('\n') else s + '\n'
+
def tweak(filename):
"""change filename to a known extension, if applicable"""
(filename, extension) = filename.split('/')[-1].rsplit('.', 1)
@@ -25,50 +33,174 @@ """select an appropriate lexer based on the filename"""
try:
if content:
- return guess_lexer_for_filename(tweak(filename), content, stripnl=False)
+ l = guess_lexer_for_filename(tweak(filename), content, stripnl=False)
else:
- return get_lexer_for_filename(tweak(filename), stripnl=False)
+ l = get_lexer_for_filename(tweak(filename), stripnl=False)
except:
- return TextLexer(stripnl=False)
+ l = TextLexer(stripnl=False)
+ l.add_filter('whitespace', spaces=True, wstokentype=False)
+ return l
-def highlighted(lex, code):
- return highlight(code, lex, HtmlFormatter(nowrap=True))
+class IntralineHtmlFormatter(HtmlFormatter):
+ in_change = False
+ ranges = []
+
+ def __init__(self, ranges=None, *args, **kw):
+ if ranges:
+ self.ranges = ranges
+ HtmlFormatter.__init__(self, *args, **kw)
+
+ def _split_change_markers(self, tokensource):
+ '''Pre-process the token stream before it is formatted, to mark the tokens that should be highlighted for intraline diffs.'''
+ ranges = self.ranges or []
+ pos = 0
+ for ttype, value in tokensource:
+ for value in value.splitlines(True):
+ l = len(value)
+ range = None
+ rr = [r for r in ranges if (r[0] <= pos <= r[1]) or (pos <= r[0] <= r[1] <= pos + l) or (r[0] <= pos + l <= r[1])]
+ if not rr:
+ yield ttype, value
+ pos += l
+ continue
+ last = None
+ for r in rr:
+ if r[0] <= pos:
+ # r starts at or before token
+ if r[1] <= pos + l:
+ # range covers prefix of token
+ self.in_change = True
+ i = r[1] - pos
+ yield ttype, value[:i]
+ self.in_change = False
+ else:
+ # range covers whole token
+ self.in_change = True
+ yield ttype, value
+ self.in_change = False
+ else:
+ # r starts in the middle of the token
+ i = last[1] - pos if last else 0
+ j = r[0] - pos
+ yield ttype, value[i:j]
+ if r[1] <= pos + l:
+ # range covers middle chunk
+ self.in_change = True
+ i = r[0] - pos
+ j = r[1] - pos
+ yield ttype, value[i:j]
+ self.in_change = False
+ else:
+ # range covers suffix of token
+ self.in_change = True
+ i = r[0] - pos
+ yield ttype, value[i:]
+ self.in_change = False
+ last = r
+ if last[1] <= pos + l:
+ i = last[1] - pos
+ yield ttype, value[i:]
+ pos += l
+
+ def _format_lines(self, tokensource):
+ return super(IntralineHtmlFormatter, self)._format_lines(self._split_change_markers(tokensource))
+
+ def _get_css_class(self, ttype):
+ return super(IntralineHtmlFormatter, self)._get_css_class(ttype) + (' ch' if self.in_change else '')
+
+def highlighted(lex, code, ranges=None):
+ return highlight(code, lex, IntralineHtmlFormatter(ranges, nowrap=True))
+
+def highlight_patch(lex, lines, ranges=None):
+ lines = [(line[0], ensurenewline(line[1:LINE_MAX])) for line in lines]
+ for x in xrange(0, len(lines)):
+ if lines[x][0] == '\\':
+ lines[x] = (lines[x][0], '\n')
+ patch = ''.join(l[1] for l in lines)
+ patch = highlighted(lex, patch, ranges).splitlines(True)
+ for x in xrange(0, min(len(patch), len(lines))):
+ if lines[x][0] == '\\':
+ lines[x] = (lines[x][0], ' No newline at end of file\n')
+ else:
+ lines[x] = (lines[x][0], patch[x])
+ return ''.join(line[0] + line[1] for line in lines)
+
+# returns a list of ranges (a, b), marking that characters a:b in the patch are changed.
+def intraline_diff(patch):
+ removed_lines = []
+ added_lines = []
+ ranges = []
+ l = 0
+
+ for line in patch + [' ']:
+ if line[0] == '-':
+ removed_lines.append(line[1:])
+ elif line[0] == '+':
+ added_lines.append(line[1:])
+ else:
+ if added_lines or removed_lines:
+ rtotal = sum(len(s) for s in removed_lines)
+ atotal = sum(len(s) for s in added_lines)
+
+ # split the diff text into whole words and individual non-word characters
+ removed_words = [w for w in re.split(r'(\w+|\W)', ''.join(removed_lines)) if w]
+ added_words = [w for w in re.split(r'(\w+|\W)', ''.join(added_lines)) if w]
+ removed, added = l, l + rtotal
+ seq = difflib.SequenceMatcher();
+ seq.set_seqs(removed_words, added_words)
+
+ # find the matching words of each string, using the ranges in each opcode.
+ # 'equal' action is for non-changed text; otherwise, mark the range as changed.
+ for (action, r1, r2, a1, a2) in seq.get_opcodes():
+ ac = ''.join(added_words[a1:a2])
+ rc = ''.join(removed_words[r1:r2])
+ a = len(ac)
+ r = len(rc)
+ added += a
+ removed += r
+ if action == 'equal':
+ continue
+ if a != 0:
+ ranges.append((added - a, added))
+ if r != 0:
+ ranges.append((removed - r, removed))
+
+ l += atotal + rtotal
+ removed_lines = []
+ added_lines = []
+ l += len(line) - 1
+ return sorted(ranges)
+
+def format(filename, diff):
+ if not diff:
+ return None
+ formatted = []
+ patch = []
+
+ if not isinstance(diff, unicode):
+ diff_asc = diff
+ else:
+ diff_asc = diff.encode('utf-8')
+ diff_asc = diff_asc.replace('\r', '')
+ lines = diff_asc.splitlines(True)
+ if isinstance(diff, unicode):
+ lines = [l.decode('utf-8') for l in lines]
+
+ lex = lexer(filename)
+ for line in lines:
+ if line.startswith(u'@@'):
+ if patch: formatted.extend(highlight_patch(lex, patch, intraline_diff(patch)))
+ formatted.append(line)
+ patch = []
+ else:
+ patch.append(line)
+ if patch: formatted.extend(highlight_patch(lex, patch, intraline_diff(patch)))
+ return ''.join(formatted)
def format_diffs(diffs):
- def highlight_patch(lex, lines):
- lines = [(line[0], line[1:]) for line in lines]
- for x in xrange(0, len(lines)):
- if lines[x][0] == '\\':
- lines[x] = (lines[x][0], '\n')
- patch = ''.join(l[1] for l in lines)
- patch = highlighted(lex, patch).splitlines(True)
- for x in xrange(0, min(len(patch), len(lines))):
- if lines[x][0] == '\\':
- lines[x] = (lines[x][0], ' No newline at end of file\n')
- else:
- lines[x] = (lines[x][0], patch[x])
- return ''.join(line[0] + line[1] for line in lines)
-
- def format(filename, diff):
- if not diff:
- return None
- formatted = []
- patch = []
- diff = diff.replace('\r', '')
- lines = diff.splitlines(True)
- lex = lexer(filename)
- for line in lines:
- if line.startswith('@@'):
- if patch: formatted.extend(highlight_patch(lex, patch))
- formatted.append(line)
- patch = []
- else:
- patch.append(line)
- if patch: formatted.extend(highlight_patch(lex, patch))
- return ''.join(formatted)
-
for d in diffs:
d['formatted_diff'] = format(d['file']['name'], d['diff'])
def format_file(filename, contents):
- return highlighted(lexer(filename), contents.replace('\r', ''))
+ lines = [line[:LINE_MAX] for line in contents.replace('\r', '').split('\n')]
+ return highlighted(lexer(filename), '\n'.join(lines))
|
|
|
@@ -1,205 +1,377 @@ - # Copyright (C) 2009-2010 by Fog Creek Software. All rights reserved.
+# Copyright (C) 2009-2011 by Fog Creek Software. All rights reserved.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
+from functools import wraps
+import hashlib
+import os
+import urllib2
-import base64
-import os
-import sys
-import urllib
-import urllib2
-from threading import Thread
+from flask import Flask, Response, request
+from mercurial import hgweb, util, context
+from mercurial.error import LockHeld, RepoLookupError
+from werkzeug.exceptions import NotFound, BadRequest
+import settings
+import simplejson
-from django.conf import settings
-from django.utils import simplejson
-from mercurial import ui, util
-from piston.emitters import Emitter
-from piston.handler import AnonymousBaseHandler, typemapper
-from piston.utils import rc
+import Image
+import cStringIO
+from bugzscout import report_exception
+from encoders import EmittableEncoder
+from formatter import format_diffs, format_file
+from repositories import Repository, RepositoryNotSubsetException, CreatesNewHeadsException, filetuple, hexdecode, determinedisplaysize
+from webtasks import asyncpost, queue_repo_index, queue_repo_create, queue_repo_strip
+import bfiles
+import syncstatus
import urlutil
-from formatter import format_diffs, format_file
-from repositories import Repository, RepositoryNotSubsetException, CreatesNewHeadsException
-from repositories import filetuple, hexdecode
-class fakerequest(object):
- pass
-fakerequest.GET = {}
+app = Flask(__name__)
+
+def jsonify(obj):
+ if isinstance(obj, Response) or isinstance(obj, basestring):
+ return obj
+ return Response(enc.encode(obj), mimetype='application/json')
+
+def route(url, methods=['GET'], as_json=True):
+ def wrapper(f):
+ @app.route(url, methods=methods)
+ @wraps(f)
+ def inner(*args, **kwargs):
+ r = f(*args, **kwargs)
+ if as_json:
+ r = jsonify(r)
+ return r
+ return inner
+ return wrapper
+
+def get(url, as_json=True):
+ return route(url, methods=['GET'], as_json=as_json)
+
+def post(url, as_json=True):
+ return route(url, methods=['POST'], as_json=as_json)
+
+def delete(url):
+ return app.route(url, methods=['DELETE'])
def error(message, code):
return {'type': 'error', 'message': message, 'code': code}
-def reportexception(e):
- if settings.DEBUG:
- return
+enc = EmittableEncoder()
- def gettraceback():
- import traceback
- return '\n'.join(traceback.format_exception(*(sys.exc_info())))
-
- traceback = gettraceback()
- bug = {'ScoutUserName': settings.FOGBUGZ_USERNAME,
- 'ScoutProject': settings.FOGBUGZ_PROJECT,
- 'ScoutArea': settings.FOGBUGZ_AREA,
- 'Description': 'Backend exception: %s' % e,
- 'Extra': traceback}
-
- if settings.HOSTED:
- try:
- urllib2.urlopen(settings.FOGBUGZ_URL, urllib.urlencode(bug))
- except:
- pass
- else:
- from filelogmiddleware import _log_error
- _log_error(bug)
-
-class PingbackThread(Thread):
- def __init__(self, handler, method, pingback, request, args, kwargs):
- super(PingbackThread, self).__init__()
- self.handler = handler
- self.method = method
- self.pingback = pingback
- self.request = request
- self.args = args
- self.kwargs = kwargs
-
- def run(self):
- r = self.method(self.handler, *self.args, **self.kwargs)
- emitter, mime = Emitter.get('json')
- srl = emitter(r, typemapper, self.handler, self.handler.fields, True)
- json = srl.render(fakerequest)
-
- success = False
- attempts = 3
- while attempts and not success:
- try:
- attempts -= 1
- urllib2.urlopen(self.pingback, urllib.urlencode({'data': json.encode('utf8')}))
- success = True
- except urllib2.URLError, e:
- if attempts == 0:
- reportexception(e)
-
-def ping_wrapper(method):
- def f(self, *args, **kwargs):
- q = args[0].POST
- if 'pingback' in q:
- t = PingbackThread(self, method, q['pingback'], args[0], args, kwargs)
- try:
- t.start()
- except Exception, e:
- print e
- return rc.ALL_OK
- else:
- return method(self, *args, **kwargs)
- return f
-
-def with_pingbacks(cls):
- """pingback wrapper
-
- This is a decorator that makes any given handler function run
- asynchronously if provided with a pingback parameter in the
- web request."""
-
- for m in ('create', 'read', 'delete', 'update'):
- if m in cls.__dict__.keys():
- method = getattr(cls, m)
- method = ping_wrapper(method)
- setattr(cls, m, method)
- return cls
-
-@with_pingbacks
-class RepositoryHandler(AnonymousBaseHandler):
- allowed_methods = ('GET', 'POST', 'DELETE',)
- model = Repository
- fields = ('uuid', 'parent',)
-
- def read(self, request, uuid=None):
- if uuid:
- r = Repository(uuid)
- if r.exists():
- return r
- return rc.NOT_FOUND
- if not settings.HOSTED:
- return rc.BAD_REQUEST
- return [Repository(folder)
- for folder in os.listdir(settings.KILN_REPOSITORY_ROOT)
- if Repository(folder).exists()]
-
- def create(self, request):
- q = request.POST
- uuid = q['uuid']
- meta = simplejson.loads(q['meta']) if q.get('meta') else {}
- if q.get('parent'):
- r = Repository(q['parent']).cloneto(uuid, meta)
- else:
- r = Repository(uuid)
- r.create(meta)
- return r
-
- def delete(self, request, uuid):
+@get('/repo/<uuid>')
+def repo_get(uuid=None):
+ if uuid:
r = Repository(uuid)
if r.exists():
- r.delete()
- return rc.DELETED
- return rc.NOT_FOUND
+ return r
+ raise NotFound
+ if not settings.HOSTED:
+ raise BadRequest
+ repos = [Repository(folder)
+ for folder in os.listdir(settings.KILN_REPOSITORY_ROOT)
+ if Repository(folder).exists()]
+ for p1 in os.listdir(settings.KILN_REPOSITORY_ROOT):
+ if len(p1) == 2:
+ for p2 in os.listdir(os.path.join(settings.KILN_REPOSITORY_ROOT, p1)):
+ parent = os.path.join(settings.KILN_REPOSITORY_ROOT, p1, p2)
+ repos.extend(Repository(folder) for folder in os.listdir(parent) if Repository(folder).exists())
+ return repos
-class ManifestHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
+@post('/repo')
+def repo_create():
+ q = request.form
+ try:
+ uuid = q['uuid']
+ pingback = q['pingback']
+ site = urlutil.siteurl(request)
+ meta = q.get('meta', None)
+ parent = q.get('parent', None)
+ except Exception, e:
+ raise
+ return BadRequest(e)
+ queue_repo_create(uuid, pingback, site, meta=meta, parent=parent)
+ return 'OK'
- def read(self, request, uuid, rev='tip'):
+@post('/repo/<uuid>')
+def update_meta(uuid):
+ q = request.form
+ try:
+ meta = simplejson.loads(q['meta']) if q.get('meta') else {}
+ except:
+ raise BadRequest
+
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ r.meta = meta
+ return r
+
+@delete('/repo/<uuid>')
+def repo_delete(uuid):
+ # This can only ever be called manually, so it's okay that
+ # this key is never used on the website side. If we do ever
+ # add repository purging via heartbeat or whatever, this
+ # will obviously need to change
+ if settings.HOSTED:
+ if request.args.get('magic_word') != settings.WHITE_RABBIT_OBJECT:
+ raise BadRequest
+ r = Repository(uuid)
+ if r.exists():
+ r.delete()
+ syncstatus.remove_repo(r)
+ return Response('', status=204)
+ raise NotFound
+
+@post('/repo/<uuid>/commit')
+def commit(uuid):
+ q = request.form
+ author = q['author']
+ parent = q['parent']
+ date = q['date']
+ message = q['message']
+ path = hexdecode(q['path'])
+ upload = request.files['file']
+ if upload.content_length > settings.KILN_MAX_COMMIT_FILE_SIZE:
+ return error('The uploaded file is too large.', 'too_large')
+ data = upload.read()
+ if hasattr(upload, 'close'):
+ upload.close()
+
+ def _writefile(repo, mctx, path):
+ return context.memfilectx(path, data)
+
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+
+ repo = r.repo
+ l = None
+ try:
+ l = repo.lock()
+ except LockHeld:
+ if l: l.release()
+ return error('The repository is locked.', 'repo_locked')
+ try:
+ try:
+ ctx = repo[parent]
+ if ctx.children():
+ return error('Commit creates new head!', 'not_head')
+ except RepoLookupError:
+ raise NotFound
+ mctx = context.memctx(repo, [parent, None], message, [path], _writefile, user=author, date=date)
+ mctx.commit()
+ except Exception, e:
+ report_exception(e)
+ raise
+ finally:
+ if l: l.release()
+
+ return Response('OK')
+
+@post('/repo/stripped')
+def strip():
+ q = request.form
+ uuid = q['uuid']
+ parent = q['parent']
+ pingback = q['pingback']
+ rev = q['rev']
+ url = q['url']
+ ixPerson = q['ixperson']
+ meta = q.get('meta', '')
+ parent = q['parent']
+ if not Repository(parent).exists():
+ raise NotFound
+ queue_repo_strip(pingback, uuid, parent, rev, meta, url, ixPerson)
+ return Response('OK')
+
+@get('/repo/<uuid>/manifest/<rev>')
+def manifest(uuid, rev='tip'):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ if not r.hasrevision(rev):
+ raise BadRequest
+ return {'type': 'manifest', 'manifest': r.manifest(rev)}
+
+@get('/repo/<uuid>/size')
+def size(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ # raise NotFound
+ # Hack around a dumb bug in ourdot's Kiln install
+ return {'type': 'reposize', 'size': 0}
+ return {'type': 'reposize', 'size': r.size()}
+
+@get('/repo/<uuid>/commontag')
+def common_tags(uuid):
+ """
+ This function takes a list of checkins within a repository and
+ will return the nearest common child which has a tag.
+ """
+ r = Repository(uuid)
+ if not 'revs' in request.args or not r.exists():
+ raise BadRequest
+ else:
+ revs = request.args['revs'].split(",");
+
+ if not 'num_tags' in request.args:
+ num_tags = 1
+ else:
+ num_tags = int(request.args['num_tags'])
+ tags = r.commontags(revs, num_tags);
+
+ return {'type': 'tags', 'tags': tags};
+
+@post('/repo/<uuid>/tag/<rev>')
+def create_tag(uuid, rev='tip'):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ try:
+ tag = request.form['tag']
+ ixPerson = request.form['ixPerson']
+ url = request.form['url']
+ username = request.form['username']
+ except KeyError:
+ raise BadRequest
+ force = False
+ if 'force' in request.form and request.form['force'].lower() != 'false':
+ force = True
+ try:
+ r.tag(rev, tag, url, ixPerson, username, force)
+ except ValueError:
+ raise BadRequest
+
+ return {'type': 'tag', 'tag': tag, 'rev': rev}
+
+@get('/repo/<uuid>/tag')
+def get_tags(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ return {'type': 'tags', 'tags': r.tags()}
+
+@get('/repo/<uuid>/changesbetweentags')
+def betweentags(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ try:
+ tag1 = request.args["tag1"]
+ tag2 = request.args["tag2"]
+ except KeyError:
+ raise BadRequest
+
+ try:
+ changesetlist = r.changesbetweentags(tag1, tag2, request.args.get('includelow', 'false').lower() == 'true')
+ return {'type': 'changesets', 'changesets': changesetlist}
+ except:
+ raise BadRequest
+
+@post('/repo/meta')
+def set_meta():
+ '''Takes a JSON dictionary of repo uuid => repo metadata, at the key
+ 'meta', and updates the metadata for those repos. Returns a dictionary
+ of uuid => boolean, with True for repos that were found and False for
+ repos that do not exist.'''
+ exists = {}
+ meta = simplejson.loads(request.form['meta'])
+ for uuid in meta:
r = Repository(uuid)
if not r.exists():
- return rc.NOT_FOUND
- if not r.hasrevision(rev):
- return rc.BAD_REQUEST
- return {'type': 'manifest', 'manifest': r.manifest(rev)}
+ exists[uuid] = False
+ continue
+ exists[uuid] = True
+ r.meta = simplejson.loads(meta[uuid])
+ return exists
-class SizeHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
+@get('/repo/<uuid>/file/<rev>/')
+@get('/repo/<uuid>/file/<rev>/<path:path>')
+def get_file(uuid, path='', rev='tip'):
+ r = Repository(uuid)
+ binaries = int(request.args.get('binaries', 0))
+ images = int(request.args.get('images', 0))
+ can_truncate = not int(request.args.get('no_truncate', 0))
+ no_contents = int(request.args.get('no_contents', 0))
+ path = hexdecode(path)
+ if not r.exists():
+ raise NotFound
+ if not r.hasrevision(rev):
+ raise BadRequest
+ if r.hasfile(path, rev):
+ return filecontents(r, path, rev, binaries, images, can_truncate, no_contents)
+ else:
+ return directorylisting(r, path, rev)
- def read(self, request, uuid):
- r = Repository(uuid)
- if not r.exists():
- # return rc.NOT_FOUND
- # Hack around a dumb bug in ourdot's Kiln install
- return {'type': 'reposize', 'size': 0}
- return {'type': 'reposize', 'size': r.size()}
+@get('/repo/<uuid>/file/<rev1>/<rev2>/<path:path>')
+def get_subtracted_image(uuid, path='', rev1='tip', rev2='tip'):
+ r = Repository(uuid)
+ path = hexdecode(path)
+ if not r.exists():
+ raise NotFound
+ if not r.hasrevision(rev1) or not r.hasrevision(rev2):
+ raise BadRequest
+ if r.hasfile(path, rev1) and r.hasfile(path, rev2):
+ #open the old and new versions of the image in RGB mode, and resize them so that the largest dimension is 300px.
+ oldcontents = Image.open(cStringIO.StringIO(r.filecontents(path, rev1, raw=1)))
+ oldcontents = resizeimage(oldcontents, displaySize=tuple(determinedisplaysize(oldcontents.size, max=(500, 500)))).convert("RGB")
+ newcontents = resizeimage(Image.open(cStringIO.StringIO(r.filecontents(path, rev2, raw=1))), displaySize=tuple(determinedisplaysize(oldcontents.size, max=(500, 500)))).convert("RGB")
+ sub = subtractimages(oldcontents, newcontents)
+
+ #im = Image.new("RGB", (oldcontents.size[0]*3, oldcontents.size[1]))
+ #im.paste(oldcontents, (0,0, sub.size[0], sub.size[1]))
+ #im.paste(sub, (sub.size[0],0, sub.size[0]*2, sub.size[1]))
+ #im.paste(newcontents, (sub.size[0]*2,0,sub.size[0]*3,sub.size[1]))
+ im = sub
+
+ output = cStringIO.StringIO()
+ im.save(output, "PNG")
+ return Response(output.getvalue())
-class FileHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
-
- def read(self, request, uuid, path, rev='tip'):
- r = Repository(uuid)
- binaries = int(request.GET.get('binaries', 0))
- can_truncate = not int(request.GET.get('no_truncate', 0))
- path = hexdecode(path)
- if not r.exists():
- return rc.NOT_FOUND
- if not r.hasrevision(rev):
- return rc.BAD_REQUEST
- if r.hasfile(path, rev):
- return self.filecontents(r, path, rev, binaries, can_truncate)
- else:
- return self.directorylisting(r, path, rev)
-
- def filecontents(self, repo, path, rev, binaries, can_truncate):
- truncated = False
- contents = repo.filecontents(path, rev)
- ft = filetuple(path)
+def filecontents(repo, path, rev, binaries, images, can_truncate, no_contents):
+ truncated = False
+ ft = filetuple(path)
+ if repo.isbfile(path) and not binaries:
+ try:
+ Image.open(cStringIO.StringIO(repo.filecontents(path, rev, raw=True)))
+ filetype = 'image'
+ contents = '(Image file)'
+ except IOError:
+ filetype = 'binary'
+ contents = '(Binary file)'
+ elif no_contents:
+ contents = ''
+ truncated = True
+ filetype = 'text'
+ else:
+ contents = repo.filecontents(path, rev, raw=binaries)
if util.binary(contents):
- if binaries:
- filetype = "base64"
- contents = base64.b64encode(contents)
- else:
- filetype = 'binary'
- contents = '(Binary file)'
+ if not binaries:
+ try:
+ Image.open(cStringIO.StringIO(repo.filecontents(path, rev, raw=True)))
+ filetype = 'image'
+ contents = '(Image file)'
+ except IOError:
+ filetype = 'binary'
+ contents = '(Binary file)'
else:
filetype = 'text'
- if len(contents) > 300000 and can_truncate:
+ truncate_length = 200000
+ if len(contents) > truncate_length and can_truncate:
truncated = True
- contents = contents[:300000]
+ contents = contents[:truncate_length]
+ if binaries:
+ if images:
+ try:
+ imfile = cStringIO.StringIO()
+ resizeimage(Image.open(cStringIO.StringIO(contents))).save(imfile, "PNG")
+ contents = imfile.getvalue()
+ except IOError:
+ pass
+ return Response(contents)
+ else:
return {'type': 'file',
'path': ft['path'],
'bytepath': ft['bytepath'],
@@ -207,209 +379,327 @@
 'filetype': filetype,
'truncated': truncated,
'contents': contents,
- 'formatted_contents': format_file(path, contents) if can_truncate else None}
+ 'formatted_contents': format_file(path, contents) if not truncated else None}
- def directorylisting(self, repo, path, rev):
- files = repo.directorylisting(path, rev)
- if files == None:
- return rc.NOT_FOUND
+def resizeimage(image, displaySize=None):
+ if displaySize == None:
+ displaySize = tuple(determinedisplaysize(image.size))
+ if image.size == displaySize:
+ return image
+ else:
+ return image.resize(displaySize)
+
+def subtractimages(oldimage, newimage):
+ im = Image.new("RGB", oldimage.size)
+ pix = im.load()
+ npix = newimage.load()
+ opix = oldimage.load()
+ for x in xrange(oldimage.size[0]):
+ for y in xrange(newimage.size[1]):
+ pix[x, y] = abs(npix[x, y][0] - opix[x, y][0]), abs(npix[x, y][1] - opix[x, y][1]), abs(npix[x, y][2] - opix[x, y][2])
+ pix[x, y] = leahhighlight(pix[x,y])
+ return im
+
+def andrewdifference(pix):
+ pix = f(pix[0]),f(pix[1]),f(pix[2])
+ return pix
+
+def f(x):
+ return int(256*pow((float(x)/256),.5))
+
+def leahhighlight(pix):
+ if pix[0] >= 18 and pix[1] >= 18 and pix[2] >= 18:
+ pix = 5* pix[0],10* pix[1],5* pix[2]
+ return pix
+
+def directorylisting(repo, path, rev):
+ files = repo.directorylisting(path, rev)
+ if files == None:
+ raise NotFound
+ else:
+ return {'type': 'files', 'files': files}
+
+@get('/repo/<uuid>/annotate/<rev>/<path:path>')
+def annotate(uuid, path, rev):
+ r = Repository(uuid)
+ path = hexdecode(path)
+ if not r.exists() or not r.hasfile(path, rev):
+ raise NotFound
+ contents = r.filecontents(path, rev)
+ if util.binary(contents):
+ return error('Unable to annotate binary files', 'annotate_binary')
+
+ if request.args.get('line'):
+ return linehistory(r, path, rev,
+ int(request.args['line']), int(request.args.get('count', 4)))
+ else:
+ return filehistory(r, path, rev, int(request.args.get('count', 0)))
+
+def linehistory(r, path, rev, line, count):
+ return {'type': 'changesets', 'changesets': r.annotateline(path, rev, line, count)}
+
+def filehistory(r, path, rev, count):
+ return {'type': 'annotation', 'annotation': r.annotate(path, rev, count=count)}
+
+@get('/repo/<uuid>/branches')
+def branches(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ return r.branches()
+
+@post('/repo/<uuid>/changeset') # For many changesets, e.g. reviews.
+@get('/repo/<uuid>/changeset/<revs>')
+@get('/repo/<uuid>/changeset/<revs>/<filename>')
+def changesets(uuid, revs=None, filename=None):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+
+ if request.method == 'POST':
+ revs = request.form['revs']
+ filename = request.form.get('filename', None)
+
+ if filename:
+ filename = hexdecode(filename)
+
+ changedfiles = request.values.get('changedfiles')
+
+ revs = revs.split(':')
+ try:
+ if len(revs) == 1:
+ # did you instead give us an enumeration of individual changesets?
+ revs = revs[0].split(',')
+ if len(revs) == 1:
+ # Only one changeset, allow for file changesets
+ if filename:
+ return dict(r.filechangeset(filename, revs[0]), type='filechangeset')
+ else:
+ return dict(r.changeset(revs[0], changedfiles), type='changeset')
+ else:
+ # multiple changesets
+ return {'type': 'changesets',
+ 'changesets': r.changesets(revs, changedfiles)}
+ elif len(revs) == 2:
+ if filename:
+ limit = int(request.values.get('limit', 0))
+ return {'type': 'filechangesets',
+ 'filechangesets': r.filechangesets(filename, revs[0], revs[1], limit)}
+ else:
+ return {'type': 'changesets',
+ 'changesets': r.changesetrange(revs[0], revs[1], changedfiles)}
+ except:
+ raise BadRequest
+
+@get('/repo/<uuid>/diff/<revs>')
+@get('/repo/<uuid>/diff/<revs>/<filename>')
+def diff(uuid, revs, filename=None):
+ r = Repository(uuid)
+ if filename:
+ filename = hexdecode(filename)
+
+ if not r.exists():
+ raise NotFound
+ revs = revs.split(':')
+ for rev in revs:
+ if not r.hasrevision(rev):
+ raise BadRequest
+
+ # Set maxsize to 80 kB or as requested, unless it's a single file,
+    # in which case serve 200 kB. The value of 80 kB was
+ # lovingly determined by trial and error. If you change it,
+ # please remember at least to do the former.
+ maxsize = int(request.args.get('maxsize') or (200 if filename else 80) * 1000)
+
+ ignorews = request.args.get('ignorews', 'False').lower() == 'true'
+
+ opts = dict(filename=filename, maxsize=maxsize, ignorews=ignorews)
+ if len(revs) > 1:
+ opts['rev2'] = revs[1]
+ udiff, bytecount = r.diff(revs[0], **opts)
+ format_diffs(udiff)
+
+ if filename:
+ if udiff:
+ return udiff[0]
+ return {'type': 'diff'}
+ else:
+ return {'type': 'diffs',
+ 'truncated': bytecount - maxsize > 0,
+ 'diffs': udiff}
+
+@get('/repo/<uuid>/outgoing/<uuid2>')
+def outgoing_get(uuid, uuid2):
+ r1 = Repository(uuid)
+ r2 = Repository(uuid2)
+ nochangesets = int(request.args.get('nochangesets', 0))
+ if not r1.exists():
+ raise NotFound
+ if not r2.exists():
+ raise BadRequest
+ if not r1.isrelated(r2):
+ return error('repositories are not related', 'notrelated')
+ return {'type': 'outgoing', 'newheads': r1.pushwouldmakeheads(r2), 'changesets': [] if nochangesets else r1.outgoing(r2)}
+
+@post('/repo/<uuid>/outgoing/<uuid2>')
+def push_repo(uuid, uuid2):
+ r1 = Repository(uuid)
+ r2 = Repository(uuid2)
+ ixPerson = request.form['ixPerson']
+ url = request.form['website']
+ if not r1.exists():
+ raise NotFound
+ if not r2.exists():
+ raise BadRequest
+ if not r1.isrelated(r2):
+ return error('repositories are not related', 'notrelated')
+ if not r1.outgoing(r2):
+ return error("repositories were already sync'd", 'alreadysyncd')
+ try:
+ return {'type': 'push', 'success': r1.push(r2, url, pusher=ixPerson)}
+ except RepositoryNotSubsetException, e:
+ return error(str(e), 'notstrictsubset')
+ except CreatesNewHeadsException, e:
+ return error(str(e), 'newheads')
+
+@post('/sync')
+def sync():
+ if not settings.HOSTED:
+ raise BadRequest
+ remote = request.form["remote"]
+ if 'repo' not in request.form:
+ # We don't have a specific repo, so we'll trigger a sync to every repo that needs it.
+ repos = syncstatus.need_sync(remote)
+ for repo in repos:
+ asyncpost(request.base_url, dict(remote=remote, repo=repo))
+ return dict(type='sync', success=True, count=len(repos))
+ resp = urllib2.urlopen(urlutil.urljoin(remote, "repo/%s" % request.form['repo']))
+ repo = simplejson.loads(resp.read())
+ failures = []
+ relink = False
+ r = Repository(repo['uuid'], suppresshooks=True)
+ if not r.exists():
+ r.create(repo['meta'])
+ relink = True
+ r.meta = repo['meta']
+ if 'bfile' in request.form:
+ try:
+ sha = request.form['bfile']
+ if bfiles.ishash(sha) and not bfiles.instore(sha):
+ resp = urllib2.urlopen(urlutil.urljoin(remote, 'repo', r.uuid, 'bfile', sha))
+ bfiles.storebfile(resp, sha)
+ except Exception, e:
+ failures.append({'repo': repo['uuid'], 'exception': e})
+ report_exception(e)
+ else:
+ remoteurl = urlutil.urljoin(remote, 'repo', r.uuid)
+ try:
+ r.pull(remoteurl)
+ if settings.DO_INDEXING:
+ queue_repo_index(repo['uuid'])
+ if settings.HOSTED:
+ syncstatus.update_status(r)
+ if relink:
+ r.relink()
+ # Chain the sync along
+ r.sync(site=urlutil.siteurl(request), peers=dict(r.ui.configitems('post_peers')))
+ except LockHeld, e:
+ # No need to report locked repos. They're expected.
+ failures.append({'repo': repo['uuid'], 'exception': e})
+ except Exception, e:
+ failures.append({'repo': repo['uuid'], 'exception': e})
+ report_exception(e, "uuid=%s, r.repo['tip'].rev()=%s, request.form=%s\n"
+ % (repo['uuid'], str(r.repo['tip'].rev()), str(request.form)))
+ d = {'type': 'sync', 'success': not failures}
+ if failures:
+ d['failures'] = failures
+ return d
+
+@get('/version')
+def version():
+ return {'version': settings.KILN_BACKEND_VERSION, 'hg_version': util.version()}
+
+@app.route('/repo/<uuid>/bfile', methods=['GET', 'POST'])
+@app.route('/repo/<uuid>/bfile/<sha>', methods=['GET', 'POST'])
+def bfilehandle(uuid, sha=None):
+ repo = Repository(uuid)
+ if not sha:
+ if request.method == 'GET':
+ return Response(simplejson.dumps(bfiles.listbfiles()))
else:
- return {'type': 'files', 'files': files}
+ raise BadRequest
-class AnnotationHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
+ if request.method == 'GET':
+ try:
+ return Response(bfiles.bfilecontents(sha))
+ except IOError:
+ raise NotFound
- def read(self, request, uuid, path, rev):
- r = Repository(uuid)
- path = hexdecode(path)
- if not r.exists() or not r.hasfile(path, rev):
- return rc.NOT_FOUND
- contents = r.filecontents(path, rev)
- if util.binary(contents):
- return error('Unable to annotate binary files', 'annotate_binary')
+    # bfiles uses PUT to upload files, but Django reads the entire file into memory;
+ # use POST instead so that we can access the file with a generator
+ # NOTE: This may no longer be necessary with flask, but it's the way it works
+ # so there's no reason to change it back right now.
+ elif request.method == 'POST':
+ try:
+ if bfiles.instore(sha):
+ return Response(status=200)
+ elif bfiles.storebfile(request.files['name'], sha):
+ try:
+ repo.sync(site=urlutil.siteurl(request),
+ bfile=sha,
+ peers=dict(repo.ui.configitems('peers')))
+ finally:
+ return Response(status=201)
+ else:
+ #SHA1 is checked by storebfile
+ raise BadRequest('SHA1 of file does not match SHA1 given.')
+ except Exception, e:
+ report_exception(e)
+ raise BadRequest
- if request.GET.get('line'):
- return self.linehistory(r, path, rev,
- int(request.GET['line']), int(request.GET.get('count', 4)))
+ elif request.method == 'HEAD':
+ if bfiles.instore(sha):
+ m = hashlib.sha1()
+ with bfiles.bfilecontents(sha) as fd:
+ while True:
+ data = fd.read(32768)
+ if not data:
+ break
+ m.update(data)
+ response = Response()
+ response.headers['Content-SHA1'] = m.hexdigest()
+ return response
else:
- return self.filehistory(r, path, rev)
+ raise NotFound
+ else:
+ raise BadRequest
- def linehistory(self, r, path, rev, line, count):
- return {'type': 'changesets', 'changesets': r.annotateline(path, rev, line, count)}
+@app.route('/repo/<uuid>/serve', methods=['GET', 'POST'])
+def serve(uuid):
+ r = Repository(uuid, suppressoutput=False)
+ if not r.exists():
+ raise NotFound
+ repo = r.repo
+ if 'ixPerson' in request.args:
+ repo.ui.setconfig('kiln', 'ixperson', request.args['ixPerson'])
+ repo.ui.setconfig('kiln', 'url', request.args['website'])
+ repo.ui.setconfig('kiln', 'site', urlutil.siteurl(request))
+ repo.ui.setconfig('kiln', 'token', request.args.get('token', ''))
+ # if we're about to push, run recover. Don't do this for pull,
+ # because it locks the repo (even if only for a second), and it's
+ # obviously better if we don't have to wait for a push to finish
+ # to pull
+ if request.args['cmd'] == 'unbundle':
+ r.recover()
+ request.environ['REPO_NAME'] = request.environ['PATH_INFO'].strip('/')
+ return hgweb.hgweb(repo.root, baseui=repo.ui)
- def filehistory(self, r, path, rev):
- return {'type': 'annotation', 'annotation': r.annotate(path, rev)}
-
-class ChangesetHandler(AnonymousBaseHandler):
- allowed_methods = ('GET', 'POST')
-
- def create(self, request, uuid):
- revs = request.POST["revs"]
- filename = request.POST.get("filename")
- return self.read(request, uuid, revs, filename)
-
- def read(self, request, uuid, revs, filename=None):
- r = Repository(uuid)
- if filename:
- filename = hexdecode(filename)
-
- if not r.exists():
- return rc.NOT_FOUND
-
- changedfiles = request.REQUEST.get('changedfiles')
-
- revs = revs.split(':')
- try:
- if len(revs) == 1:
- # did you instead give us an enumeration of individual changesets?
- revs = revs[0].split(',')
- if len(revs) == 1:
- # Only one changeset, allow for file changesets
- if filename:
- return dict(r.filechangeset(filename, revs[0]), type='filechangeset')
- else:
- return dict(r.changeset(revs[0], changedfiles), type='changeset')
- else:
- # multiple changesets
- return {'type': 'changesets',
- 'changesets': r.changesets(revs, changedfiles)}
- elif len(revs) == 2:
- if filename:
- limit = int(request.REQUEST.get('limit', 0))
- return {'type': 'filechangesets',
- 'filechangesets': r.filechangesets(filename, revs[0], revs[1], limit)}
- else:
- return {'type': 'changesets',
- 'changesets': r.changesetrange(revs[0], revs[1], changedfiles)}
- except:
- return rc.BAD_REQUEST
-
-class DiffHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
-
- def read(self, request, uuid, revs, filename=None):
- r = Repository(uuid)
- if filename:
- filename = hexdecode(filename)
-
- if not r.exists():
- return rc.NOT_FOUND
- revs = revs.split(':')
- for rev in revs:
- if not r.hasrevision(rev):
- return rc.BAD_REQUEST
-
- # Set maxsize to 100 kB or as requested, unless it's a single file,
- # in which case serve the whole thing. The value of 80 kB was
- # lovingly determined by trial and error. If you change it,
- # please remember at least to do the former.
- maxsize = request.GET.get('maxsize') if not filename else None
- if maxsize:
- maxsize = int(maxsize)
- elif not filename:
- maxsize = 80 * 1000
-
- if len(revs) == 1:
- udiff, bytecount, increment = r.diff(revs[0], filename=filename, maxsize=maxsize)
- format_diffs(udiff)
- else:
- udiff, bytecount, increment = r.diff(revs[0], rev2=revs[1], filename=filename, maxsize=maxsize)
- format_diffs(udiff)
-
- if filename:
- return udiff[0] if udiff else rc.NOT_FOUND
- else:
- return {'type': 'diffs',
- 'truncated': bytecount - maxsize > 0,
- 'increment': increment,
- 'diffs': udiff}
-
-class AutopullHandler(AnonymousBaseHandler):
- allowed_methods = ('POST',)
-
- def create(self, request, uuid, uuid2):
- r1 = Repository(uuid)
- r2 = Repository(uuid2)
- url = request.POST['website']
- if not r1.exists():
- return rc.NOT_FOUND
- if not r2.exists():
- return rc.BAD_REQUEST
- if not r1.isrelated(r2):
- return error('repositories are not related', 'notrelated')
- if not r2.outgoing(r1):
- return error("repositories were already sync'd", 'alreadysyncd')
- return {'type': 'push', 'success': r1.autopull(r2, url)}
-
-@with_pingbacks
-class OutgoingHandler(AnonymousBaseHandler):
- allowed_methods = ('GET', 'POST')
-
- def read(self, request, uuid, uuid2):
- r1 = Repository(uuid)
- r2 = Repository(uuid2)
- nochangesets = int(request.GET.get('nochangesets', 0))
- if not r1.exists():
- return rc.NOT_FOUND
- if not r2.exists():
- return rc.BAD_REQUEST
- if not r1.isrelated(r2):
- return error('repositories are not related', 'notrelated')
- return {'type': 'outgoing', 'newheads': r1.pushwouldmakeheads(r2), 'changesets': [] if nochangesets else r1.outgoing(r2)}
-
- def create(self, request, uuid, uuid2):
- r1 = Repository(uuid)
- r2 = Repository(uuid2)
- ixPerson = request.POST.get('ixPerson')
- url = request.POST['website']
- if not r1.exists():
- return rc.NOT_FOUND
- if not r2.exists():
- return rc.BAD_REQUEST
- if not r1.isrelated(r2):
- return error('repositories are not related', 'notrelated')
- if not r1.outgoing(r2):
- return error("repositories were already sync'd", 'alreadysyncd')
- try:
- return {'type': 'push', 'success': r1.push(r2, url, pusher=ixPerson)}
- except RepositoryNotSubsetException, e:
- return error(str(e), 'notstrictsubset')
- except CreatesNewHeadsException, e:
- return error(str(e), 'newheads')
-
-class SynchronizeHandler(AnonymousBaseHandler):
- allowed_methods = ('POST',)
-
- def create(self, request):
- if not settings.HOSTED:
- return rc.BAD_REQUEST
- remote = request.POST["remote"]
- if 'repo' in request.POST:
- resp = urllib2.urlopen(urlutil.urljoin(remote, "repo/%s/" % request.POST['repo']))
- repos = [simplejson.loads(resp.read())]
- else:
- resp = urllib2.urlopen(urlutil.urljoin(remote, "repo/"))
- repos = simplejson.loads(resp.read())
- u = ui.ui()
- u.setconfig('ui', 'quiet', 'True')
- failures = []
- for repo in repos:
- r = Repository(repo['uuid'], u)
- remoteurl = urlutil.urljoin(remote, "repo/%s/serve" % r.uuid)
- if not r.exists():
- r.create(repo['meta'])
- try:
- r.pull(remoteurl)
- except Exception, e:
- failures.append({'repo': repo['uuid'], 'exception': e})
- reportexception(e)
- d = {'type': 'sync', 'success': not failures}
- if failures:
- d['failures'] = failures
- return d
-
-class VersionHandler(AnonymousBaseHandler):
- allowed_methods = ('GET',)
-
- def read(self, request):
- return {'version': settings.KILN_BACKEND_VERSION}
+@get('/repo/<uuid>/heads')
+def get_heads(uuid):
+ r = Repository(uuid)
+ if not r.exists():
+ raise NotFound
+ def _revtuple(rev):
+ "Return a (rev num, rev id) tuple from a changeset context"
+ return (rev.rev(), rev.hex())
+ return {'heads': [_revtuple(r.repo[head]) for head in r.repo.heads()]}
|
|
@@ -0,0 +1,62 @@
+from flask import request, render_template
+import settings
+from handlers import get, post, jsonify
+from redis import Redis
+from redis.cli import RedisCli
+
+queue_data = dict(
+ lists = [
+ 'kiln:queue',
+ 'kiln:queue:high',
+ 'kiln:queue:low',
+ 'kiln:cancelations',
+ 'kiln:queue:running',
+ 'opengrok:index:running',
+ 'opengrok:cancelations',
+ ],
+ zsets = [
+ 'opengrok:index',
+ ],
+ keys = [
+ 'updaterepo:*:repo',
+ 'updaterepo:*:__failcount',
+ 'httppost:*:url',
+ 'httppost:*:__failcount',
+ ],
+)
+
+def _get_redis():
+ return Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB)
+
+@get('/queuestats/', as_json=False)
+def queuestats():
+ r = _get_redis()
+ data = {}
+ for l in queue_data['lists']:
+ data[l] = r.llen(l)
+ for z in queue_data['zsets']:
+ data[z] = r.zcard(z)
+ for k in queue_data['keys']:
+ data[k] = len(r.keys(k))
+ if request.headers.get('X-Requested-With', '').lower() == 'xmlhttprequest':
+ return jsonify(data)
+ return render_template('queuestats.html', data=data)
+
+@post('/queuestats/redis/cli/')
+def cli():
+ cmd = request.form['cmd']
+ try:
+ r = RedisCli(settings.REDIS_HOST, settings.REDIS_PORT).onecmd(cmd)
+ except Exception, e:
+ r = '*** Unknown exception: %s' % e
+ if r is None:
+ r = ''
+ elif isinstance(r, list):
+ r = '\n'.join(r)
+ d = dict(response=r)
+ if isinstance(r, basestring) and (r.startswith('Error') or r.startswith('***')):
+ d['type'] = 'error'
+ else:
+ d['type'] = 'success'
+ return d
+
|
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
|
|
@@ -0,0 +1,129 @@ + /*
+ * jQuery Color Animations
+ * Copyright 2007 John Resig
+ * Released under the MIT and GPL licenses.
+ */
+
+(function(jQuery){
+
+ // We override the animation for all of these color styles
+    // Install an fx.step handler for each listed color style so jQuery's
+    // animate() can tween them (the override mentioned above).
+    jQuery.each(['backgroundColor', 'borderBottomColor', 'borderLeftColor', 'borderRightColor', 'borderTopColor', 'color', 'outlineColor'], function(i,attr){
+        jQuery.fx.step[attr] = function(fx){
+            if ( !fx.colorInit ) {
+                // First tick only: resolve the element's starting color
+                // (walking ancestors via getColor) and parse the target
+                // value into an [r,g,b] triple.
+                fx.start = getColor( fx.elem, attr );
+                fx.end = getRGB( fx.end );
+                fx.colorInit = true;
+            }
+
+            // Linearly interpolate each channel at progress fx.pos and
+            // clamp to the valid 0-255 range.
+            fx.elem.style[attr] = "rgb(" + [
+                Math.max(Math.min( parseInt((fx.pos * (fx.end[0] - fx.start[0])) + fx.start[0]), 255), 0),
+                Math.max(Math.min( parseInt((fx.pos * (fx.end[1] - fx.start[1])) + fx.start[1]), 255), 0),
+                Math.max(Math.min( parseInt((fx.pos * (fx.end[2] - fx.start[2])) + fx.start[2]), 255), 0)
+            ].join(",") + ")";
+        }
+    });
+
+ // Color Conversion functions from highlightFade
+ // By Blair Mitchelmore
+ // http://jquery.offput.ca/highlightFade/
+
+ // Parse strings looking for color tuples [255,255,255]
+    // Normalize a CSS color value to an [r, g, b] array of numbers.
+    // Accepts an existing 3-element array, rgb()/rgb(%) notation, 6- and
+    // 3-digit hex, Safari 3's transparent rgba(0, 0, 0, 0), or a named
+    // color from the table below; unknown names yield undefined.
+    // NOTE(review): parseInt is used without an explicit radix throughout;
+    // older engines could octal-parse zero-padded components like "08".
+    function getRGB(color) {
+        var result;
+
+        // Check if we're already dealing with an array of colors
+        if ( color && color.constructor == Array && color.length == 3 )
+            return color;
+
+        // Look for rgb(num,num,num)
+        if (result = /rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(color))
+            return [parseInt(result[1]), parseInt(result[2]), parseInt(result[3])];
+
+        // Look for rgb(num%,num%,num%)
+        if (result = /rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(color))
+            return [parseFloat(result[1])*2.55, parseFloat(result[2])*2.55, parseFloat(result[3])*2.55];
+
+        // Look for #a0b1c2
+        if (result = /#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(color))
+            return [parseInt(result[1],16), parseInt(result[2],16), parseInt(result[3],16)];
+
+        // Look for #fff
+        if (result = /#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(color))
+            return [parseInt(result[1]+result[1],16), parseInt(result[2]+result[2],16), parseInt(result[3]+result[3],16)];
+
+        // Look for rgba(0, 0, 0, 0) == transparent in Safari 3
+        if (result = /rgba\(0, 0, 0, 0\)/.exec(color))
+            return colors['transparent'];
+
+        // Otherwise, we're most likely dealing with a named color
+        return colors[jQuery.trim(color).toLowerCase()];
+    }
+
+    // Find the effective color of `elem` for CSS property `attr`: if the
+    // element's own computed value is empty or 'transparent', walk up the
+    // ancestor chain (switching to backgroundColor) until a usable value
+    // or the body element is reached, then hand the string to getRGB.
+    function getColor(elem, attr) {
+        var color;
+
+        do {
+            color = jQuery.curCSS(elem, attr);
+
+            // Keep going until we find an element that has color, or we hit the body
+            if ( color != '' && color != 'transparent' || jQuery.nodeName(elem, "body") )
+                break;
+
+            attr = "backgroundColor";
+        } while ( elem = elem.parentNode );
+
+        return getRGB(color);
+    };
+
+ // Some named colors to work with
+ // From Interface by Stefan Petre
+ // http://interface.eyecon.ro/
+
+    // Named-color lookup table used by getRGB (names are lowercased first).
+    // NOTE(review): 'violet' carries purple's value ([128,0,128]); CSS
+    // violet is [238,130,238].  'transparent' maps to white so animations
+    // involving 'transparent' still have a concrete endpoint.
+    var colors = {
+        aqua:[0,255,255],
+        azure:[240,255,255],
+        beige:[245,245,220],
+        black:[0,0,0],
+        blue:[0,0,255],
+        brown:[165,42,42],
+        cyan:[0,255,255],
+        darkblue:[0,0,139],
+        darkcyan:[0,139,139],
+        darkgrey:[169,169,169],
+        darkgreen:[0,100,0],
+        darkkhaki:[189,183,107],
+        darkmagenta:[139,0,139],
+        darkolivegreen:[85,107,47],
+        darkorange:[255,140,0],
+        darkorchid:[153,50,204],
+        darkred:[139,0,0],
+        darksalmon:[233,150,122],
+        darkviolet:[148,0,211],
+        fuchsia:[255,0,255],
+        gold:[255,215,0],
+        green:[0,128,0],
+        indigo:[75,0,130],
+        khaki:[240,230,140],
+        lightblue:[173,216,230],
+        lightcyan:[224,255,255],
+        lightgreen:[144,238,144],
+        lightgrey:[211,211,211],
+        lightpink:[255,182,193],
+        lightyellow:[255,255,224],
+        lime:[0,255,0],
+        magenta:[255,0,255],
+        maroon:[128,0,0],
+        navy:[0,0,128],
+        olive:[128,128,0],
+        orange:[255,165,0],
+        pink:[255,192,203],
+        purple:[128,0,128],
+        violet:[128,0,128],
+        red:[255,0,0],
+        silver:[192,192,192],
+        white:[255,255,255],
+        yellow:[255,255,0],
+        transparent: [255,255,255]
+    };
+
+})(jQuery);
|
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
|
@@ -0,0 +1,73 @@ + /*
+
+ | __ _____ ________ __
+ | / // _ /__ __ _____ ___ __ _/__ ___/__ ___ ______ __ __ __ ___ / /
+ | __ / // // // // // _ // _// // / / // _ // _// // // \/ // _ \/ /
+ | / / // // // // // ___// / / // / / // ___// / / / / // // /\ // // / /__
+ | \___//____ \\___//____//_/ _\_ / /_//____//_/ /_/ /_//_//_/ /_/ \__\_\___/
+ | \/ /____/ version 0.3.3
+ http://terminal.jcubic.pl
+
+ Licensed under GNU LGPL Version 3 license
+ Copyright (c) 2011 Jakub Jankiewicz <http://jcubic.pl>
+
+ Includes:
+
+ Storage plugin Distributed under the MIT License
+ Copyright (c) 2010 Dave Schindler
+
+ LiveQuery plugin Dual MIT and GPL
+ Copyright (c) 2008 Brandon Aaron (http://brandonaaron.net)
+
+ jQuery Timers licensed with the WTFPL
+ <http://jquery.offput.ca/every/>
+
+ Date: Mon, 07 Mar 2011 11:57:17 +0000
+*/
+Array.prototype.has=function(g){for(var v=this.length;v--;)if(this[v]==g)return true;return false};function get_stack(g){return g?[g.toString().match(/.*\n.*\n/)].concat(get_stack(g.caller)):[]}
+(function(g,v){function S(a,d){var c;if(typeof a==="string"&&typeof d==="string"){localStorage[a]=d;return true}else if(typeof a==="object"&&typeof d==="undefined"){for(c in a)if(a.hasOwnProperty(c))localStorage[c]=a[c];return true}return false}function O(a,d){var c,i;c=new Date;c.setTime(c.getTime()+31536E6);c="; expires="+c.toGMTString();if(typeof a==="string"&&typeof d==="string"){document.cookie=a+"="+d+c+"; path=/";return true}else if(typeof a==="object"&&typeof d==="undefined"){for(i in a)if(a.hasOwnProperty(i))document.cookie=
+i+"="+a[i]+c+"; path=/";return true}return false}function T(a){return localStorage[a]}function U(a){var d,c,i;a+="=";d=document.cookie.split(";");for(c=0;c<d.length;c++){for(i=d[c];i.charAt(0)===" ";)i=i.substring(1,i.length);if(i.indexOf(a)===0)return i.substring(a.length,i.length)}return null}function V(a){return delete localStorage[a]}function W(a){return O(a,"",-1)}function P(a,d){var c=[],i=a.length;if(i<d)return[a];for(var j=0;j<i;j+=d)c.push(a.substring(j,j+d));return c}function x(a){if(typeof a==
+"string"){a=a.replace(/&(?!#[0-9]*;)/g,"&");a=a.replace(/</g,"<").replace(/>/g,">");a=a.replace(/\n/g,"<br/>");a=a.replace(/ /g," ");a=a.replace(/\t/g," ");var d=a.split(X);if(d.length>1)a=g.map(d,function(c){return c[0]=="["?c.replace(Y,function(i,j,p,u,k){i="";if(j.indexOf("b")!=-1)i+="font-weight:bold;";if(j.indexOf("u")!=-1)i+="text-decoration:underline;";if(j.indexOf("i")!=-1)i+="font-style:italic; ";if(p.match(Q))i+="color:"+p+";";if(u.match(Q))i+="background-color:"+
+u;return a='<span style="'+i+'">'+k+"</span>"}):"<span>"+c+"</span>"}).join("");return a}else return""}function R(a){var d=a instanceof Array?a:a?[a]:[],c=0;g.extend(this,{left:function(){if(c===0)c=d.length-1;else--c;return d[c]},right:function(){if(c==d.length-1)c=0;else++c;return d[c]},current:function(){return d[c]},data:function(){return d},reset:function(){c=0},append:function(i){d.push(i);this.reset()}})}function Z(a){var d=a?[a]:[];g.extend(this,{size:function(){return d.length},pop:function(){if(d.length===
+0)return null;else{var c=d[d.length-1];d=d.slice(0,d.length-1);return c}},push:function(c){d=d.concat([c]);return c},top:function(){return d.length>0?d[d.length-1]:null}})}function $(a){var d=true;if(typeof a==="string"&&a!=="")a+="_";var c=g.Storage.get(a+"commands"),i=new R(c?eval("("+c+")"):[""]);g.extend(this,{append:function(j){if(d&&i.current()!=j){i.append(j);g.Storage.set(a+"commands",g.json_stringify(i.data()))}},data:function(){return i.data()},next:function(){return i.right()},last:function(){i.reset()},
+previous:function(){return i.left()},clear:function(){i=new R;g.Storage.remove(a+"commands")},enable:function(){d=true},disable:function(){d=false}})}g.extend(g.fn,{livequery:function(a,d,c){var i=this,j;if(g.isFunction(a)){c=d;d=a;a=v}g.each(g.livequery.queries,function(p,u){if(i.selector==u.selector&&i.context==u.context&&a==u.type&&(!d||d.$lqguid==u.fn.$lqguid)&&(!c||c.$lqguid==u.fn2.$lqguid))return(j=u)&&false});j=j||new g.livequery(this.selector,this.context,a,d,c);j.stopped=false;j.run();return this},
+expire:function(a,d,c){var i=this;if(g.isFunction(a)){c=d;d=a;a=v}g.each(g.livequery.queries,function(j,p){if(i.selector==p.selector&&i.context==p.context&&(!a||a==p.type)&&(!d||d.$lqguid==p.fn.$lqguid)&&(!c||c.$lqguid==p.fn2.$lqguid)&&!this.stopped)g.livequery.stop(p.id)});return this}});g.livequery=function(a,d,c,i,j){this.selector=a;this.context=d||document;this.type=c;this.fn=i;this.fn2=j;this.elements=[];this.stopped=false;this.id=g.livequery.queries.push(this)-1;i.$lqguid=i.$lqguid||g.livequery.guid++;
+if(j)j.$lqguid=j.$lqguid||g.livequery.guid++;return this};g.livequery.prototype={stop:function(){var a=this;if(this.type)this.elements.unbind(this.type,this.fn);else this.fn2&&this.elements.each(function(d,c){a.fn2.apply(c)});this.elements=[];this.stopped=true},run:function(){if(!this.stopped){var a=this,d=this.elements,c=g(this.selector,this.context),i=c.not(d);this.elements=c;if(this.type){i.bind(this.type,this.fn);d.length>0&&g.each(d,function(j,p){g.inArray(p,c)<0&&g.event.remove(p,a.type,a.fn)})}else{i.each(function(){a.fn.apply(this)});
+this.fn2&&d.length>0&&g.each(d,function(j,p){g.inArray(p,c)<0&&a.fn2.apply(p)})}}}};g.extend(g.livequery,{guid:0,queries:[],queue:[],running:false,timeout:null,checkQueue:function(){if(g.livequery.running&&g.livequery.queue.length)for(var a=g.livequery.queue.length;a--;)g.livequery.queries[g.livequery.queue.shift()].run()},pause:function(){g.livequery.running=false},play:function(){g.livequery.running=true;g.livequery.run()},registerPlugin:function(){g.each(arguments,function(a,d){if(g.fn[d]){var c=
+g.fn[d];g.fn[d]=function(){var i=c.apply(this,arguments);g.livequery.run();return i}}})},run:function(a){if(a!=v)g.inArray(a,g.livequery.queue)<0&&g.livequery.queue.push(a);else g.each(g.livequery.queries,function(d){g.inArray(d,g.livequery.queue)<0&&g.livequery.queue.push(d)});g.livequery.timeout&&clearTimeout(g.livequery.timeout);g.livequery.timeout=setTimeout(g.livequery.checkQueue,20)},stop:function(a){a!=v?g.livequery.queries[a].stop():g.each(g.livequery.queries,function(d){g.livequery.queries[d].stop()})}});
+g.livequery.registerPlugin("append","prepend","after","before","wrap","attr","removeAttr","addClass","removeClass","toggleClass","empty","remove");g(function(){g.livequery.play()});var aa=g.prototype.init;g.prototype.init=function(a,d){var c=aa.apply(this,arguments);if(a&&a.selector){c.context=a.context;c.selector=a.selector}if(typeof a=="string"){c.context=d||document;c.selector=a}return c};g.prototype.init.prototype=g.prototype;var J=typeof window.localStorage!=="undefined";g.extend({Storage:{set:J?
+S:O,get:J?T:U,remove:J?V:W}});jQuery.fn.extend({everyTime:function(a,d,c,i,j){return this.each(function(){jQuery.timer.add(this,a,d,c,i,j)})},oneTime:function(a,d,c){return this.each(function(){jQuery.timer.add(this,a,d,c,1)})},stopTime:function(a,d){return this.each(function(){jQuery.timer.remove(this,a,d)})}});jQuery.extend({timer:{guid:1,global:{},regex:/^([0-9]+)\s*(.*s)?$/,powers:{ms:1,cs:10,ds:100,s:1E3,das:1E4,hs:1E5,ks:1E6},timeParse:function(a){if(a==v||a==null)return null;var d=this.regex.exec(jQuery.trim(a.toString()));
+return d[2]?parseInt(d[1],10)*(this.powers[d[2]]||1):a},add:function(a,d,c,i,j,p){var u=0;if(jQuery.isFunction(c)){j||(j=i);i=c;c=d}d=jQuery.timer.timeParse(d);if(!(typeof d!="number"||isNaN(d)||d<=0)){if(j&&j.constructor!=Number){p=!!j;j=0}j=j||0;p=p||false;if(!a.$timers)a.$timers={};a.$timers[c]||(a.$timers[c]={});i.$timerID=i.$timerID||this.guid++;var k=function(){if(!(p&&this.inProgress)){this.inProgress=true;if(++u>j&&j!==0||i.call(a,u)===false)jQuery.timer.remove(a,c,i);this.inProgress=false}};
+k.$timerID=i.$timerID;a.$timers[c][i.$timerID]||(a.$timers[c][i.$timerID]=window.setInterval(k,d));this.global[c]||(this.global[c]=[]);this.global[c].push(a)}},remove:function(a,d,c){var i=a.$timers,j;if(i){if(d){if(i[d]){if(c){if(c.$timerID){window.clearInterval(i[d][c.$timerID]);delete i[d][c.$timerID]}}else for(c in i[d]){window.clearInterval(i[d][c]);delete i[d][c]}for(j in i[d])break;if(!j){j=null;delete i[d]}}}else for(d in i)this.remove(a,d,c);for(j in i)break;if(!j)a.$timers=null}}}});if(jQuery.browser.msie)jQuery(window).one("unload",
+function(){var a=jQuery.timer.global,d;for(d in a)for(var c=a[d],i=c.length;--i;)jQuery.timer.remove(c[i],d)});var X=/(\[\[[biu]*;[^;]*;[^\]]*\][^\]]*\])/g,Y=/\[\[([biu]*);([^;]*);([^\]]*)\]([^\]]*)\]/g,Q=/#([0-9A-Fa-f]{3}|[0-9A-Fa-f]{6})/;g.json_stringify=function(a,d){var c="";d=d===v?1:d;switch(typeof a){case "function":c+=a;break;case "boolean":c+=a?"true":"false";break;case "object":if(a===null)c+="null";else if(a instanceof Array){c+="[";for(var i=a.length,j=0;j<i-1;++j)c+=g.json_stringify(a[j],
+d+1);c+=g.json_stringify(a[i-1],d+1)+"]"}else{c+="{";for(i in a)if(a.hasOwnProperty(i))c+='"'+i+'":'+g.json_stringify(a[i],d+1);c+="}"}break;case "string":i=a;var p={"\\\\":"\\\\",'"':'\\"',"/":"\\/","\\n":"\\n","\\r":"\\r","\\t":"\\t"};for(j in p)if(p.hasOwnProperty(j))i=i.replace(RegExp(j,"g"),p[j]);c+='"'+i+'"';break;case "number":c+=String(a)}c+=d>1?",":"";if(d==1)c=c.replace(/,([\]}])/g,"$1");return c.replace(/([\[{]),/g,"$1")};g.fn.cmd=function(a){function d(e){var q=e.substring(0,j-p-1);e=
+e.substring(j-p-1);return[q].concat(P(e,j))}var c=this;c.addClass("cmd");c.append('<span class="prompt"></span><span></span><span class="cursor"> </span><span></span>');var i=g("<textarea/>").addClass("clipboard").appendTo(c);a.width&&c.width(a.width);var j,p,u=a.mask||false,k="",m=0,C,F=a.enabled,K,z,L=function(){var e=c.find(".cursor");return function(){e.toggleClass("inverted")}}(),N=c.find(".cursor"),f=function(e){function q(b,h){if(h==b.length){w.html(x(b));D.html(" ");o.html("")}else if(h===
+0){w.html("");D.html(x(b.slice(0,1)));o.html(x(b.slice(1)))}else{var n=x(b.slice(0,h));w.html(n);n=b.slice(h,h+1);D.html(n==" "?" ":x(n));h==b.lenght-1?o.html(""):o.html(x(b.slice(h+1)))}}function A(b){return"<div>"+x(b)+"</div>"}function l(b){var h=o;g.each(b,function(n,s){h=g(A(s)).insertAfter(h)})}function G(b){g.each(b,function(h,n){w.before(A(n))})}var D=e.find(".cursor"),w=D.prev(),o=D.next();return function(){var b=u?k.replace(/./g,"*"):k;e.find("div").remove();w.html("");if(b.length>
+j-p-1){var h=d(b),n=h[0].length;if(m<n){q(h[0],m);l(h.slice(1))}else if(m==n){w.before(A(h[0]));q(h[1],0);l(h.slice(2))}else{var s=h.length;if(m<n){q(h[0],m);l(h.slice(1))}else if(m==n){w.before(A(h[0]));q(h[1],0);l(h.slice(2))}else{var r=h.slice(-1)[0];b=b.length-m;if(b<=r.length){G(h.slice(0,-1));n=r.length==b?0:r.length-b;q(r,n)}else if(s==3){w.before("<div>"+x(h[0])+"</div>");q(h[1],m-n-1);o.after("<div>"+x(h[2])+"</div>")}else{r=Math.floor((m+p)/j);s=h[r];n=function(y){for(var t=0,B=y.length;B--;)t+=
+y[B].length;return t}(h.slice(0,r));n=m-n;if(n==j){n=0;s=h[++r]}x(s.slice(0,n));q(s,n);G(h.slice(0,r));l(h.slice(r+1))}}}}else if(b===""){w.html("");D.html(" ");o.html("")}else q(b,m)}}(c),E=function(){var e=c.find(".prompt");return function(){if(typeof C=="string"){p=C.length;e.html(x(C)+" ")}else C(function(q){p=q.length;e.html(x(q)+" ")})}}();g.extend(c,{name:function(e){if(e!==v){K=e;z=new $(e)}else return K},history:function(){return z},set:function(e,q){if(e!==v){k=e;if(!q)m=
+k.length;f()}},insert:function(e,q){if(m==k.length)k+=e;else k=m===0?e+k:k.slice(0,m)+e+k.slice(m);q||(m+=e.length);f()},get:function(){return k},commands:function(e){if(e)a.commands=e;else return e},destroy:function(){g(document.documentElement).unbind(".commandline");c.find(".prompt").remove()},prompt:function(e){if(e===v)return C;else{if(typeof e=="string"||typeof e=="function")C=e;else throw"prompt must be a function or string";E()}},position:function(e){if(typeof e=="number"){m=e<0?0:e>k.length?
+k.length:e;f()}else return m},resize:function(e){if(e)j=e;else{e=c.width();var q=N.innerWidth();j=Math.floor(e/q)}f()},enable:function(){if(!this.isenabled()){c.everyTime(500,"blink",L);F=true}},isenabled:function(){return F},disable:function(){if(this.isenabled()){c.stopTime("blink",L);c.find(".cursor").removeClass("inverted");F=false}},mask:function(e){if(typeof e=="boolean"){u=e;f()}else return u}});c.name(a.name||"");C=a.prompt||">";E();if(a.enabled===v||a.enabled===true)c.enable();g(document.documentElement).keypress(function(e){var q;
+if(e.ctrlKey&&e.which==99)return true;if(a.keypress)q=a.keypress(e);if(q===v||q){if(F)if([38,32,13,40,0,8].has(e.which)&&e.keyCode!=123&&!(e.which==40&&e.shiftKey||e.which==38&&e.shiftKey))return false;else if(!e.ctrlKey&&!(e.altKey&&e.which==100)){c.insert(String.fromCharCode(e.which));return false}}else return q;if(e.which==100&&e.ctrlKey)return false}).keydown(function(e){if(a.keydown&&a.keydown(e)===false)return false;if(F){var q;if(e.keyCode==13){z&&k&&z.append(k);z.last();e=k;c.set("");typeof C==
+"function"&&E();a.commands&&a.commands(e)}else if(e.which==32)c.insert(" ");else if(e.which==8){if(k!==""&&m>0){k=k.slice(0,m-1)+k.slice(m,k.length);--m;f()}}else if(e.which==9&&!(e.ctrlKey||e.altKey))c.insert("\t");else if(e.which==46||e.which==68&&e.ctrlKey){if(k!==""&&m<k.length){k=k.slice(0,m)+k.slice(m+1,k.length);f()}return true}else if(z&&e.which==38||e.which==80&&e.ctrlKey)c.set(z.previous());else if(z&&e.which==40||e.which==78&&e.ctrlKey)c.set(z.next());else if(e.which==27)c.set("");else if(e.which==
+37||e.which==66&&e.ctrlKey)if(e.ctrlKey&&e.which!=66){q=m-1;e=0;for(k[q]==" "&&--q;q>0;--q)if(k[q]==" "&&k[q+1]!=" "){e=q+1;break}c.position(e)}else{if(m>0){--m;f()}}else if(e.which==39||e.which==70&&e.ctrlKey)if(e.ctrlKey&&e.which!=70){k[m]==" "&&++m;e=k.slice(m).match(/[^ ] {2,}| +[^ ]?/);if(!e||e[0].match(/^ +$/))m=k.length;else if(e[0][0]!=" ")m+=e.index+1;else{m+=e.index+e[0].length-1;e[0][e[0].length-1]!=" "&&--m}f()}else{if(m<k.length){++m;f()}}else if(e.which==123)return true;else if(e.which==
+36)c.position(0);else if(e.which==35)c.position(k.length);else if(e.ctrlKey)if(e.shiftKey){if(e.which==84)return true}else{if(!e.altKey)if(e.which==65)c.position(0);else if(e.which==69)c.position(k.length);else if(e.which==88||e.which==67||e.which==87||e.which==84)return true;else if(e.which==86){i.focus();c.oneTime(1,function(){c.insert(i.val());i.blur();i.val("")});return true}else if(e.which==75)if(m===0)c.set("");else m!=k.length&&c.set(k.slice(0,m));else if(e.which==17)return true}else if(e.altKey)e.which==
+68&&c.set(k.slice(0,m)+k.slice(m).replace(/[^ ]+ |[^ ]+$/,""),true);else return true;return false}else if(e.altKey&&e.which==68||e.ctrlKey&&[65,66,68,69,80,78,70].has(e.which)||[35,36,37,38,39,40].has(e.which))return false});return c};var M=[];g.jrpc=function(a,d,c,i,j,p){d=g.json_stringify({jsonrpc:"2.0",method:c,params:i,id:d});return g.ajax({url:a,data:d,success:j,error:p,contentType:"application/json",dataType:"json",beforeSend:function(u){M.push(u)},async:true,cache:false,type:"POST"})};J=/ {13}$/;
+var ba=[["jQuery Terminal","(c) 2011 jcubic"],["JQuery Terminal Emulator v. 0.3.3","Copyright (c) 2011 Jakub Jankiewicz <http://jcubic.pl>".replace(/ *<.*>/,"")],["JQuery Terminal Emulator version version 0.3.3","Copyright (c) 2011 Jakub Jankiewicz <http://jcubic.pl>"],[" _______ ________ __"," / / _ /_ ____________ _/__ ___/______________ _____ / /"," __ / / // / // / _ / _/ // / / / _ / _/ / / \\/ / _ \\/ /","/ / / // / // / ___/ // // / / / ___/ // / / / / /\\ / // / /__",
+"\\___/____ \\\\__/____/_/ \\__ / /_/____/_//_/ /_/ /_/ \\/\\__\\_\\___/"," \\/ /____/ ".replace(J,"")+"version 0.3.3","Copyright (c) 2011 Jakub Jankiewicz <http://jcubic.pl>"],[" __ _____ ________ __"," / // _ /__ __ _____ ___ __ _/__ ___/__ ___ ______ __ __ __ ___ / /"," __ / // // // // // _ // _// // / / // _ // _// // // \\/ // _ \\/ /","/ / // // // // // ___// / / // / / // ___// / / / / // // /\\ // // / /__",
+"\\___//____ \\\\___//____//_/ _\\_ / /_//____//_/ /_/ /_//_//_/ /_/ \\__\\_\\___/"," \\/ /____/ ".replace(J,"")+"version 0.3.3","Copyright (c) 2011 Jakub Jankiewicz <http://jcubic.pl>"]],H=new function(a){var d=a?[a]:[],c=0;g.extend(this,{rotate:function(){if(d.length==1)return d[0];else{if(c==d.length-1)c=0;else++c;return d[c]}},length:function(){return d.length},set:function(i){for(var j=d.length;j--;)if(d[j]===i){c=j;return}this.append(i)},
+front:function(){return d[c]},append:function(i){d.push(i)}})};M=[];g.fn.terminal=function(a,d){function c(){var b=g("<span>x</span>").appendTo(f),h=Math.floor(f.width()/b.width());b.remove();return h}function i(b,h){if(typeof b=="string")f.error("["+h+"]: "+b);else{f.error("["+h+"]: "+b.fileName+": "+b.message);f.pause();g.get(b.fileName,function(n){f.resume();var s=b.lineNumber-1;f.error("["+b.lineNumber+"]: "+n.split("\n")[s])})}}function j(b,h){try{if(typeof h=="function")h(function(){});
+else if(typeof h!="string")throw b+" must be string or function";}catch(n){i(n,b.toUpperCase());return false}return true}function p(b){b.scrollTop(f.attr("scrollHeight"))}function u(b){b=typeof b=="string"?b:String(b);var h;if(b.length>A){b=b.split("\n");h=g("<div></div>");for(var n=b.length,s=0;s<n;++s)if(b[s]===""||b[s]=="\r")h.append("<div> </div>");else if(b[s].length>A){var r=P(b[s],A);g.each(r,function(y,t){g("<div/>").html(x(t)).appendTo(h)})}else g("<div/>").html(x(b[s])).appendTo(h)}else h=
+g("<div/>").html(x(b));e.append(h);h.width("100%");p(f);return h}function k(b,h){var n=1,s=function(r,y){h.pause();g.jrpc(b,n++,r,y,function(t){if(t.error)h.error("[RPC] "+t.error.message);else if(typeof t.result=="string")h.echo(t.result);else if(t.result instanceof Array)h.echo(t.result.join(" "));else if(typeof t.result=="object"){var B="",I;for(I in t.result)if(t.result.hasOwnProperty(I))B+=I+": "+t.result[I]+"\n";h.echo(B)}h.resume()},function(t,B){h.error("[AJAX] "+B+" - Server reponse is: \n"+
+t.responseText);h.resume()})};return function(r,y){if(r!==""){var t,B;if(r.match(/[^ ]* /)){r=r.split(/ */);t=r[0];B=r.slice(1)}else{t=r;B=[]}if(!l.login||t=="help")s(t,B);else{var I=y.token();I?s(t,[I].concat(B)):y.error("[AUTH] Access denied (no token)")}}}}function m(b){var h=o.prompt();if(o.mask())b=b.replace(/./g,"*");typeof h=="function"?h(function(n){f.echo(n+" "+b)}):f.echo(h+" "+b)}function C(b){try{var h=w.top();if(b=="exit"&&l.exit)if(w.size()==1)l.login?K():f.echo("You can exit from main interpeter");
+else f.pop("exit");else{m(b);b=="clear"&&l.clear?f.clear():h.eval(b,f)}}catch(n){i(n,"USER");throw n;}}function F(){var b=null;o.prompt("login:");l.history&&o.history().disable();o.commands(function(h){try{m(h);if(b){o.mask(false);f.pause();l.login(b,h,function(s){if(s){var r=l.name;r=r?"_"+r:"";g.Storage.set("token"+r,s);g.Storage.set("login"+r,b);o.commands(C);L()}else{f.error("Wrong password try again");o.prompt("login:");b=null}f.resume();l.history&&o.history().enable()})}else{b=h;o.prompt("password:");
+o.mask(true)}}catch(n){i(n,"LOGIN",f);throw n;}})}function K(){var b=l.name;b=b?"_"+b:"";g.Storage.remove("token"+b,null);g.Storage.remove("login"+b,null);l.history&&o.history().disable();F()}function z(){var b=w.top(),h="";if(b.name!==v&&b.name!=="")h+=b.name+"_";h+=q;o.name(h);o.prompt(b.prompt);l.history&&o.history().enable();o.set("");if(typeof b.onStart=="function")b.onStart(f)}function L(){z();if(d.greetings===v)f.echo(f.signature);else d.greetings&&f.echo(d.greetings);if(typeof l.onInit=="function")l.onInit(f)}
+function N(b){if(l.keypress&&l.keypress(b,f)===false)return false;if(f.paused()){if(b.which==100&&b.ctrlKey){for(b=M.length;b--;){var h=M[b];if(4!=h.readyState)try{h.abort()}catch(n){f.error("error in aborting ajax")}}f.resume();return false}}else if(b.which==100&&b.ctrlKey){if(l.exit&&o.get()==="")if(w.size()>1||l.login!==v)f.pop("");else{f.resume();f.echo("")}return false}else if(b.which==118&&b.ctrlKey){f.oneTime(1,function(){f.attr({scrollTop:f.attr("scrollHeight")})});return true}else if(b.keyCode==
+9&&b.ctrlKey)f.focus(false);else if(b.keyCode==34)f.scroll(f.height());else b.keyCode==33?f.scroll(-f.height()):f.attr({scrollTop:f.attr("scrollHeight")})}var f=this,E=[],e,q=H.length(),A,l={name:null,prompt:">",history:true,exit:true,clear:true,enabled:true,login:null,onInit:null,onExit:null,keypress:null,keydown:null};if(d){d.width&&f.width(d.width);d.height&&f.height(d.height);g.extend(l,d)}var G=!l.enabled;if(f.length===0)throw'Sorry, but terminal said that "'+f.selector+'" is not valid selector';
+if(f.data("terminal")){f.ajaxSend(function(b,h){M.push(h)});return f.data("terminal")}e=g("<div>").addClass("terminal-output").appendTo(f);f.addClass("terminal").append("<div/>");g.extend(f,{clear:function(){e.html("");o.set("");E=[];f.attr({scrollTop:0});return f},paused:function(){return G},pause:function(){if(o){f.disable();o.hide()}return f},resume:function(){if(o){f.enable();o.show();p(f)}return f},cols:function(){return A},rows:function(){return E.length},history:function(){return o.history().data()},
+next:function(){if(H.length()==1)return f;else{H.front().disable();var b=H.rotate().enable(),h=b.offset().top-50;g("html,body").animate({scrollTop:h},500);return b}},focus:function(b){f.oneTime(1,function(){if(H.length()==1)b===false?f.disable():f.enable();else if(b===false)f.next();else{H.front().disable();H.set(f);f.enable()}});return f},enable:function(){A===v&&f.resize();if(G)if(o){o.enable();G=false}return f},disable:function(){if(o){G=true;o.disable()}return f},enabled:function(){return G},
+signature:function(){var b=f.cols();b=b<15?null:b<35?0:b<55?1:b<64?2:b<75?3:4;return b!==null?ba[b].join("\n")+"\n":""},get_command:function(){return o.get()},insert:function(b){o.insert(b);return f},set_prompt:function(b){j("prompt",b)&&o.prompt(b);return f},set_command:function(b){o.set(b);return f},set_mask:function(b){o.mask(b);return f},get_output:function(){return g.map(E,function(b,h){return typeof h=="function"?h():h}).get().join("\n")},resize:function(b,h){if(b&&h){f.width(b);f.height(h)}A=
+c();o.resize(A);var n=e.detach();e.html("");g.each(E,function(s,r){u(typeof r=="function"?r():r)});f.prepend(n);p(f);return f},echo:function(b){E.push(b);return u(typeof b=="function"?b():b)},error:function(b){f.echo(b).addClass("error")},scroll:function(b){b>f.attr("scrollTop")&&b>0&&f.attr("scrollTop",0);var h=f.attr("scrollTop");f.attr("scrollTop",h+b);return f},logout:l.login?function(){for(;w.size()>1;)w.pop();K();return f}:function(){throw"You don't have login function";},token:l.login?function(){var b=
+l.name;return g.Storage.get("token"+(b?"_"+b:""))}:null,login_name:l.login?function(){var b=l.name;return g.Storage.get("login_"+(b?"_"+b:""))}:null,name:function(){return l.name},push:function(b,h){if(!h.prompt||j("prompt",h.prompt)){if(typeof b=="string")b=k(h.eval,f);w.push(g.extend({eval:b},h));z()}return f},pop:function(b){b!==v&&m(b);if(w.top().name===l.name){if(l.login){K();if(typeof l.onExit=="function")l.onExit(f)}}else{b=w.pop();z();if(typeof b.onExit=="function")b.onExit(f)}return f}});
+var D;switch(typeof a){case "string":D=a;a=k(a,f);break;case "object":a=function(b){return function(h){if(h!=""){h=h.split(/ */);var n=h[0];h=h.slice(1);var s=b[n];typeof s=="function"?s.apply(f,h):f.echo("Command '"+n+"' Not Found")}}}(a)}if(D&&typeof l.login=="string"||D)l.login=function(b){var h=1;return function(n,s,r){f.pause();g.jrpc(D,h++,b,[n,s],function(y){f.resume();!y.error&&y.result?r(y.result):r(null)},function(y,t){f.resume();f.error("[AJAX\ Response: "+t+"\n"+y.responseText)})}}(typeof l.login==
+"boolean"?"login":l.login);if(j("prompt",l.prompt)){var w=new Z({name:l.name,eval:a,prompt:l.prompt,greetings:l.greetings}),o=f.find(".terminal-output").next().cmd({prompt:l.prompt,history:l.history,width:"100%",keydown:l.keydown?function(b){return l.keydown(b,f)}:null,keypress:N,commands:C});f.livequery(function(){f.resize()});H.append(f);l.enabled===true?f.focus():f.disable();g(window).resize(f.resize);f.click(function(){f.focus()});f.token&&!f.token()&&f.login_name&&!f.login_name()?F():L();typeof g.fn.init.prototype.mousewheel===
+"function"&&f.mousewheel(function(b,h){h>0?f.scroll(-40):f.scroll(40);return false},true)}f.data("terminal",f);return f}})(jQuery);
|
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
|
@@ -0,0 +1,12 @@ + <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" >
+  <head>
+    {# Base template: child pages fill the title, extra_head and content
+       blocks.  jQuery 1.4.4 is loaded on every page from the Google CDN
+       -- NOTE(review): pages will lack jQuery when offline; confirm this
+       admin UI is always used with internet access. #}
+    <meta http-equiv="X-UA-Compatible" content="IE=8" />
+    <title>{% block title %}{% endblock title %}</title>
+    <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js"></script>
+    {% block extra_head %}{% endblock extra_head %}
+  </head>
+  <body>
+    {% block content %}{% endblock content %}
+  </body>
+</html>
|
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
This file's diff was not loaded because this changeset is very large. Load changes Loading... |
|
@@ -0,0 +1,7 @@ + #!/bin/sh
+
+# Launch miniredis in the background unless a pid file already exists
+# (presumably miniredis.py writes ~/miniredis.pid via --pid; verify).
+# NOTE(review): a crash leaves a stale pid file behind, and this script
+# will then refuse to start until it is removed by hand -- hence the
+# ambiguous message below.
+if [ ! -e ~/miniredis.pid ]; then
+    nohup python miniredis.py -p 56784 -d ~/miniredis.db -l ~/miniredis.out --pid ~/miniredis.pid &
+else
+    echo MiniRedis is running, or crashed
+fi
|
Loading...