Mirror of https://github.com/mozilla/ubiquity.git
Added python-modules dir for build system from jetpack's repository. Soon we'll replace manage.py to use paver and jsbridge from this directory.
This commit is contained in:
Parent
54a8be154b
Commit
74b78d8e3f
|
@ -0,0 +1,159 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Mozilla Corporation Code.
|
||||
#
|
||||
# The Initial Developer of the Original Code is
|
||||
# Mikeal Rogers.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2008-2009
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Mikeal Rogers <mikeal.rogers@gmail.com>
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
|
||||
import socket
|
||||
import os
|
||||
import copy
|
||||
from time import sleep
|
||||
|
||||
import mozrunner
|
||||
|
||||
from network import Bridge, BackChannel, create_network
|
||||
from jsobjects import JSObject
|
||||
|
||||
settings_env = 'JSBRIDGE_SETTINGS_FILE'
|
||||
|
||||
parent = os.path.abspath(os.path.dirname(__file__))
|
||||
extension_path = os.path.join(parent, 'extension')
|
||||
|
||||
window_string = "Components.classes['@mozilla.org/appshell/window-mediator;1'].getService(Components.interfaces.nsIWindowMediator).getMostRecentWindow('')"
|
||||
|
||||
def wait_and_create_network(host, port, timeout=10):
    """Poll ``(host, port)`` until a TCP connection succeeds, then build
    the jsbridge network objects.

    Retries every quarter second for at most ``timeout`` seconds.  The
    original loop never advanced ``ttl``, so it would spin forever when
    the in-app server failed to start; the counter is now incremented on
    every iteration.  The probe socket is also closed on a failed
    ``connect`` (previously it leaked).

    :param host: hostname or IP the jsbridge server listens on.
    :param port: TCP port of the jsbridge server.
    :param timeout: maximum number of seconds to keep probing.
    :returns: whatever ``create_network(host, port)`` returns
        (back channel and bridge — see jsbridge.network).
    """
    elapsed = 0.0
    while elapsed < timeout:
        sleep(.25)
        elapsed += .25
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                s.connect((host, port))
            finally:
                # Always release the probe socket, connected or not.
                s.close()
            break
        except socket.error:
            # Server not up yet; keep polling until the deadline.
            pass
    return create_network(host, port)
|
||||
|
||||
class CLI(mozrunner.CLI):
    """Command-line interface for launching a Mozilla app with jsbridge.

    Extends mozrunner's CLI with options to install debugging plugins
    (``-D``), open an interactive Python shell against the bridge
    (``-s``/``-u``), and pick the TCP port the in-app jsbridge server
    listens on (``-P``).
    """

    parser_options = copy.copy(mozrunner.CLI.parser_options)
    parser_options[('-D', '--debug',)] = dict(dest="debug",
                                              action="store_true",
                                              help="Install debugging plugins.",
                                              metavar="JSBRIDGE_DEBUG",
                                              default=False)
    parser_options[('-s', '--shell',)] = dict(dest="shell",
                                              action="store_true",
                                              help="Start a Python shell",
                                              metavar="JSBRIDGE_SHELL",
                                              default=False)
    parser_options[('-u', '--usecode',)] = dict(dest="usecode",
                                                action="store_true",
                                                help="Use code module instead of iPython",
                                                default=False)
    parser_options[('-P', '--port')] = dict(dest="port", default="24242",
                                            help="TCP port to run jsbridge on.")

    # Extra XPIs installed into the profile when --debug is passed.
    debug_plugins = [os.path.join(parent, 'xpi', 'xush-0.2-fx.xpi')]

    def get_profile(self, *args, **kwargs):
        """Build the profile, installing the jsbridge extension and, in
        debug mode, each XPI in ``debug_plugins``."""
        if self.options.debug:
            # Debug XPIs may not declare compatibility with the target
            # application, so disable the compatibility check.
            kwargs.setdefault('preferences', {}).update(
                {'extensions.checkCompatibility': False})
        profile = super(CLI, self).get_profile(*args, **kwargs)
        profile.install_plugin(extension_path)
        if self.options.debug:
            for plugin in self.debug_plugins:
                profile.install_plugin(plugin)
        return profile

    def get_runner(self, *args, **kwargs):
        """Build the runner, forwarding the jsbridge port — and
        ``-jsconsole`` in debug mode — on the app's command line."""
        runner = super(CLI, self).get_runner(*args, **kwargs)
        if self.options.debug:
            runner.cmdargs.append('-jsconsole')
        runner.cmdargs += ['-jsbridge', self.options.port]
        return runner

    def run(self):
        """Start the app, connect the bridge, then either drop into the
        shell or block until the app exits; the profile is cleaned up in
        either case."""
        runner = self.parse_and_get_runner()
        runner.start()
        self.start_jsbridge_network()
        if self.options.shell:
            self.start_shell(runner)
        else:
            try:
                runner.wait()
            except KeyboardInterrupt:
                runner.stop()
        runner.profile.cleanup()

    def start_shell(self, runner):
        """Run an interactive shell with the bridge objects in scope,
        then stop the runner.

        Uses IPython when it is importable and ``--usecode`` was not
        given; otherwise falls back to the stdlib ``code`` module.
        """
        try:
            import IPython
        except ImportError:
            # Was a bare `except:`, which also swallowed
            # KeyboardInterrupt and SystemExit.
            IPython = None
        jsobj = JSObject(self.bridge, window_string)
        # One namespace shared by both shell flavors (the dict literal
        # was previously duplicated in each branch).
        shell_locals = {"jsobj": jsobj,
                        "getBrowserWindow": lambda: getBrowserWindow(self.bridge),
                        "back_channel": self.back_channel,
                        }
        if IPython is None or self.options.usecode:
            import code
            code.interact(local=shell_locals)
        else:
            from IPython.Shell import IPShellEmbed
            ipshell = IPShellEmbed([])
            ipshell(local_ns=shell_locals)
        runner.stop()

    def start_jsbridge_network(self, timeout=10):
        """Connect to the in-app jsbridge server on localhost, setting
        ``self.back_channel`` and ``self.bridge``."""
        port = int(self.options.port)
        host = '127.0.0.1'
        self.back_channel, self.bridge = wait_and_create_network(host, port, timeout)
|
||||
|
||||
def cli():
    """Console-script entry point: build a CLI instance and run it."""
    command_line = CLI()
    command_line.run()
|
||||
|
||||
def getBrowserWindow(bridge):
    """Return a JSObject proxy for the most recently used top-level
    window, resolved through the window mediator service."""
    most_recent_window = "Components.classes['@mozilla.org/appshell/window-mediator;1'].getService(Components.interfaces.nsIWindowMediator).getMostRecentWindow('')"
    return JSObject(bridge, most_recent_window)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
resource jsbridge resource/
|
||||
|
||||
content jsbridge chrome/content/
|
||||
|
||||
overlay chrome://browser/content/browser.xul chrome://jsbridge/content/overlay.xul
|
||||
overlay chrome://messenger/content/mailWindowOverlay.xul chrome://jsbridge/content/overlay.xul
|
||||
|
||||
overlay chrome://calendar/content/calendar.xul chrome://jsbridge/content/overlay.xul
|
||||
|
||||
overlay windowtype:Songbird:Main chrome://jsbridge/content/overlay.xul
|
|
@ -0,0 +1,40 @@
|
|||
// ***** BEGIN LICENSE BLOCK *****
|
||||
// Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
//
|
||||
// The contents of this file are subject to the Mozilla Public License Version
|
||||
// 1.1 (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
// http://www.mozilla.org/MPL/
|
||||
//
|
||||
// Software distributed under the License is distributed on an "AS IS" basis,
|
||||
// WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
// for the specific language governing rights and limitations under the
|
||||
// License.
|
||||
//
|
||||
// The Original Code is Mozilla Corporation Code.
|
||||
//
|
||||
// The Initial Developer of the Original Code is
|
||||
// Mikeal Rogers.
|
||||
// Portions created by the Initial Developer are Copyright (C) 2008
|
||||
// the Initial Developer. All Rights Reserved.
|
||||
//
|
||||
// Contributor(s):
|
||||
// Mikeal Rogers <mikeal.rogers@gmail.com>
|
||||
//
|
||||
// Alternatively, the contents of this file may be used under the terms of
|
||||
// either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
// the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
// in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
// of those above. If you wish to allow use of your version of this file only
|
||||
// under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
// use your version of this file under the terms of the MPL, indicate your
|
||||
// decision by deleting the provisions above and replace them with the notice
|
||||
// and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
// the provisions above, a recipient may use your version of this file under
|
||||
// the terms of any one of the MPL, the GPL or the LGPL.
|
||||
//
|
||||
// ***** END LICENSE BLOCK *****
|
||||
|
||||
var __init = {}; Components.utils.import('resource://jsbridge/modules/init.js', __init);
|
||||
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
<?xml version="1.0"?>
|
||||
<overlay id="jsbridge-overlay"
|
||||
xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
|
||||
<script src="overlay.js"/>
|
||||
</overlay>
|
|
@ -0,0 +1,172 @@
|
|||
// Shorthand aliases for the XPCOM interfaces used in this component.
const nsIAppShellService    = Components.interfaces.nsIAppShellService;
const nsISupports           = Components.interfaces.nsISupports;
const nsICategoryManager    = Components.interfaces.nsICategoryManager;
const nsIComponentRegistrar = Components.interfaces.nsIComponentRegistrar;
const nsICommandLine        = Components.interfaces.nsICommandLine;
const nsICommandLineHandler = Components.interfaces.nsICommandLineHandler;
const nsIFactory            = Components.interfaces.nsIFactory;
const nsIModule             = Components.interfaces.nsIModule;
const nsIWindowWatcher      = Components.interfaces.nsIWindowWatcher;

// Chrome URI of this extension.
const CHROME_URI = "chrome://jsbridge/content/";

// Contract ID, class ID and category of the -jsbridge command-line
// handler; these must be unique to this application (CID generated
// with uuidgen).
const clh_contractID = "@mozilla.org/commandlinehandler/general-startup;1?type=jsbridge";
const clh_CID = Components.ID("{2872d428-14f6-11de-ac86-001f5bd9235c}");

// Category names are sorted alphabetically; typical command-line
// handlers use a category that begins with the letter "m".
const clh_category = "jsbridge";

// Console service handle, kept at module scope for debug logging.
var aConsoleService = Components.classes["@mozilla.org/consoleservice;1"].
      getService(Components.interfaces.nsIConsoleService);
|
||||
|
||||
/**
|
||||
* Utility functions
|
||||
*/
|
||||
|
||||
/**
|
||||
* Opens a chrome window.
|
||||
* @param aChromeURISpec a string specifying the URI of the window to open.
|
||||
* @param aArgument an argument to pass to the window (may be null)
|
||||
*/
|
||||
/**
 * Open a chrome window.
 * @param aChromeURISpec string URI of the window to open.
 * @param aArgument      argument forwarded to the new window (may be null).
 */
function openWindow(aChromeURISpec, aArgument)
{
  var watcher = Components.classes["@mozilla.org/embedcomp/window-watcher;1"]
                          .getService(Components.interfaces.nsIWindowWatcher);
  var features = "chrome,menubar,toolbar,status,resizable,dialog=no";
  watcher.openWindow(null, aChromeURISpec, "_blank", features, aArgument);
}
|
||||
|
||||
/**
|
||||
* The XPCOM component that implements nsICommandLineHandler.
|
||||
* It also implements nsIFactory to serve as its own singleton factory.
|
||||
*/
|
||||
/**
 * The XPCOM component that implements nsICommandLineHandler.
 * It also implements nsIFactory to serve as its own singleton factory.
 */
const jsbridgeHandler = {
  /* nsISupports */
  QueryInterface : function clh_QI(iid)
  {
    if (iid.equals(nsICommandLineHandler) ||
        iid.equals(nsIFactory) ||
        iid.equals(nsISupports))
      return this;

    throw Components.results.NS_ERROR_NO_INTERFACE;
  },

  /* nsICommandLineHandler */

  handle : function clh_handle(cmdLine)
  {
    try {
      // handleFlagWithParam() returns null when -jsbridge was not
      // passed and throws when the flag is present without a value
      // (handled by the catch below).
      var port = cmdLine.handleFlagWithParam("jsbridge", false);
      // The import/startServer sequence was previously duplicated in
      // both branches; fold it into a single call that defaults to
      // port 24242.  parseInt is given an explicit radix so values
      // with leading zeros are not read as octal.
      var server = {};
      Components.utils.import('resource://jsbridge/modules/server.js', server);
      server.startServer(port ? parseInt(port, 10) : 24242);
    }
    catch (e) {
      Components.utils.reportError("incorrect parameter passed to -jsbridge on the command line.");
    }
  },

  // Flag descriptions start at character 24 and lines wrap at 72
  // characters, per the guidelines in nsICommandLineHandler.idl; the
  // string must end with a newline.
  helpInfo : " -jsbridge Port to run jsbridge on.\n",

  /* nsIFactory */

  createInstance : function clh_CI(outer, iid)
  {
    if (outer != null)
      throw Components.results.NS_ERROR_NO_AGGREGATION;

    return this.QueryInterface(iid);
  },

  lockFactory : function clh_lock(lock)
  {
    /* no-op */
  }
};
|
||||
|
||||
/**
|
||||
* The XPCOM glue that implements nsIModule
|
||||
*/
|
||||
/**
 * The XPCOM glue that implements nsIModule and (un)registers the
 * -jsbridge command-line handler component.
 */
const jsbridgeHandlerModule = {
  /* nsISupports */
  QueryInterface : function mod_QI(iid)
  {
    var supported = iid.equals(nsIModule) || iid.equals(nsISupports);
    if (!supported)
      throw Components.results.NS_ERROR_NO_INTERFACE;
    return this;
  },

  /* nsIModule */
  getClassObject : function mod_gch(compMgr, cid, iid)
  {
    // Only the command-line handler's CID is served by this module.
    if (!cid.equals(clh_CID))
      throw Components.results.NS_ERROR_NOT_REGISTERED;
    return jsbridgeHandler.QueryInterface(iid);
  },

  registerSelf : function mod_regself(compMgr, fileSpec, location, type)
  {
    compMgr.QueryInterface(nsIComponentRegistrar);

    // Register the factory, then add it to the command-line-handler
    // category so the platform invokes it at startup.
    compMgr.registerFactoryLocation(clh_CID, "jsbridgeHandler",
                                    clh_contractID, fileSpec,
                                    location, type);

    var categoryManager = Components.classes["@mozilla.org/categorymanager;1"].
          getService(nsICategoryManager);
    categoryManager.addCategoryEntry("command-line-handler", clh_category,
                                     clh_contractID, true, true);
  },

  unregisterSelf : function mod_unreg(compMgr, location, type)
  {
    compMgr.QueryInterface(nsIComponentRegistrar);
    compMgr.unregisterFactoryLocation(clh_CID, location);

    var categoryManager = Components.classes["@mozilla.org/categorymanager;1"].
          getService(nsICategoryManager);
    categoryManager.deleteCategoryEntry("command-line-handler", clh_category);
  },

  canUnload : function (compMgr)
  {
    return true;
  }
};
|
||||
|
||||
/* The NSGetModule function is the magic entry point that XPCOM uses to find what XPCOM objects
|
||||
* this component provides
|
||||
*/
|
||||
/* NSGetModule is the entry point XPCOM uses to discover the objects
 * this component provides; hand back the module implementation. */
function NSGetModule(comMgr, fileSpec)
{
  return jsbridgeHandlerModule;
}
|
|
@ -0,0 +1,52 @@
|
|||
<?xml version="1.0"?>
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<em:id>jsbridge@mozilla.com</em:id>
|
||||
<em:name>jsbridge</em:name>
|
||||
<em:version>1.0</em:version>
|
||||
<em:creator>Mikeal Rogers</em:creator>
|
||||
<em:description>Python to JavaScript bridge</em:description>
|
||||
<em:targetApplication>
|
||||
<!-- Firefox -->
|
||||
<Description>
|
||||
<em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
|
||||
<em:minVersion>1.5</em:minVersion>
|
||||
<em:maxVersion>3.6a1pre</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
<em:targetApplication>
|
||||
<!-- Thunderbird -->
|
||||
<Description>
|
||||
<em:id>{3550f703-e582-4d05-9a08-453d09bdfdc6}</em:id>
|
||||
<em:minVersion>3*</em:minVersion>
|
||||
<em:maxVersion>3.*</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
<em:targetApplication>
|
||||
<!-- Sunbird -->
|
||||
<Description>
|
||||
<em:id>{718e30fb-e89b-41dd-9da7-e25a45638b28}</em:id>
|
||||
<em:minVersion>0.6a1</em:minVersion>
|
||||
<em:maxVersion>1.0pre</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
<em:targetApplication>
|
||||
<!-- Songbird -->
|
||||
<Description>
|
||||
<em:id>songbird@songbirdnest.com</em:id>
|
||||
<em:minVersion>0.3pre</em:minVersion>
|
||||
<em:maxVersion>1.0.*</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
<em:targetApplication>
|
||||
<Description>
|
||||
<em:id>toolkit@mozilla.org</em:id>
|
||||
<em:minVersion>1.9</em:minVersion>
|
||||
<em:maxVersion>1.9.*</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
</Description>
|
||||
|
||||
|
||||
</RDF>
|
|
@ -0,0 +1,13 @@
|
|||
// Module exports: the registry of back channels plus helpers to add a
// channel and to broadcast events over every registered channel.
var EXPORTED_SYMBOLS = ["backchannels", "fireEvent", "addBackChannel"];

// All currently registered back channels.
var backchannels = [];

// Broadcast {eventType, result} over every registered back channel.
// The original used the non-standard `for each` loop with an
// undeclared loop variable, leaking `backchannel` into the global
// scope; forEach keeps the variable local and is standard JS 1.6+.
var fireEvent = function (name, obj) {
  backchannels.forEach(function (backchannel) {
    backchannel.session.encodeOut({'eventType':name, 'result':obj});
  });
};

// Register a back channel so it receives future fireEvent broadcasts.
var addBackChannel = function (backchannel) {
  backchannels.push(backchannel);
};
|
|
@ -0,0 +1,40 @@
|
|||
// ***** BEGIN LICENSE BLOCK *****
|
||||
// Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
//
|
||||
// The contents of this file are subject to the Mozilla Public License Version
|
||||
// 1.1 (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
// http://www.mozilla.org/MPL/
|
||||
//
|
||||
// Software distributed under the License is distributed on an "AS IS" basis,
|
||||
// WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
// for the specific language governing rights and limitations under the
|
||||
// License.
|
||||
//
|
||||
// The Original Code is Mozilla Corporation Code.
|
||||
//
|
||||
// The Initial Developer of the Original Code is
|
||||
// Mikeal Rogers.
|
||||
// Portions created by the Initial Developer are Copyright (C) 2008
|
||||
// the Initial Developer. All Rights Reserved.
|
||||
//
|
||||
// Contributor(s):
|
||||
// Mikeal Rogers <mikeal.rogers@gmail.com>
|
||||
//
|
||||
// Alternatively, the contents of this file may be used under the terms of
|
||||
// either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
// the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
// in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
// of those above. If you wish to allow use of your version of this file only
|
||||
// under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
// use your version of this file under the terms of the MPL, indicate your
|
||||
// decision by deleting the provisions above and replace them with the notice
|
||||
// and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
// the provisions above, a recipient may use your version of this file under
|
||||
// the terms of any one of the MPL, the GPL or the LGPL.
|
||||
//
|
||||
// ***** END LICENSE BLOCK *****
|
||||
|
||||
// Re-export the jsbridge server module under the symbol "server".
var EXPORTED_SYMBOLS = ["server"];

var server = {};
Components.utils.import('resource://jsbridge/modules/server.js', server);
|
|
@ -0,0 +1,471 @@
|
|||
/*
|
||||
http://www.JSON.org/json2.js
|
||||
2008-05-25
|
||||
|
||||
Public Domain.
|
||||
|
||||
NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
||||
|
||||
See http://www.JSON.org/js.html
|
||||
|
||||
This file creates a global JSON object containing two methods: stringify
|
||||
and parse.
|
||||
|
||||
JSON.stringify(value, replacer, space)
|
||||
value any JavaScript value, usually an object or array.
|
||||
|
||||
replacer an optional parameter that determines how object
|
||||
values are stringified for objects without a toJSON
|
||||
method. It can be a function or an array.
|
||||
|
||||
space an optional parameter that specifies the indentation
|
||||
of nested structures. If it is omitted, the text will
|
||||
be packed without extra whitespace. If it is a number,
|
||||
it will specify the number of spaces to indent at each
|
||||
level. If it is a string (such as '\t' or ' '),
|
||||
it contains the characters used to indent at each level.
|
||||
|
||||
This method produces a JSON text from a JavaScript value.
|
||||
|
||||
When an object value is found, if the object contains a toJSON
|
||||
method, its toJSON method will be called and the result will be
|
||||
stringified. A toJSON method does not serialize: it returns the
|
||||
value represented by the name/value pair that should be serialized,
|
||||
or undefined if nothing should be serialized. The toJSON method
|
||||
will be passed the key associated with the value, and this will be
|
||||
bound to the object holding the key.
|
||||
|
||||
For example, this would serialize Dates as ISO strings.
|
||||
|
||||
Date.prototype.toJSON = function (key) {
|
||||
function f(n) {
|
||||
// Format integers to have at least two digits.
|
||||
return n < 10 ? '0' + n : n;
|
||||
}
|
||||
|
||||
return this.getUTCFullYear() + '-' +
|
||||
f(this.getUTCMonth() + 1) + '-' +
|
||||
f(this.getUTCDate()) + 'T' +
|
||||
f(this.getUTCHours()) + ':' +
|
||||
f(this.getUTCMinutes()) + ':' +
|
||||
f(this.getUTCSeconds()) + 'Z';
|
||||
};
|
||||
|
||||
You can provide an optional replacer method. It will be passed the
|
||||
key and value of each member, with this bound to the containing
|
||||
object. The value that is returned from your method will be
|
||||
serialized. If your method returns undefined, then the member will
|
||||
be excluded from the serialization.
|
||||
|
||||
If the replacer parameter is an array, then it will be used to
|
||||
select the members to be serialized. It filters the results such
|
||||
that only members with keys listed in the replacer array are
|
||||
stringified.
|
||||
|
||||
Values that do not have JSON representations, such as undefined or
|
||||
functions, will not be serialized. Such values in objects will be
|
||||
dropped; in arrays they will be replaced with null. You can use
|
||||
a replacer function to replace those with JSON values.
|
||||
JSON.stringify(undefined) returns undefined.
|
||||
|
||||
The optional space parameter produces a stringification of the
|
||||
value that is filled with line breaks and indentation to make it
|
||||
easier to read.
|
||||
|
||||
If the space parameter is a non-empty string, then that string will
|
||||
be used for indentation. If the space parameter is a number, then
|
||||
the indentation will be that many spaces.
|
||||
|
||||
Example:
|
||||
|
||||
text = JSON.stringify(['e', {pluribus: 'unum'}]);
|
||||
// text is '["e",{"pluribus":"unum"}]'
|
||||
|
||||
|
||||
text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
|
||||
// text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
|
||||
|
||||
text = JSON.stringify([new Date()], function (key, value) {
|
||||
return this[key] instanceof Date ?
|
||||
'Date(' + this[key] + ')' : value;
|
||||
});
|
||||
// text is '["Date(---current time---)"]'
|
||||
|
||||
|
||||
JSON.parse(text, reviver)
|
||||
This method parses a JSON text to produce an object or array.
|
||||
It can throw a SyntaxError exception.
|
||||
|
||||
The optional reviver parameter is a function that can filter and
|
||||
transform the results. It receives each of the keys and values,
|
||||
and its return value is used instead of the original value.
|
||||
If it returns what it received, then the structure is not modified.
|
||||
If it returns undefined then the member is deleted.
|
||||
|
||||
Example:
|
||||
|
||||
// Parse the text. Values that look like ISO date strings will
|
||||
// be converted to Date objects.
|
||||
|
||||
myData = JSON.parse(text, function (key, value) {
|
||||
var a;
|
||||
if (typeof value === 'string') {
|
||||
a =
|
||||
/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
|
||||
if (a) {
|
||||
return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
|
||||
+a[5], +a[6]));
|
||||
}
|
||||
}
|
||||
return value;
|
||||
});
|
||||
|
||||
myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
|
||||
var d;
|
||||
if (typeof value === 'string' &&
|
||||
value.slice(0, 5) === 'Date(' &&
|
||||
value.slice(-1) === ')') {
|
||||
d = new Date(value.slice(5, -1));
|
||||
if (d) {
|
||||
return d;
|
||||
}
|
||||
}
|
||||
return value;
|
||||
});
|
||||
|
||||
|
||||
This is a reference implementation. You are free to copy, modify, or
|
||||
redistribute.
|
||||
|
||||
This code should be minified before deployment.
|
||||
See http://javascript.crockford.com/jsmin.html
|
||||
|
||||
USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
|
||||
NOT CONTROL.
|
||||
*/
|
||||
|
||||
/*jslint evil: true */
|
||||
|
||||
/*global JSON */
|
||||
|
||||
/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", call,
|
||||
charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, getUTCMinutes,
|
||||
getUTCMonth, getUTCSeconds, hasOwnProperty, join, lastIndex, length,
|
||||
parse, propertyIsEnumerable, prototype, push, replace, slice, stringify,
|
||||
test, toJSON, toString
|
||||
*/
|
||||
|
||||
var EXPORTED_SYMBOLS = ["JSON"];
|
||||
|
||||
if (!this.JSON) {
|
||||
|
||||
// Create a JSON object only if one does not already exist. We create the
|
||||
// object in a closure to avoid creating global variables.
|
||||
|
||||
JSON = function () {
|
||||
|
||||
function f(n) {
|
||||
// Format integers to have at least two digits.
|
||||
return n < 10 ? '0' + n : n;
|
||||
}
|
||||
|
||||
Date.prototype.toJSON = function (key) {
|
||||
|
||||
return this.getUTCFullYear() + '-' +
|
||||
f(this.getUTCMonth() + 1) + '-' +
|
||||
f(this.getUTCDate()) + 'T' +
|
||||
f(this.getUTCHours()) + ':' +
|
||||
f(this.getUTCMinutes()) + ':' +
|
||||
f(this.getUTCSeconds()) + 'Z';
|
||||
};
|
||||
|
||||
var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
|
||||
escapeable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
|
||||
gap,
|
||||
indent,
|
||||
meta = { // table of character substitutions
|
||||
'\b': '\\b',
|
||||
'\t': '\\t',
|
||||
'\n': '\\n',
|
||||
'\f': '\\f',
|
||||
'\r': '\\r',
|
||||
'"' : '\\"',
|
||||
'\\': '\\\\'
|
||||
},
|
||||
rep;
|
||||
|
||||
|
||||
function quote(string) {
|
||||
|
||||
// If the string contains no control characters, no quote characters, and no
|
||||
// backslash characters, then we can safely slap some quotes around it.
|
||||
// Otherwise we must also replace the offending characters with safe escape
|
||||
// sequences.
|
||||
|
||||
escapeable.lastIndex = 0;
|
||||
return escapeable.test(string) ?
|
||||
'"' + string.replace(escapeable, function (a) {
|
||||
var c = meta[a];
|
||||
if (typeof c === 'string') {
|
||||
return c;
|
||||
}
|
||||
return '\\u' + ('0000' +
|
||||
(+(a.charCodeAt(0))).toString(16)).slice(-4);
|
||||
}) + '"' :
|
||||
'"' + string + '"';
|
||||
}
|
||||
|
||||
|
||||
function str(key, holder) {
|
||||
|
||||
// Produce a string from holder[key].
|
||||
|
||||
var i, // The loop counter.
|
||||
k, // The member key.
|
||||
v, // The member value.
|
||||
length,
|
||||
mind = gap,
|
||||
partial,
|
||||
value = holder[key];
|
||||
|
||||
// If the value has a toJSON method, call it to obtain a replacement value.
|
||||
|
||||
if (value && typeof value === 'object' &&
|
||||
typeof value.toJSON === 'function') {
|
||||
value = value.toJSON(key);
|
||||
}
|
||||
|
||||
// If we were called with a replacer function, then call the replacer to
|
||||
// obtain a replacement value.
|
||||
|
||||
if (typeof rep === 'function') {
|
||||
value = rep.call(holder, key, value);
|
||||
}
|
||||
|
||||
// What happens next depends on the value's type.
|
||||
|
||||
switch (typeof value) {
|
||||
case 'string':
|
||||
return quote(value);
|
||||
|
||||
case 'number':
|
||||
|
||||
// JSON numbers must be finite. Encode non-finite numbers as null.
|
||||
|
||||
return isFinite(value) ? String(value) : 'null';
|
||||
|
||||
case 'boolean':
|
||||
case 'null':
|
||||
|
||||
// If the value is a boolean or null, convert it to a string. Note:
|
||||
// typeof null does not produce 'null'. The case is included here in
|
||||
// the remote chance that this gets fixed someday.
|
||||
|
||||
return String(value);
|
||||
|
||||
// If the type is 'object', we might be dealing with an object or an array or
|
||||
// null.
|
||||
|
||||
case 'object':
|
||||
|
||||
// Due to a specification blunder in ECMAScript, typeof null is 'object',
|
||||
// so watch out for that case.
|
||||
|
||||
if (!value) {
|
||||
return 'null';
|
||||
}
|
||||
|
||||
// Make an array to hold the partial results of stringifying this object value.
|
||||
|
||||
gap += indent;
|
||||
partial = [];
|
||||
|
||||
// If the object has a dontEnum length property, we'll treat it as an array.
|
||||
|
||||
if (typeof value.length === 'number' &&
|
||||
!(value.propertyIsEnumerable('length'))) {
|
||||
|
||||
// The object is an array. Stringify every element. Use null as a placeholder
|
||||
// for non-JSON values.
|
||||
|
||||
length = value.length;
|
||||
for (i = 0; i < length; i += 1) {
|
||||
partial[i] = str(i, value) || 'null';
|
||||
}
|
||||
|
||||
// Join all of the elements together, separated with commas, and wrap them in
|
||||
// brackets.
|
||||
|
||||
v = partial.length === 0 ? '[]' :
|
||||
gap ? '[\n' + gap +
|
||||
partial.join(',\n' + gap) + '\n' +
|
||||
mind + ']' :
|
||||
'[' + partial.join(',') + ']';
|
||||
gap = mind;
|
||||
return v;
|
||||
}
|
||||
|
||||
// If the replacer is an array, use it to select the members to be stringified.
|
||||
|
||||
if (rep && typeof rep === 'object') {
|
||||
length = rep.length;
|
||||
for (i = 0; i < length; i += 1) {
|
||||
k = rep[i];
|
||||
if (typeof k === 'string') {
|
||||
v = str(k, value, rep);
|
||||
if (v) {
|
||||
partial.push(quote(k) + (gap ? ': ' : ':') + v);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
// Otherwise, iterate through all of the keys in the object.
|
||||
|
||||
for (k in value) {
|
||||
if (Object.hasOwnProperty.call(value, k)) {
|
||||
v = str(k, value, rep);
|
||||
if (v) {
|
||||
partial.push(quote(k) + (gap ? ': ' : ':') + v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Join all of the member texts together, separated with commas,
|
||||
// and wrap them in braces.
|
||||
|
||||
v = partial.length === 0 ? '{}' :
|
||||
gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
|
||||
mind + '}' : '{' + partial.join(',') + '}';
|
||||
gap = mind;
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
// Return the JSON object containing the stringify and parse methods.
|
||||
|
||||
return {
|
||||
stringify: function (value, replacer, space) {
|
||||
|
||||
// The stringify method takes a value and an optional replacer, and an optional
|
||||
// space parameter, and returns a JSON text. The replacer can be a function
|
||||
// that can replace values, or an array of strings that will select the keys.
|
||||
// A default replacer method can be provided. Use of the space parameter can
|
||||
// produce text that is more easily readable.
|
||||
|
||||
var i;
|
||||
gap = '';
|
||||
indent = '';
|
||||
|
||||
// If the space parameter is a number, make an indent string containing that
|
||||
// many spaces.
|
||||
|
||||
if (typeof space === 'number') {
|
||||
for (i = 0; i < space; i += 1) {
|
||||
indent += ' ';
|
||||
}
|
||||
|
||||
// If the space parameter is a string, it will be used as the indent string.
|
||||
|
||||
} else if (typeof space === 'string') {
|
||||
indent = space;
|
||||
}
|
||||
|
||||
// If there is a replacer, it must be a function or an array.
|
||||
// Otherwise, throw an error.
|
||||
|
||||
rep = replacer;
|
||||
if (replacer && typeof replacer !== 'function' &&
|
||||
(typeof replacer !== 'object' ||
|
||||
typeof replacer.length !== 'number')) {
|
||||
throw new Error('JSON.stringify');
|
||||
}
|
||||
|
||||
// Make a fake root object containing our value under the key of ''.
|
||||
// Return the result of stringifying the value.
|
||||
|
||||
return str('', {'': value});
|
||||
},
|
||||
|
||||
|
||||
parse: function (text, reviver) {
|
||||
|
||||
// The parse method takes a text and an optional reviver function, and returns
|
||||
// a JavaScript value if the text is a valid JSON text.
|
||||
|
||||
var j;
|
||||
|
||||
function walk(holder, key) {
|
||||
|
||||
// The walk method is used to recursively walk the resulting structure so
|
||||
// that modifications can be made.
|
||||
|
||||
var k, v, value = holder[key];
|
||||
if (value && typeof value === 'object') {
|
||||
for (k in value) {
|
||||
if (Object.hasOwnProperty.call(value, k)) {
|
||||
v = walk(value, k);
|
||||
if (v !== undefined) {
|
||||
value[k] = v;
|
||||
} else {
|
||||
delete value[k];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return reviver.call(holder, key, value);
|
||||
}
|
||||
|
||||
|
||||
// Parsing happens in four stages. In the first stage, we replace certain
|
||||
// Unicode characters with escape sequences. JavaScript handles many characters
|
||||
// incorrectly, either silently deleting them, or treating them as line endings.
|
||||
|
||||
cx.lastIndex = 0;
|
||||
if (cx.test(text)) {
|
||||
text = text.replace(cx, function (a) {
|
||||
return '\\u' + ('0000' +
|
||||
(+(a.charCodeAt(0))).toString(16)).slice(-4);
|
||||
});
|
||||
}
|
||||
|
||||
// In the second stage, we run the text against regular expressions that look
|
||||
// for non-JSON patterns. We are especially concerned with '()' and 'new'
|
||||
// because they can cause invocation, and '=' because it can cause mutation.
|
||||
// But just to be safe, we want to reject all unexpected forms.
|
||||
|
||||
// We split the second stage into 4 regexp operations in order to work around
|
||||
// crippling inefficiencies in IE's and Safari's regexp engines. First we
|
||||
// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
|
||||
// replace all simple value tokens with ']' characters. Third, we delete all
|
||||
// open brackets that follow a colon or comma or that begin the text. Finally,
|
||||
// we look to see that the remaining characters are only whitespace or ']' or
|
||||
// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
|
||||
|
||||
if (/^[\],:{}\s]*$/.
|
||||
test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
|
||||
replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
|
||||
replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
|
||||
|
||||
// In the third stage we use the eval function to compile the text into a
|
||||
// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
|
||||
// in JavaScript: it can begin a block or an object literal. We wrap the text
|
||||
// in parens to eliminate the ambiguity.
|
||||
|
||||
j = eval('(' + text + ')');
|
||||
|
||||
// In the optional fourth stage, we recursively walk the new structure, passing
|
||||
// each name/value pair to a reviver function for possible transformation.
|
||||
|
||||
return typeof reviver === 'function' ?
|
||||
walk({'': j}, '') : j;
|
||||
}
|
||||
|
||||
// If the text is not JSON parseable, then a SyntaxError is thrown.
|
||||
|
||||
throw new SyntaxError('JSON.parse');
|
||||
}
|
||||
};
|
||||
}();
|
||||
}
|
|
@ -0,0 +1,304 @@
|
|||
// ***** BEGIN LICENSE BLOCK *****
|
||||
// Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
//
|
||||
// The contents of this file are subject to the Mozilla Public License Version
|
||||
// 1.1 (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
// http://www.mozilla.org/MPL/
|
||||
//
|
||||
// Software distributed under the License is distributed on an "AS IS" basis,
|
||||
// WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
// for the specific language governing rights and limitations under the
|
||||
// License.
|
||||
//
|
||||
// The Original Code is Mozilla Corporation Code.
|
||||
//
|
||||
// The Initial Developer of the Original Code is
|
||||
// based on the MozRepl project.
|
||||
// Portions created by the Initial Developer are Copyright (C) 2008
|
||||
// the Initial Developer. All Rights Reserved.
|
||||
//
|
||||
// Contributor(s):
|
||||
// Mikeal Rogers <mikeal.rogers@gmail.com>
|
||||
// Massimiliano Mirra <bard@hyperstruct.net>
|
||||
//
|
||||
// Alternatively, the contents of this file may be used under the terms of
|
||||
// either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
// the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
// in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
// of those above. If you wish to allow use of your version of this file only
|
||||
// under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
// use your version of this file under the terms of the MPL, indicate your
|
||||
// decision by deleting the provisions above and replace them with the notice
|
||||
// and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
// the provisions above, a recipient may use your version of this file under
|
||||
// the terms of any one of the MPL, the GPL or the LGPL.
|
||||
//
|
||||
// ***** END LICENSE BLOCK *****
|
||||
|
||||
var EXPORTED_SYMBOLS = ["Server", "server", "AsyncRead", "Session", "sessions", "globalRegistry", "startServer"];
|
||||
|
||||
var events = {}; Components.utils.import("resource://jsbridge/modules/events.js", events);
|
||||
|
||||
const Cc = Components.classes;
|
||||
const Ci = Components.interfaces;
|
||||
const loader = Cc['@mozilla.org/moz/jssubscript-loader;1']
|
||||
.getService(Ci.mozIJSSubScriptLoader);
|
||||
|
||||
var hwindow = Components.classes["@mozilla.org/appshell/appShellService;1"]
|
||||
.getService(Components.interfaces.nsIAppShellService)
|
||||
.hiddenDOMWindow;
|
||||
|
||||
var nativeJSON = Components.classes["@mozilla.org/dom/json;1"]
|
||||
.createInstance(Components.interfaces.nsIJSON);
|
||||
|
||||
var json2 = Components.utils.import("resource://jsbridge/modules/json2.js");
|
||||
|
||||
var jsonEncode = json2.JSON.stringify;
|
||||
|
||||
var uuidgen = Components.classes["@mozilla.org/uuid-generator;1"]
|
||||
.getService(Components.interfaces.nsIUUIDGenerator);
|
||||
|
||||
// nsIStreamListener that pumps incoming socket bytes into a Session and
// tears the session down when the stream ends.
function AsyncRead (session) {
  this.session = session;
}

// Nothing to set up when the request begins.
AsyncRead.prototype.onStartRequest = function (request, context) {};

// Connection closed: let the owning session clean itself up.
AsyncRead.prototype.onStopRequest = function (request, context, status) {
  this.session.onQuit();
}

// `count` bytes are ready: read them through the session's converter
// stream (readString fills the out-param object) and hand the decoded
// text to the session.
AsyncRead.prototype.onDataAvailable = function (request, context, inputStream, offset, count) {
  var chunk = {};
  this.session.instream.readString(count, chunk);
  this.session.receive(chunk.value);
}
|
||||
|
||||
|
||||
|
||||
globalRegistry = {};
|
||||
|
||||
// Server-side command object exposed to the client as `bridge` inside the
// session sandbox. All methods reply through session.encodeOut with an
// envelope of the form {result: bool, uuid: ..., ...}.
function Bridge (session) {
  this.session = session;
  // Shared across all sessions: objects pinned by uuid (see _set).
  this.registry = globalRegistry;
}

// Record the client's declared type; back-channel clients are additionally
// registered with the events module so server-initiated events reach them.
Bridge.prototype._register = function (_type) {
  this.bridgeType = _type;
  if (_type == "backchannel") {
    events.addBackChannel(this);
  }
}

// Protocol entry point: register this connection, then acknowledge with a
// result envelope tagged by the caller-supplied uuid.
Bridge.prototype.register = function (uuid, _type) {
  try {
    this._register(_type);
    // `var` is function-scoped, so `passed` is visible after the try block;
    // it stays undefined when _register threw.
    var passed = true;
  } catch(e) {
    if (typeof(e) == "string") {
      var exception = e;
    } else {
      var exception = {'name':e.name, 'message':e.message};
    }
    this.session.encodeOut({'result':false, 'exception':exception, 'uuid':uuid});
  }
  if (passed != undefined) {
    this.session.encodeOut({"result":true, 'eventType':'register', 'uuid':uuid});
  }

}
|
||||
// Build a JSON-serializable description of `obj`: its jsbridge type tag,
// plus either its enumerable attribute names (objects/arrays) or its raw
// value (primitives). Functions are described by type only.
// BUG FIX: the loop variable `i` was never declared (leaking an implicit
// global) and `type` was re-declared with `var` in several branches; all
// locals are now declared once up front. Behavior is unchanged.
Bridge.prototype._describe = function (obj) {
  var response = {};
  var type;
  var i;
  if (obj == null) {
    type = "null";
  } else {
    type = typeof(obj);
  }
  if (type == "object") {
    // Anything carrying a `length` property is reported as an array.
    if (obj.length != undefined) {
      type = "array";
    }
    response.attributes = [];
    // for-in deliberately includes inherited enumerable properties.
    for (i in obj) {
      response.attributes = response.attributes.concat(i);
    }
  }
  else if (type != "function"){
    response.data = obj;
  }
  response.type = type;
  return response;
}
|
||||
// Protocol entry point: describe `obj` (see _describe) and send the
// description back as a successful result envelope tagged with `uuid`.
Bridge.prototype.describe = function (uuid, obj) {
  var response = this._describe(obj);
  response.uuid = uuid;
  response.result = true;
  this.session.encodeOut(response);
}
|
||||
// Pin `obj` in the shared registry under a fresh uuid so the client can
// refer to it by name later; returns the registry key.
Bridge.prototype._set = function (obj) {
  var uuid = uuidgen.generateUUID().toString();
  this.registry[uuid] = obj;
  return uuid;
}

// Protocol entry point: pin `obj` and reply with the JavaScript expression
// the client should use to reference it ('bridge.registry["<uuid>"]').
Bridge.prototype.set = function (uuid, obj) {
  var ruuid = this._set(obj);
  this.session.encodeOut({'result':true, 'data':'bridge.registry["'+ruuid+'"]', 'uuid':uuid});
}
|
||||
// Assign obj[name] = value and return the assigned value.
Bridge.prototype._setAttribute = function (obj, name, value) {
  obj[name] = value;
  return value;
}

// Protocol entry point: set an attribute on `obj`. On success the new
// attribute value is pinned and its registry reference is sent back (via
// set); on failure an exception envelope is sent instead.
Bridge.prototype.setAttribute = function (uuid, obj, name, value) {
  // log(uuid, String(obj), name, String(value))
  try {
    // `var` is function-scoped: `result` stays undefined when the
    // assignment threw, which suppresses the success reply below.
    var result = this._setAttribute(obj, name, value);
  } catch(e) {
    if (typeof(e) == "string") {
      var exception = e;
    } else {
      var exception = {'name':e.name, 'message':e.message};
    }
    this.session.encodeOut({'result':false, 'exception':exception, 'uuid':uuid});
  }
  // NOTE(review): assigning an `undefined` value also suppresses the
  // success reply -- confirm that is acceptable to callers.
  if (result != undefined) {
    this.set(uuid, obj[name]);
  }
}
|
||||
// Invoke `func` with `args`, using the session sandbox as `this`.
Bridge.prototype._execFunction = function (func, args) {
  return func.apply(this.session.sandbox, args);
}

// Protocol entry point: call `func(args)`. On success the return value is
// pinned and referenced back to the client (or a null-data success reply is
// sent when the function returned undefined). On failure a single
// exception envelope is sent.
// BUG FIX: the catch block previously set `result = true` after sending the
// failure envelope, so the code below it sent a SECOND, success envelope for
// the same uuid. Now the failure path replies once and returns.
Bridge.prototype.execFunction = function (uuid, func, args) {
  try {
    var data = this._execFunction(func, args);
  } catch(e) {
    if (typeof(e) == "string") {
      var exception = e;
    } else {
      var exception = {'name':e.name, 'message':e.message};
    }
    this.session.encodeOut({'result':false, 'exception':exception, 'uuid':uuid});
    return;
  }
  if (data != undefined) {
    this.set(uuid, data);
  } else {
    this.session.encodeOut({'result':true, 'data':null, 'uuid':uuid});
  }
}
|
||||
|
||||
// Module-scope `this` captured as the sandbox principal for new sessions.
backstage = this;

// One accepted TCP connection: owns the sandbox the client's commands are
// evaluated in plus the blocking output / async input streams.
function Session (transport) {
  // BUG FIX: was `this.transpart` (typo); keep the transport reachable
  // under its intended name.
  this.transport = transport;
  this.sandbox = Components.utils.Sandbox(backstage);
  // The client-visible `bridge` command object (see Bridge above).
  this.sandbox.bridge = new Bridge(this);
  this.sandbox.openPreferences = hwindow.openPreferences;
  try {
    this.outstream = transport.openOutputStream(Ci.nsITransport.OPEN_BLOCKING , 0, 0);
    this.stream = transport.openInputStream(0, 0, 0);
    // Decode incoming bytes as UTF-8, replacing malformed sequences.
    this.instream = Cc['@mozilla.org/intl/converter-input-stream;1']
                    .createInstance(Ci.nsIConverterInputStream);
    this.instream.init(this.stream, 'UTF-8', 1024,
                       Ci.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER);
  } catch(e) {
    log('jsbridge: Error: ' + e);
  }
  // log('jsbridge: Accepted connection.');

  // Pump the raw input stream through an AsyncRead listener.
  this.pump = Cc['@mozilla.org/network/input-stream-pump;1']
              .createInstance(Ci.nsIInputStreamPump);
  this.pump.init(this.stream, -1, -1, 0, 0, false);
  this.pump.asyncRead(new AsyncRead(this), null);
}
|
||||
// Write `string` to this session's blocking output stream.
// Throws a plain string if given a non-string or if the write fails.
Session.prototype.onOutput = function(string) {
  // log('jsbridge write: '+string)
  if (typeof(string) != "string") {
    throw "This is not a string"
  }
  try {
    this.outstream.write(string, string.length);
  } catch (e) {
    throw "Why is this failing "+string
  }
  // this.outstream.write(string, string.length);
};
|
||||
// Tear this session down: close both stream halves and drop it from the
// global session registry.
// BUG FIX: this previously called sessions.remove(session), reading the
// implicit global `session` (the most recently accepted connection), so
// with several live connections the WRONG session could be removed.
Session.prototype.onQuit = function() {
  this.instream.close();
  this.outstream.close();
  sessions.remove(this);
};
|
||||
// JSON-encode `obj` and send it to the client. If encoding or writing
// throws, send a result:false envelope describing the exception instead.
Session.prototype.encodeOut = function (obj) {
  try {
    this.onOutput(jsonEncode(obj));
  } catch(e) {
    if (typeof(e) == "string") {
      var exception = e;
    } else {
      var exception = {'name':e.name, 'message':e.message};
    }
    this.onOutput(jsonEncode({'result':false, 'exception':exception}));
  }

}
|
||||
// Evaluate a chunk of client-sent JavaScript inside this session's
// sandbox (the client sends statements such as bridge.execFunction(...)).
Session.prototype.receive = function(data) {
  // log('jsbrige receive: '+data);
  Components.utils.evalInSandbox(data, this.sandbox);
}
|
||||
|
||||
// Registry of live Session objects for this server.
var sessions = {
  _list: [],
  // Track a newly accepted session.
  add: function(session) {
    this._list.push(session);
  },
  // Forget a session; harmless when it is not tracked.
  remove: function(session) {
    var index = this._list.indexOf(session);
    if(index != -1)
      this._list.splice(index, 1);
  },
  get: function(index) {
    return this._list[index];
  },
  // Shut every session down and clear the registry.
  // BUG FIX: the body was `session.quit;` -- a bare property access (a
  // no-op), and Session has no `quit` method at all. Invoke the real
  // teardown (onQuit) instead, iterating over a copy because teardown may
  // mutate the list.
  quit: function() {
    this._list.slice().forEach(
      function(session) { session.onQuit(); });
    this._list.splice(0, this._list.length);
  }
};
|
||||
|
||||
// TCP server listening on `port`; each accepted socket becomes a Session.
function Server (port) {
  this.port = port;
}

// Create the server socket and start listening asynchronously, with this
// object as the nsIServerSocketListener. Errors are logged, not rethrown.
// NOTE(review): the second init argument (true) presumably restricts the
// socket to loopback -- verify against the nsIServerSocket documentation.
Server.prototype.start = function () {
  try {
    this.serv = Cc['@mozilla.org/network/server-socket;1']
                .createInstance(Ci.nsIServerSocket);
    this.serv.init(this.port, true, -1);
    this.serv.asyncListen(this);
    // log('jsbridge: Listening...');
  } catch(e) {
    log('jsbridge: Exception: ' + e);
  }
}
|
||||
// Stop listening, shut down every live session, and drop the socket.
// BUG FIX: this called `this.sessions.quit()`, but Server instances have no
// `sessions` property, so stop() always threw a TypeError. The session
// registry is the module-level `sessions` object.
Server.prototype.stop = function () {
  log('jsbridge: Closing...');
  this.serv.close();
  sessions.quit();
  this.serv = undefined;
}
|
||||
// nsIServerSocketListener callback; nothing to do when listening stops.
Server.prototype.onStopListening = function (serv, status) {
  // Stub function
}

// New connection: wrap the transport in a Session and track it.
// NOTE(review): `session` is assigned without `var`, creating an implicit
// global that other code may depend on -- confirm before tightening.
Server.prototype.onSocketAccepted = function (serv, transport) {
  session = new Session(transport)
  sessions.add(session);
}
|
||||
|
||||
// Write a line to the console (`dump` is a chrome global).
function log(msg) {
  dump(msg + '\n');
}

// Convenience entry point: create a Server on `port` and start listening.
// The Server instance is not retained by the caller; it stays alive via the
// socket's listener reference.
function startServer(port) {
  var server = new Server(port)
  server.start()
}
|
||||
|
||||
|
|
@ -0,0 +1,167 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Mozilla Corporation Code.
|
||||
#
|
||||
# The Initial Developer of the Original Code is
|
||||
# Mikeal Rogers.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2008
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Mikeal Rogers <mikeal.rogers@gmail.com>
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
|
||||
def init_jsobject(cls, bridge, name, value, description=None):
    """Initialize a js object that is a subclassed base type; int, str, unicode, float."""
    wrapped = cls(value)
    # Bridge bookkeeping lives in underscore-delimited attributes so that
    # JSObject.__setattr__ stores them locally instead of mirroring them
    # over the wire.
    wrapped._bridge_ = bridge
    wrapped._name_ = name
    wrapped._description_ = description
    return wrapped
|
||||
|
||||
def create_jsobject(bridge, fullname, value=None, obj_type=None, override_set=False):
    """Create a single JSObject for named object on other side of the bridge.

    Handles various initization cases for different JSObjects.

    The `value` and `obj_type` parameters are accepted for interface
    compatibility but are overwritten by the bridge's own description of
    `fullname`. Booleans are returned as plain Python bools.

    Raises TypeError when the described type has no wrapper in
    js_type_cases.
    """
    description = bridge.describe(fullname)
    obj_type = description['type']
    value = description.get('data', None)

    if value is True or value is False:
        return value

    # BUG FIX: was js_type_cases.has_key(obj_type); dict.has_key() is
    # Python-2-only, while `in` is equivalent and works everywhere.
    if obj_type in js_type_cases:
        cls, needs_init = js_type_cases[obj_type]
        # Objects that requires initialization are base types that have "values".
        if needs_init:
            obj = init_jsobject(cls, bridge, fullname, value, description=description)
        else:
            obj = cls(bridge, fullname, description=description, override_set=override_set)
        return obj
    else:
        # Something very bad happened, we don't have a representation for the given type.
        raise TypeError("Don't have a JSObject for javascript type "+obj_type)
|
||||
|
||||
class JSObject(object):
    """Base javascript object representation.

    Instances proxy a remote JS object: attribute reads and writes are
    forwarded through self._bridge_ using self._name_, the JavaScript
    expression that names the remote object. Underscore-delimited
    attributes (like _bridge_) are kept local and never forwarded.
    """
    _loaded_ = False

    def __init__(self, bridge, name, override_set=False, description=None, *args, **kwargs):
        self._bridge_ = bridge
        # Unless the caller already holds a registry reference, pin the
        # named object on the JS side and use the returned reference
        # expression as our name.
        if not override_set:
            name = bridge.set(name)['data']
        self._name_ = name
        self._description_ = description

    def __jsget__(self, name):
        """Abstraction for final step in get events; __getitem__ and __getattr__.
        """
        result = create_jsobject(self._bridge_, name, override_set=True)
        return result

    def __getattr__(self, name):
        """Get the object from jsbridge.

        Handles lazy loading of all attributes of self."""
        # A little hack so that ipython returns all the names.
        if name == '_getAttributeNames':
            return lambda : self._bridge_.describe(self._name_)['attributes']

        # Ask the JS side what attributes exist before dereferencing, so a
        # missing attribute raises AttributeError rather than a JS error.
        attributes = self._bridge_.describe(self._name_)['attributes']
        if name in attributes:
            return self.__jsget__(self._name_+'.'+name)
        else:
            raise AttributeError(name+" is undefined.")

    # Item access forwards to the same remote lookup.
    __getitem__ = __getattr__

    def __setattr__(self, name, value):
        """Set the given JSObject as an attribute of this JSObject and make proper javascript
        assignment on the other side of the bridge."""
        # Underscore-delimited names are local bookkeeping, not remote state.
        if name.startswith('_') and name.endswith('_'):
            return object.__setattr__(self, name, value)

        response = self._bridge_.setAttribute(self._name_, name, value)
        # Cache a proxy for the freshly assigned remote value locally.
        object.__setattr__(self, name, create_jsobject(self._bridge_, response['data'], override_set=True))

    __setitem__ = __setattr__
|
||||
|
||||
class JSFunction(JSObject):
    """Javascript function representation.

    Returns a JSObject instance for the serialized js type with
    name set to the full javascript call for this function.
    """

    def __init__(self, bridge, name, override_set=False, description=None, *args, **kwargs):
        # Unlike JSObject.__init__, the name is never re-pinned: it is the
        # call expression itself.
        self._bridge_ = bridge
        self._name_ = name
        self._description_ = description

    def __call__(self, *args, **kwargs):
        """Invoke the remote function with positional args only.

        Keyword arguments cannot be marshalled into a JS positional call.
        """
        # BUG FIX: was `assert len(kwargs) is 0` -- `is` on an int compares
        # identity and only happens to work because CPython caches small
        # ints. Truthiness is the portable check (still AssertionError).
        assert not kwargs, "jsbridge functions do not accept keyword arguments"
        response = self._bridge_.execFunction(self._name_, args)
        if response['data'] is not None:
            return create_jsobject(self._bridge_, response['data'], override_set=True)
|
||||
|
||||
|
||||
# Thin value wrappers: each subclasses both JSObject (bridge plumbing) and a
# Python base type (value semantics). Built via init_jsobject, which assigns
# the bridge bookkeeping after construction.
# NOTE(review): `unicode` makes this module Python-2-only.
class JSString(JSObject, unicode):
    "Javascript string representation."
    __init__ = unicode.__init__

class JSInt(JSObject, int):
    """Javascript number representation for Python int."""
    __init__ = int.__init__

class JSFloat(JSObject, float):
    """Javascript number representation for Python float."""
    # js "number" maps here (see js_type_cases below).
    __init__ = float.__init__
|
||||
|
||||
class JSUndefined(JSObject):
    """Javascript undefined representation.

    Falsy, and equal only to other JSUndefined instances.
    """
    __str__ = lambda self : "undefined"

    # BUG FIX: the old __cmp__ returned True/False where Python 2's cmp
    # protocol expects a negative/zero/positive int; returning False (== 0)
    # for non-undefined values made `undefined == anything` true. Rich
    # comparisons express the intent directly and also work on Python 3.
    def __eq__(self, other):
        return isinstance(other, JSUndefined)

    def __ne__(self, other):
        return not isinstance(other, JSUndefined)

    __nonzero__ = lambda self: False  # Python 2 truthiness
    __bool__ = __nonzero__            # Python 3 spelling (backward-compatible addition)
|
||||
|
||||
# Map from the jsbridge type tag (produced by the JS side's describe) to a
# (wrapper class, needs_init) pair. needs_init=True marks wrappers that
# subclass a Python base type and must be built from a concrete value via
# init_jsobject; the rest are constructed as lazy proxies.
js_type_cases = {'function' :(JSFunction, False,),
                 'object' :(JSObject, False,),
                 'array' :(JSObject, False,),
                 'string' :(JSString, True,),
                 'number' :(JSFloat, True,),
                 'undefined':(JSUndefined, False,),
                 }
# Reverse direction: which wrapper to use for a plain Python value.
py_type_cases = {unicode :JSString,
                 str :JSString,
                 int :JSInt,
                 float :JSFloat,
                 }
|
|
@ -0,0 +1,268 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Mozilla Corporation Code.
|
||||
#
|
||||
# The Initial Developer of the Original Code is
|
||||
# Mikeal Rogers.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2008
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Mikeal Rogers <mikeal.rogers@gmail.com>
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
|
||||
import asyncore
|
||||
import socket
|
||||
import logging
|
||||
import uuid
|
||||
from time import sleep
|
||||
from threading import Thread
|
||||
|
||||
try:
|
||||
import json as simplejson
|
||||
except:
|
||||
import simplejson
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class JavaScriptException(Exception):
    """Raised when the JS side replies with result=false and an exception payload."""
    pass
|
||||
|
||||
class Telnet(object, asyncore.dispatcher):
    """Minimal asyncore TCP client used to talk to the jsbridge server.

    NOTE(review): mixing the new-style `object` base with asyncore's
    dispatcher (old-style on Python 2) is historical; left as-is.
    """

    def __init__(self, host, port):
        self.host, self.port = host, port
        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect((host, port))
        self.buffer = ''     # bytes queued for asynchronous sending
        self.logger = logger

    def __del__(self):
        self.close()

    def handle_close(self):
        self.close()

    def handle_expt(self): self.close() # connection failed, shutdown

    def writable(self):
        # asyncore only calls handle_write while data is queued.
        return (len(self.buffer) > 0)

    def handle_write(self):
        sent = self.send(self.buffer)
        # Drop the bytes that were actually written; resend the rest later.
        self.buffer = self.buffer[sent:]

    def send(self, b):
        # BUG FIX: the byte count returned by dispatcher.send was being
        # discarded (implicit None), so handle_write sliced the buffer with
        # None and never drained it.
        return asyncore.dispatcher.send(self, b)

    def read_all(self):
        """Drain the socket until recv raises, returning what was read."""
        import socket
        data = ''
        while 1:
            try:
                data += self.recv(4096)
            except socket.error:
                return data

    def handle_read(self):
        self.data = self.read_all()
        self.process_read(self.data)

    # Subclasses override this to consume incoming data.
    read_callback = lambda self, data: None
|
||||
|
||||
decoder = simplejson.JSONDecoder()
|
||||
|
||||
try:
|
||||
from json.encoder import encode_basestring_ascii, encode_basestring
|
||||
except:
|
||||
from simplejson.encoder import encode_basestring_ascii, encode_basestring
|
||||
|
||||
class JSObjectEncoder(simplejson.JSONEncoder):
    """Encoder that supports jsobject references by name.

    JSObject proxies are serialized as their bare remote-reference
    expression (o._name_), NOT as a quoted JSON string, so that the JS side
    evaluates them back into the referenced object.

    NOTE(review): this overrides the private _iterencode machinery of
    simplejson/json; it only works with versions whose pure-Python encoder
    exposes these hooks -- confirm against the bundled simplejson.
    """

    def _iterencode(self, o, markers=None):
        # Imported lazily to avoid a circular import at module load time.
        import jsobjects
        if isinstance(o, jsobjects.JSObject):
            # Emit the raw JS expression, unquoted.
            yield o._name_
        elif isinstance(o, basestring):
            if self.ensure_ascii:
                encoder = encode_basestring_ascii
            else:
                encoder = encode_basestring
            _encoding = self.encoding
            if (_encoding is not None and isinstance(o, str)
                and not (_encoding == 'utf-8')):
                o = o.decode(_encoding)
            yield encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            # Older simplejson exposes floatstr, newer _floatstr.
            yield getattr(simplejson.encoder, 'floatstr', simplejson.encoder._floatstr)(o, self.allow_nan)
        elif isinstance(o, (list, tuple)):
            for chunk in self._iterencode_list(o, markers):
                yield chunk
        elif isinstance(o, dict):
            for chunk in self._iterencode_dict(o, markers):
                yield chunk
        else:
            # Fall back to the default hook, guarding against cycles.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            for chunk in self._iterencode_default(o, markers):
                yield chunk
            if markers is not None:
                del markers[markerid]
|
||||
|
||||
encoder = JSObjectEncoder()
|
||||
|
||||
class Bridge(Telnet):
|
||||
|
||||
trashes = []
|
||||
reading = False
|
||||
sbuffer = ''
|
||||
events_list = []
|
||||
|
||||
callbacks = {}
|
||||
|
||||
bridge_type = "bridge"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
Telnet.__init__(self, *args, **kwargs)
|
||||
self.connect(args)
|
||||
|
||||
def handle_connect(self):
|
||||
self.register()
|
||||
|
||||
def run(self, _uuid, exec_string, interval=0, raise_exeption=True):
|
||||
exec_string += '\r\n'
|
||||
self.send(exec_string)
|
||||
|
||||
while _uuid not in self.callbacks.keys():
|
||||
sleep(interval)
|
||||
|
||||
callback = self.callbacks.pop(_uuid)
|
||||
if callback['result'] is False and raise_exeption is True:
|
||||
raise JavaScriptException(callback['exception'])
|
||||
return callback
|
||||
|
||||
def register(self):
|
||||
_uuid = str(uuid.uuid1())
|
||||
self.send('bridge.register("'+_uuid+'", "'+self.bridge_type+'")\r\n')
|
||||
|
||||
def execFunction(self, func_name, args, interval=.25):
|
||||
_uuid = str(uuid.uuid1())
|
||||
exec_args = [encoder.encode(_uuid), func_name, encoder.encode(args)]
|
||||
return self.run(_uuid, 'bridge.execFunction('+ ', '.join(exec_args)+')', interval)
|
||||
|
||||
def setAttribute(self, obj_name, name, value):
|
||||
_uuid = str(uuid.uuid1())
|
||||
exec_args = [encoder.encode(_uuid), obj_name, encoder.encode(name), encoder.encode(value)]
|
||||
return self.run(_uuid, 'bridge.setAttribute('+', '.join(exec_args)+')')
|
||||
|
||||
def set(self, obj_name):
|
||||
_uuid = str(uuid.uuid1())
|
||||
return self.run(_uuid, 'bridge.set('+', '.join([encoder.encode(_uuid), obj_name])+')')
|
||||
|
||||
def describe(self, obj_name):
|
||||
_uuid = str(uuid.uuid1())
|
||||
return self.run(_uuid, 'bridge.describe('+', '.join([encoder.encode(_uuid), obj_name])+')')
|
||||
|
||||
def fire_callbacks(self, obj):
|
||||
self.callbacks[obj['uuid']] = obj
|
||||
|
||||
def process_read(self, data):
|
||||
"""Parse out json objects and fire callbacks."""
|
||||
self.sbuffer += data
|
||||
self.reading = True
|
||||
self.parsing = True
|
||||
while self.parsing:
|
||||
# Remove erroneus data in front of callback object
|
||||
index = self.sbuffer.find('{')
|
||||
if index is not -1 and index is not 0:
|
||||
self.sbuffer = self.sbuffer[index:]
|
||||
# Try to get a json object from the data stream
|
||||
try:
|
||||
obj, index = decoder.raw_decode(self.sbuffer)
|
||||
except Exception, e:
|
||||
self.parsing = False
|
||||
# If we got an object fire the callback infra
|
||||
if self.parsing:
|
||||
self.fire_callbacks(obj)
|
||||
self.sbuffer = self.sbuffer[index:]
|
||||
|
||||
class BackChannel(Bridge):
    """Bridge variant that receives server-initiated events.

    Events are dispatched to listeners registered by uuid, by event type,
    or globally.
    """

    bridge_type = "backchannel"

    def __init__(self, *args, **kwargs):
        super(BackChannel, self).__init__(*args, **kwargs)
        self.uuid_listener_index = {}   # uuid -> [callback]
        self.event_listener_index = {}  # eventType -> [callback]
        self.global_listeners = []

    def fire_callbacks(self, obj):
        """Handle all callback fireing on json objects pulled from the data stream."""
        # Keys arrive as unicode; str() them so they are valid kwarg names.
        self.fire_event(**dict([(str(key), value,) for key, value in obj.items()]))

    def add_listener(self, callback, uuid=None, eventType=None):
        """Register callback for a specific uuid and/or event type."""
        if uuid is not None:
            self.uuid_listener_index.setdefault(uuid, []).append(callback)
        if eventType is not None:
            self.event_listener_index.setdefault(eventType, []).append(callback)

    def add_global_listener(self, callback):
        """Register callback for every event (called with eventType, result)."""
        self.global_listeners.append(callback)

    def fire_event(self, eventType=None, uuid=None, result=None, exception=None):
        event = eventType
        # BUG-adjacent cleanup: dict.has_key() is Python-2-only; `in` is
        # equivalent and forward-compatible.
        if uuid is not None and uuid in self.uuid_listener_index:
            for callback in self.uuid_listener_index[uuid]:
                callback(result)
        if event is not None and event in self.event_listener_index:
            for callback in self.event_listener_index[event]:
                callback(result)
        for listener in self.global_listeners:
            listener(eventType, result)
|
||||
|
||||
def create_network(hostname, port):
    """Connect a (BackChannel, Bridge) pair to hostname:port and start the
    shared asyncore loop on a daemon thread."""
    channel = BackChannel(hostname, port)
    rpc_bridge = Bridge(hostname, port)

    loop_thread = Thread(target=asyncore.loop)
    # setDaemon may be missing on exotic Thread implementations; degrade to
    # a no-op rather than fail.
    getattr(loop_thread, 'setDaemon', lambda flag: None)(True)
    loop_thread.start()

    return channel, rpc_bridge
|
|
@ -0,0 +1,558 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Mozilla Corporation Code.
|
||||
#
|
||||
# The Initial Developer of the Original Code is
|
||||
# Mikeal Rogers.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2008-2009
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Mikeal Rogers <mikeal.rogers@gmail.com>
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
|
||||
import os, sys
|
||||
import copy
|
||||
import tempfile
|
||||
import shutil
|
||||
import signal
|
||||
import commands
|
||||
import zipfile
|
||||
import optparse
|
||||
import killableprocess
|
||||
import subprocess
|
||||
from xml.etree import ElementTree
|
||||
from distutils import dir_util
|
||||
from time import sleep
|
||||
|
||||
try:
|
||||
import simplejson
|
||||
except ImportError:
|
||||
import json as simplejson
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
copytree = dir_util.copy_tree
|
||||
|
||||
if sys.platform != 'win32':
|
||||
import pwd
|
||||
else:
|
||||
import win32api, win32pdhutil, win32con
|
||||
|
||||
def findInPath(fileName, path=None):
    """Return the absolute path of fileName found on the search path, or None.

    path: os.pathsep-separated directory list; defaults to the current
    value of the PATH environment variable.  (Fix: the original captured
    os.environ['PATH'] at import time via a default argument, so later
    changes to PATH were ignored.)

    On Windows/cygwin the name is also tried with a ".exe" suffix.
    """
    if path is None:
        path = os.environ['PATH']
    for dir in path.split(os.pathsep):
        candidate = os.path.join(dir, fileName)
        if os.path.isfile(candidate):
            return candidate
        if os.name == 'nt' or sys.platform == 'cygwin':
            if os.path.isfile(candidate + ".exe"):
                return candidate + ".exe"
    return None
|
||||
|
||||
# Default stream settings handed to killableprocess.Popen by run_command().
# NOTE(review): -1 appears to stand for subprocess.PIPE -- confirm intended.
stdout = -1
stderr = sys.stderr
stdin = sys.stdin
|
||||
|
||||
def run_command(cmd, env=None, **kwargs):
    """Run the given command in killable process."""
    popen_kwargs = {'stdout': stdout, 'stderr': stderr, 'stdin': stdin}
    popen_kwargs.update(kwargs)

    if sys.platform == "win32":
        return killableprocess.Popen(cmd, **popen_kwargs)
    # On POSIX, start the child in its own process group so the whole
    # group can be signalled later.
    return killableprocess.Popen(cmd, preexec_fn=lambda: os.setpgid(0, 0),
                                 env=env, **popen_kwargs)
|
||||
|
||||
def get_pids(name, minimun_pid=0):
    """Get all the pids matching name, exclude any pids below minimun_pid.

    (The parameter keeps its original misspelled name "minimun_pid" for
    backward compatibility with keyword callers.)
    """
    if os.name == 'nt' or sys.platform == 'cygwin':
        #win32pdhutil.ShowAllProcesses() #uncomment for testing
        pids = win32pdhutil.FindPerformanceAttributesByName(name)

    else:
        get_pids_cmd = ['ps', 'ax']
        h = killableprocess.runCommand(get_pids_cmd, stdout=subprocess.PIPE, universal_newlines=True)
        h.wait()
        data = h.stdout.readlines()
        # Fix: compare with "!= -1" instead of "is not -1"; identity tests
        # against int literals are implementation-dependent.
        pids = [int(line.split()[0]) for line in data if line.find(name) != -1]

    matching_pids = [m for m in pids if m > minimun_pid]
    return matching_pids
|
||||
|
||||
def kill_process_by_name(name):
    """Find and kill all processes containing a certain name"""

    pids = get_pids(name)

    if os.name == 'nt' or sys.platform == 'cygwin':
        for p in pids:
            handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0, p)  # get process handle
            win32api.TerminateProcess(handle, 0)  # kill by handle
            win32api.CloseHandle(handle)  # close api

    else:
        for pid in pids:
            try:
                os.kill(pid, signal.SIGTERM)
            except OSError:
                pass
            sleep(.5)
            # Fix: compare with "!= 0" instead of "is not 0"; identity
            # tests against int literals are implementation-dependent.
            if len(get_pids(name)) != 0:
                # SIGTERM was not enough; escalate to SIGKILL.
                os.kill(pid, signal.SIGKILL)
                sleep(.5)
                if len(get_pids(name)) != 0:
                    logger.error('Could not kill process')
|
||||
|
||||
def NaN(str):
    """Return True if `str` cannot be parsed as an int, False otherwise.

    (Fix: narrowed the bare "except", which also swallowed
    KeyboardInterrupt/SystemExit, to the conversion errors int() raises.)
    """
    try:
        int(str)
        return False
    except (TypeError, ValueError):
        return True
|
||||
|
||||
def makedirs(name):
    """Create directory `name`, creating missing parent directories as needed.

    Unlike os.makedirs, failures (e.g. the directory already existing) are
    deliberately ignored.  (Fix: the bare "except" clauses were narrowed to
    OSError so unrelated errors such as KeyboardInterrupt propagate.)
    """
    head, tail = os.path.split(name)
    if not tail:
        head, tail = os.path.split(head)
    if head and tail and not os.path.exists(head):
        try:
            makedirs(head)
        except OSError:
            pass
        if tail == os.curdir:  # xxx/newdir/. exists if xxx/newdir exists
            return
    try:
        os.mkdir(name)
    except OSError:
        # Best-effort by design: an already-existing directory is fine.
        pass
|
||||
|
||||
class Profile(object):
    """Handles all operations regarding profile. Creates new profiles, installs
    extensions, sets preferences and handles cleanup."""

    def __init__(self, default_profile=None, profile=None, create_new=True,
                 plugins=None, preferences=None):
        # Fix: use None instead of mutable default arguments ([] / {}),
        # which would be shared across all instances.
        if plugins is None:
            plugins = []
        if preferences is None:
            preferences = {}
        self.plugins_installed = []
        self.default_profile = default_profile
        self.profile = profile
        self.create_new = create_new
        self.plugins = plugins
        if not hasattr(self, 'preferences'):
            self.preferences = preferences
        else:
            # Subclasses define class-level preferences; copy first so the
            # class attribute itself is never mutated.
            self.preferences = copy.copy(self.preferences)
            self.preferences.update(preferences)

        if profile is not None and create_new is True:
            # Fix: repaired the garbled wording of the original message.
            raise Exception('You cannot set the profile location if you want mozrunner to create a new one for you.')
        if create_new is False and profile is None:
            raise Exception('If you set create_new to False you must provide the location of the profile you would like to run')
        if create_new is True:
            if default_profile is None:
                self.default_profile = self.find_default_profile()
            self.profile = self.create_new_profile(self.default_profile)
            for plugin in plugins:
                self.install_plugin(plugin)

        self.set_preferences(self.preferences)

    def find_default_profile(self):
        """Finds the default profile on the local system for self.names"""
        default_profile = None

        if sys.platform == 'linux2':
            # This is unfortunately hardcoded to work with Firefox;
            # the code is so hairy I'm just afraid to generalize it or port it
            # knowing that it's 99% functional for Firefox.
            for path, name in (('/opt', 'firefox',),
                               ('/usr/lib', 'iceweasel',),
                               ('/usr/share', 'firefox',),
                               ('/usr/lib/', 'mozilla-firefox',),
                               ('/usr/lib/', 'firefox',),
                               ):
                if os.path.isdir(path):
                    profiles = sorted([d for d in os.listdir(os.path.join(path)) if (
                                  d.startswith(name) ) and
                                  ( os.path.isdir(os.path.join(path, d, 'defaults', 'profile')) ) and
                                  ( ('-' not in d) or ( len(name+'-') <= len(d) and not
                                                        NaN(d[len(name+'-')]) or
                                                        (d == 'mozilla-firefox')) )
                                  ])
                    if len(profiles) > 0:
                        default_profile = os.path.join(path, profiles[-1], 'defaults', 'profile')
        if sys.platform == 'darwin':
            for name in reversed(self.names):
                appdir = os.path.join('Applications', name.capitalize()+'.app')
                if os.path.isdir(os.path.join(os.path.expanduser('~/'), appdir)):
                    appdir = os.path.join(os.path.expanduser('~/'), appdir)
                    default_profile = os.path.join(appdir, 'Contents/MacOS/defaults/profile')
                elif os.path.isdir('/'+appdir):
                    default_profile = os.path.join('/'+appdir,
                                                   'Contents/MacOS/defaults/profile')
        if os.name == 'nt' or sys.platform == 'cygwin':
            for name in reversed(self.names):
                bin = findInPath(name)
                if bin is None:
                    # Not on PATH; fall back to the standard install dirs.
                    for bin in [os.path.join(os.environ['ProgramFiles'],
                                             'Mozilla Firefox', 'firefox.exe'),
                                os.path.join(os.environ['ProgramFiles'],
                                             'Mozilla Firefox3', 'firefox.exe'),
                                ]:
                        if os.path.isfile(bin):
                            break
                if bin is not None and os.path.isfile(bin):
                    default_profile = os.path.join(os.path.dirname(bin),
                                                   'defaults', 'profile')
        if default_profile is None:
            raise Exception('Could not locate default profile, please set.')
        return default_profile

    def create_new_profile(self, default_profile):
        """Creates a new clean profile in tmp"""
        profile = tempfile.mkdtemp(suffix='.mozrunner')

        if sys.platform == 'linux2':
            try:
                login = os.getlogin()
            except OSError:
                # os.getlogin() fails without a controlling terminal;
                # fall back to the effective uid's passwd entry.
                login = pwd.getpwuid(os.geteuid())[0]
            print(commands.getoutput('chown -R %s:%s %s' % (login, login, profile)))

        if os.path.exists(profile) is True:
            shutil.rmtree(profile)
        copytree(default_profile, profile, preserve_symlinks=1)
        return profile

    def install_plugin(self, plugin):
        """Installs the given plugin path in the profile."""
        tmpdir = None
        if plugin.endswith('.xpi'):
            # Unpack the XPI into a temp dir first.
            tmpdir = tempfile.mkdtemp(suffix="."+os.path.split(plugin)[-1])
            compressed_file = zipfile.ZipFile(plugin, "r")
            for name in compressed_file.namelist():
                if name.endswith('/'):
                    makedirs(os.path.join(tmpdir, name))
                else:
                    if not os.path.isdir(os.path.dirname(os.path.join(tmpdir, name))):
                        makedirs(os.path.dirname(os.path.join(tmpdir, name)))
                    data = compressed_file.read(name)
                    # Fix: write in binary mode; extension archives contain
                    # binary files and text mode corrupts them on Windows.
                    f = open(os.path.join(tmpdir, name), 'wb')
                    f.write(data)
                    f.close()
            plugin = tmpdir

        tree = ElementTree.ElementTree(file=os.path.join(plugin, 'install.rdf'))

        desc = tree.find('.//{http://www.w3.org/1999/02/22-rdf-syntax-ns#}Description')
        # Fix: test "is not None"; an ElementTree Element with no children is
        # falsy, so the original bare truth test wrongly skipped such nodes.
        if desc is not None and '{http://www.mozilla.org/2004/em-rdf#}id' in desc.attrib:
            plugin_id = desc.attrib['{http://www.mozilla.org/2004/em-rdf#}id']
        else:
            about = [e for e in tree.findall(
                        './/{http://www.w3.org/1999/02/22-rdf-syntax-ns#}Description') if
                     e.get('{http://www.w3.org/1999/02/22-rdf-syntax-ns#}about') ==
                     'urn:mozilla:install-manifest'
                     ]
            # Fix: dropped the original dead "x = e.find(...)" line, which
            # depended on the comprehension variable leaking (Python 2 only)
            # and raised NameError when the findall() result was empty.

            if len(about) == 0:
                plugin_element = tree.find('.//{http://www.mozilla.org/2004/em-rdf#}id')
                plugin_id = plugin_element.text
            else:
                plugin_id = about[0].get('{http://www.mozilla.org/2004/em-rdf#}id')

        plugin_path = os.path.join(self.profile, 'extensions', plugin_id)
        copytree(plugin, plugin_path, preserve_symlinks=1)
        self.plugins_installed.append(plugin_path)

    def set_preferences(self, preferences):
        """Adds preferences dict to profile preferences"""
        prefs_file = os.path.join(self.profile, 'user.js')
        f = open(prefs_file, 'a+')
        # The marker comments delimit our additions for clean_preferences().
        f.write('\n#MozRunner Prefs Start\n')

        pref_lines = ['user_pref(%s, %s);' %
                      (simplejson.dumps(k), simplejson.dumps(v)) for k, v in
                      preferences.items()]
        for line in pref_lines:
            f.write(line+'\n')
        f.write('#MozRunner Prefs End\n')
        f.flush()
        f.close()

    def clean_preferences(self):
        """Removes preferences added by mozrunner."""
        lines = open(os.path.join(self.profile, 'user.js'), 'r').read().splitlines()
        s = lines.index('#MozRunner Prefs Start')
        e = lines.index('#MozRunner Prefs End')
        cleaned_prefs = '\n'.join(lines[:s] + lines[e+1:])
        f = open(os.path.join(self.profile, 'user.js'), 'w')
        f.write(cleaned_prefs)
        f.flush()
        f.close()

    def clean_plugins(self):
        """Cleans up plugins in the profile."""
        for plugin in self.plugins_installed:
            shutil.rmtree(plugin)

    def cleanup(self):
        """Cleanup operations on the profile."""
        if self.create_new:
            # We own the whole temp profile; remove it entirely.
            shutil.rmtree(self.profile)
        else:
            # User-supplied profile: only undo our own additions.
            self.clean_preferences()
            self.clean_plugins()
|
||||
|
||||
|
||||
class FirefoxProfile(Profile):
    """Specialized Profile subclass for Firefox"""

    # Defaults that keep an automated Firefox session quiet: no update
    # prompts, no default-browser nag, no close/quit warnings, no
    # session-restore dialog after a crash.
    preferences = {
        'extensions.update.enabled': False,
        'extensions.update.notifyUser': False,
        'browser.shell.checkDefaultBrowser': False,
        'browser.tabs.warnOnClose': False,
        'browser.warnOnQuit': False,
        'browser.sessionstore.resume_from_crash': False,
    }

    @property
    def names(self):
        """Candidate application names for the current platform."""
        if sys.platform == 'darwin':
            candidates = ['firefox', 'minefield', 'shiretoko']
        elif sys.platform == 'linux2':
            candidates = ['firefox', 'mozilla-firefox', 'iceweasel']
        elif os.name == 'nt' or sys.platform == 'cygwin':
            candidates = ['firefox']
        else:
            candidates = None
        return candidates
|
||||
|
||||
class ThunderbirdProfile(Profile):
    """Profile subclass preconfigured for Thunderbird runs."""

    # Quiet-mode preferences: no update prompts, no shutdown warnings,
    # no crash-recovery dialog.
    preferences = {
        'extensions.update.enabled': False,
        'extensions.update.notifyUser': False,
        'browser.shell.checkDefaultBrowser': False,
        'browser.tabs.warnOnClose': False,
        'browser.warnOnQuit': False,
        'browser.sessionstore.resume_from_crash': False,
    }
    names = ["thunderbird", "shredder"]
|
||||
|
||||
|
||||
class Runner(object):
    """Handles all running operations. Finds bins, runs and kills the process."""

    def __init__(self, binary=None, profile=None, cmdargs=None, env=None,
                 aggressively_kill=None, kp_kwargs=None):
        # Fix: use None instead of mutable default arguments ([], {}),
        # which are shared between all calls.
        if cmdargs is None:
            cmdargs = []
        if aggressively_kill is None:
            aggressively_kill = ['crashreporter']
        if kp_kwargs is None:
            kp_kwargs = {}

        if binary is None:
            self.binary = self.find_binary()
        elif binary.endswith('.app'):
            # OS X .app bundle: point at the real binary inside it, and
            # default the profile to the bundle's own defaults/profile.
            self.binary = os.path.join(binary, 'Contents/MacOS/'+self.names[0]+'-bin')
            if profile is None:
                self.profile = self.profile_class(os.path.join(binary,
                                    'Contents/MacOS/defaults/profile'))
        else:
            self.binary = binary

        if not os.path.exists(self.binary):
            raise Exception("Binary path does not exist "+self.binary)

        if profile is None and not hasattr(self, "profile"):
            self.profile = self.profile_class()
        elif profile is not None:
            self.profile = profile

        self.cmdargs = cmdargs
        if env is None:
            self.env = copy.copy(os.environ)
            # Keep the spawned browser from handing off to a running instance.
            self.env.update({'MOZ_NO_REMOTE': "1",})
        else:
            self.env = env
        self.aggressively_kill = aggressively_kill
        self.kp_kwargs = kp_kwargs

    def find_binary(self):
        """Finds the binary for self.names if one was not provided."""
        binary = None
        if sys.platform == 'linux2':
            for name in reversed(self.names):
                binary = findInPath(name)
        elif os.name == 'nt' or sys.platform == 'cygwin':
            for name in reversed(self.names):
                binary = findInPath(name)
                if binary is None:
                    # Not on PATH; try the standard install locations.
                    for bin in [os.path.join(os.environ['ProgramFiles'],
                                             'Mozilla Firefox', 'firefox.exe'),
                                os.path.join(os.environ['ProgramFiles'],
                                             'Mozilla Firefox3', 'firefox.exe'),
                                ]:
                        if os.path.isfile(bin):
                            binary = bin
                            break
        elif sys.platform == 'darwin':
            for name in reversed(self.names):
                appdir = os.path.join('Applications', name.capitalize()+'.app')
                if os.path.isdir(os.path.join(os.path.expanduser('~/'), appdir)):
                    binary = os.path.join(os.path.expanduser('~/'), appdir,
                                          'Contents/MacOS/'+name+'-bin')
                elif os.path.isdir('/'+appdir):
                    binary = os.path.join("/"+appdir, 'Contents/MacOS/'+name+'-bin')

                if binary is not None:
                    if not os.path.isfile(binary):
                        binary = binary.replace(name+'-bin', 'firefox-bin')
                    if not os.path.isfile(binary):
                        binary = None
        if binary is None:
            raise Exception('Mozrunner could not locate your binary, you will need to set it.')
        return binary

    @property
    def command(self):
        """Returns the command list to run."""
        return [self.binary, '-profile', self.profile.profile]

    def start(self):
        """Run self.command in the proper environment."""
        self.process_handler = run_command(self.command+self.cmdargs, self.env, **self.kp_kwargs)

    def wait(self, timeout=None):
        """Wait for the browser to exit."""
        self.process_handler.wait(timeout=timeout)

        if sys.platform != 'win32':
            # Also wait for any helper processes spawned after ours.
            for name in self.names:
                for pid in get_pids(name, self.process_handler.pid):
                    self.process_handler.pid = pid
                    self.process_handler.wait(timeout=timeout)

    def kill(self, kill_signal=signal.SIGTERM):
        """Kill the browser"""
        if sys.platform != 'win32':
            self.process_handler.kill()
            for name in self.names:
                for pid in get_pids(name, self.process_handler.pid):
                    self.process_handler.pid = pid
                    self.process_handler.kill()
        else:
            try:
                self.process_handler.kill(group=True)
            except Exception as e:
                # Fix: str(e) instead of e.message; "message" is deprecated
                # and absent on many exception types.
                logger.error('Cannot kill process, '+type(e).__name__+' '+str(e))

        for name in self.aggressively_kill:
            kill_process_by_name(name)

    def stop(self):
        self.kill()
|
||||
|
||||
class FirefoxRunner(Runner):
    """Specialized Runner subclass for running Firefox."""

    profile_class = FirefoxProfile

    @property
    def names(self):
        """Candidate binary names for the current platform."""
        if sys.platform == 'darwin':
            candidates = ['firefox', 'minefield', 'shiretoko']
        elif sys.platform == 'linux2':
            candidates = ['firefox', 'mozilla-firefox', 'iceweasel']
        elif os.name == 'nt' or sys.platform == 'cygwin':
            candidates = ['firefox']
        else:
            candidates = None
        return candidates
|
||||
|
||||
class ThunderbirdRunner(Runner):
    """Specialized Runner subclass for running Thunderbird"""

    profile_class = ThunderbirdProfile
    names = ["thunderbird", "shredder"]
|
||||
|
||||
class CLI(object):
    """Command line interface."""

    # Maps option-name tuples to the optparse keyword arguments for each.
    parser_options = {("-b", "--binary",): dict(dest="binary", help="Binary path.",
                                                metavar=None, default=None),
                      ("-d", "--default-profile",): dict(dest="default_profile",
                                                         help="Default profile path.",
                                                         metavar=None, default=None),
                      ('-p', "--profile",): dict(dest="profile", help="Profile path.",
                                                 metavar=None, default=None),
                      ('-w', "--plugins",): dict(dest="plugins",
                                                 help="Plugin paths to install.",
                                                 metavar=None, default=None),
                      ("-n", "--no-new-profile",): dict(dest="create_new",
                                                        action="store_false",
                                                        help="Do not create new profile.",
                                                        metavar="MOZRUNNER_NEW_PROFILE",
                                                        default=True),
                      }

    runner_class = FirefoxRunner
    profile_class = FirefoxProfile

    def __init__(self):
        self.parser = optparse.OptionParser()
        for names, opts in self.parser_options.items():
            self.parser.add_option(*names, **opts)

    def parse_and_get_runner(self):
        """Parses the command line arguments and returns a runner instance."""
        (options, args) = self.parser.parse_args()
        self.options = options
        self.args = args
        if self.options.plugins is None:
            plugins = []
        else:
            plugins = self.options.plugins.split(',')
        profile = self.get_profile(default_profile=options.default_profile,
                                   profile=options.profile, create_new=options.create_new,
                                   plugins=plugins)

        runner = self.get_runner(binary=self.options.binary,
                                 profile=profile)

        return runner

    def get_profile(self, default_profile=None, profile=None, create_new=None, plugins=None,
                    preferences=None):
        """Returns the profile instance for the given command line arguments."""
        # Fix: use None instead of mutable default arguments ([], {}).
        if plugins is None:
            plugins = []
        if preferences is None:
            preferences = {}
        return self.profile_class(default_profile, profile, create_new, plugins, preferences)

    def get_runner(self, binary=None, profile=None):
        """Returns the runner instance for the given command line binary argument and
        the profile instance returned from self.get_profile()."""
        return self.runner_class(binary, profile)

    def run(self):
        """Runs self.start(self.parse_and_get_runner())"""
        runner = self.parse_and_get_runner()
        self.start(runner)
        runner.profile.cleanup()

    def start(self, runner):
        """Starts the runner and waits for Firefox to exit or Keyboard Interrupt.
        Should be overwritten to provide custom running of the runner instance."""
        runner.start()
        print('Started: ' + ' '.join(runner.command))
        try:
            runner.wait()
        except KeyboardInterrupt:
            runner.stop()
|
||||
|
||||
|
||||
def cli():
    """Console entry point: build a CLI object and run it."""
    CLI().run()
|
|
@ -0,0 +1,251 @@
|
|||
# killableprocess - subprocesses which can be reliably killed
|
||||
#
|
||||
# Parts of this module are copied from the subprocess.py file contained
|
||||
# in the Python distribution.
|
||||
#
|
||||
# Copyright (c) 2003-2004 by Peter Astrand <astrand@lysator.liu.se>
|
||||
#
|
||||
# Additions and modifications written by Benjamin Smedberg
|
||||
# <benjamin@smedbergs.us> are Copyright (c) 2006 by the Mozilla Foundation
|
||||
# <http://www.mozilla.org/>
|
||||
#
|
||||
# More Modifications
|
||||
# Copyright (c) 2006-2007 by Mike Taylor <bear@code-bear.com>
|
||||
# Copyright (c) 2007-2008 by Mikeal Rogers <mikeal@mozilla.com>
|
||||
#
|
||||
# By obtaining, using, and/or copying this software and/or its
|
||||
# associated documentation, you agree that you have read, understood,
|
||||
# and will comply with the following terms and conditions:
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and
|
||||
# its associated documentation for any purpose and without fee is
|
||||
# hereby granted, provided that the above copyright notice appears in
|
||||
# all copies, and that both that copyright notice and this permission
|
||||
# notice appear in supporting documentation, and that the name of the
|
||||
# author not be used in advertising or publicity pertaining to
|
||||
# distribution of the software without specific, written prior
|
||||
# permission.
|
||||
#
|
||||
# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
|
||||
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
|
||||
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
|
||||
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""killableprocess - Subprocesses which can be reliably killed
|
||||
|
||||
This module is a subclass of the builtin "subprocess" module. It allows
|
||||
processes that launch subprocesses to be reliably killed on Windows (via the Popen.kill() method).
|
||||
|
||||
It also adds a timeout argument to wait() for a limited period of time before
|
||||
forcefully killing the process.
|
||||
|
||||
Note: On Windows, this module requires Windows 2000 or higher (no support for
|
||||
Windows 95, 98, or NT 4.0). It also requires ctypes, which is bundled with
|
||||
Python 2.5+ or available from http://python.net/crew/theller/ctypes/
|
||||
"""
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
import datetime
|
||||
import types
|
||||
import exceptions
|
||||
|
||||
try:
    from subprocess import CalledProcessError
except ImportError:
    # Python 2.4 doesn't implement CalledProcessError
    class CalledProcessError(Exception):
        """This exception is raised when a process run by check_call() returns
        a non-zero exit status. The exit status will be stored in the
        returncode attribute."""

        def __init__(self, returncode, cmd):
            self.returncode = returncode
            self.cmd = cmd

        def __str__(self):
            return "Command '%s' returned non-zero exit status %d" % (
                self.cmd, self.returncode)
|
||||
|
||||
mswindows = (sys.platform == "win32")
|
||||
|
||||
if mswindows:
|
||||
import winprocess
|
||||
else:
|
||||
import signal
|
||||
|
||||
def call(*args, **kwargs):
    """Run a command via Popen and wait for it to finish, honoring an
    optional "timeout" keyword that is forwarded to wait()."""
    wait_kwargs = {}
    if "timeout" in kwargs:
        wait_kwargs["timeout"] = kwargs.pop("timeout")
    process = Popen(*args, **kwargs)
    return process.wait(**wait_kwargs)
|
||||
|
||||
def check_call(*args, **kwargs):
    """Call a program with an optional timeout. If the program has a non-zero
    exit status, raises a CalledProcessError."""
    retcode = call(*args, **kwargs)
    if not retcode:
        return
    cmd = kwargs.get("args")
    if cmd is None:
        cmd = args[0]
    raise CalledProcessError(retcode, cmd)
|
||||
|
||||
if not mswindows:
    def DoNothing(*args):
        """No-op callback; accepts and ignores any arguments."""
        pass
|
||||
|
||||
class Popen(subprocess.Popen):
    """subprocess.Popen subclass whose kill() reliably terminates the child
    (and, optionally, its whole process group / Windows job), and whose
    wait() accepts a timeout."""

    if mswindows:
        def _execute_child(self, args, executable, preexec_fn, close_fds,
                           cwd, env, universal_newlines, startupinfo,
                           creationflags, shell,
                           p2cread, p2cwrite,
                           c2pread, c2pwrite,
                           errread, errwrite):
            if not isinstance(args, types.StringTypes):
                args = subprocess.list2cmdline(args)

            if startupinfo is None:
                startupinfo = winprocess.STARTUPINFO()

            if None not in (p2cread, c2pwrite, errwrite):
                startupinfo.dwFlags |= winprocess.STARTF_USESTDHANDLES
                startupinfo.hStdInput = int(p2cread)
                startupinfo.hStdOutput = int(c2pwrite)
                startupinfo.hStdError = int(errwrite)
            if shell:
                startupinfo.dwFlags |= winprocess.STARTF_USESHOWWINDOW
                startupinfo.wShowWindow = winprocess.SW_HIDE
                comspec = os.environ.get("COMSPEC", "cmd.exe")
                args = comspec + " /c " + args

            # We create a new job for this process, so that we can kill
            # the process and any sub-processes
            self._job = winprocess.CreateJobObject()

            creationflags |= winprocess.CREATE_SUSPENDED
            creationflags |= winprocess.CREATE_UNICODE_ENVIRONMENT

            hp, ht, pid, tid = winprocess.CreateProcess(
                executable, args,
                None, None,  # No special security
                1,  # Must inherit handles!
                creationflags,
                winprocess.EnvironmentBlock(env),
                cwd, startupinfo)

            self._child_created = True
            self._handle = hp
            self._thread = ht
            self.pid = pid
            self.tid = tid

            winprocess.AssignProcessToJobObject(self._job, hp)
            winprocess.ResumeThread(ht)

            if p2cread is not None:
                p2cread.Close()
            if c2pwrite is not None:
                c2pwrite.Close()
            if errwrite is not None:
                errwrite.Close()
            time.sleep(.1)
            p = winprocess.QueryInformationJobObject(self._job, 8)['BasicInfo']['ActiveProcesses']
            # Fix: compare with ==/!=, not "is"; identity tests against int
            # literals are implementation-dependent.
            self._job_working = (p != 0)

    def kill(self, group=True):
        """Kill the process. If group=True, all sub-processes will also be killed."""
        if mswindows:
            if group:
                winprocess.TerminateJobObject(self._job, 127)
            else:
                winprocess.TerminateProcess(self._handle, 127)
            self.returncode = 127
        else:
            if group:
                try:
                    os.killpg(self.pid, signal.SIGKILL)
                except OSError:
                    # Fix: narrowed from a bare except; only "no such
                    # process group" style failures should be ignored.
                    pass
            else:
                os.kill(self.pid, signal.SIGKILL)
            self.returncode = -9

    def wait(self, timeout=None, group=True):
        """Wait for the process to terminate. Returns returncode attribute.
        If timeout seconds are reached and the process has not terminated,
        it will be forcefully killed. If timeout is -1, wait will not
        time out."""
        if timeout is not None:
            # NOTE(review): callers pass seconds, and this converts to
            # milliseconds, but the elapsed-time checks below compare it
            # against timedelta.microseconds -- the units look
            # inconsistent in the original; confirm before changing.
            timeout = timeout * 1000

        if self.returncode is not None:
            return self.returncode

        starttime = datetime.datetime.now()

        if mswindows:
            if timeout is None:
                timeout = -1
            rc = winprocess.WaitForSingleObject(self._handle, timeout)

            if rc != winprocess.WAIT_TIMEOUT:
                # The process handle is signalled; wait for the rest of
                # the job's processes to drain.
                while (starttime - datetime.datetime.now()).microseconds < timeout or ( winprocess.QueryInformationJobObject(self._job, 8)['BasicInfo']['ActiveProcesses'] > 0 ):
                    time.sleep(.5)

            if (starttime - datetime.datetime.now()).microseconds > timeout:
                self.kill(group)
            else:
                self.returncode = winprocess.GetExitCodeProcess(self._handle)
        else:
            if sys.platform == 'linux2':
                def group_wait():
                    os.waitpid(self.pid, 0)
                    return self.returncode
            elif sys.platform == 'darwin':
                def group_wait():
                    try:
                        # Signal 0 probes the group without killing it;
                        # an OSError means the group is gone.
                        while 1:
                            os.killpg(self.pid, signal.SIG_DFL)
                            time.sleep(.5)
                    except OSError:
                        return self.returncode

            if timeout is None:
                if group is True:
                    return group_wait()
                else:
                    subprocess.Popen.wait(self)
                    return self.returncode

            returncode = False

            while (starttime - datetime.datetime.now()).microseconds < timeout or ( returncode is False ):
                if group is True:
                    return group_wait()
                else:
                    # Fix: the original called subprocess.poll(), which does
                    # not exist on the module (AttributeError); poll this
                    # instance via the base class instead.
                    if subprocess.Popen.poll(self) is not None:
                        returncode = self.returncode
                time.sleep(.5)
            return self.returncode

        return self.returncode

    # We get random maxint errors from subprocesses __del__
    __del__ = lambda self: None
|
||||
|
||||
def setpgid_preexec_fn():
    """preexec_fn for Popen: place the child in its own process group."""
    os.setpgid(0, 0)
|
||||
|
||||
def runCommand(cmd, **kwargs):
    """Start `cmd` via Popen; on POSIX the child gets its own process
    group so the whole group can be killed together."""
    if sys.platform == "win32":
        return Popen(cmd, **kwargs)
    return Popen(cmd, preexec_fn=setpgid_preexec_fn, **kwargs)
|
|
@ -0,0 +1,275 @@
|
|||
# A module to expose various thread/process/job related structures and
|
||||
# methods from kernel32
|
||||
#
|
||||
# The MIT License
|
||||
#
|
||||
# Copyright (c) 2003-2004 by Peter Astrand <astrand@lysator.liu.se>
|
||||
#
|
||||
# Additions and modifications written by Benjamin Smedberg
|
||||
# <benjamin@smedbergs.us> are Copyright (c) 2006 by the Mozilla Foundation
|
||||
# <http://www.mozilla.org/>
|
||||
#
|
||||
# More Modifications
|
||||
# Copyright (c) 2006-2007 by Mike Taylor <bear@code-bear.com>
|
||||
# Copyright (c) 2007-2008 by Mikeal Rogers <mikeal@mozilla.com>
|
||||
#
|
||||
# By obtaining, using, and/or copying this software and/or its
|
||||
# associated documentation, you agree that you have read, understood,
|
||||
# and will comply with the following terms and conditions:
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and
|
||||
# its associated documentation for any purpose and without fee is
|
||||
# hereby granted, provided that the above copyright notice appears in
|
||||
# all copies, and that both that copyright notice and this permission
|
||||
# notice appear in supporting documentation, and that the name of the
|
||||
# author not be used in advertising or publicity pertaining to
|
||||
# distribution of the software without specific, written prior
|
||||
# permission.
|
||||
#
|
||||
# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
|
||||
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
|
||||
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
|
||||
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
from ctypes import c_void_p, POINTER, sizeof, Structure, windll, WinError, WINFUNCTYPE
|
||||
from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPCWSTR, LPWSTR, UINT, WORD
|
||||
from subprocess import SW_HIDE
|
||||
from win32job import QueryInformationJobObject
|
||||
|
||||
LPVOID = c_void_p
|
||||
LPBYTE = POINTER(BYTE)
|
||||
LPDWORD = POINTER(DWORD)
|
||||
|
||||
def ErrCheckBool(result, func, args):
    """errcheck function for Windows functions that return a BOOL True
    on success"""
    if result:
        return args
    raise WinError()
|
||||
|
||||
|
||||
# AutoHANDLE
|
||||
|
||||
class AutoHANDLE(HANDLE):
    """Subclass of HANDLE which will call CloseHandle() on deletion."""

    CloseHandleProto = WINFUNCTYPE(BOOL, HANDLE)
    CloseHandle = CloseHandleProto(("CloseHandle", windll.kernel32))
    CloseHandle.errcheck = ErrCheckBool

    def Close(self):
        # Only close a live handle; zero the value afterwards so a
        # second Close() (or __del__) is a no-op.
        if not self.value:
            return
        self.CloseHandle(self)
        self.value = 0

    def __del__(self):
        self.Close()

    def __int__(self):
        return self.value
|
||||
|
||||
def ErrCheckHandle(result, func, args):
    """errcheck function for Windows functions that return a HANDLE;
    wraps the raw handle in an AutoHANDLE so it closes automatically."""
    if result:
        return AutoHANDLE(result)
    raise WinError()
|
||||
|
||||
# PROCESS_INFORMATION structure

class PROCESS_INFORMATION(Structure):
    # Mirrors the Win32 PROCESS_INFORMATION struct that CreateProcess()
    # fills in with the new process/thread handles and ids.
    _fields_ = [("hProcess", HANDLE),
                ("hThread", HANDLE),
                ("dwProcessID", DWORD),
                ("dwThreadID", DWORD)]

    def __init__(self):
        Structure.__init__(self)

        # NOTE(review): the Win32 PROCESS_INFORMATION struct has no "cb"
        # member, so this only creates a plain Python attribute — it looks
        # copied from STARTUPINFO. Harmless, but confirm before removing.
        self.cb = sizeof(self)
|
||||
|
||||
# Pointer alias used as the output-parameter type in the CreateProcess
# prototype.
LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
|
||||
|
||||
# STARTUPINFO structure

class STARTUPINFO(Structure):
    # Mirrors the Win32 STARTUPINFOW struct passed to CreateProcessW.
    # Per the Win32 contract, cb must be set to sizeof(STARTUPINFO)
    # before the struct is used.
    _fields_ = [("cb", DWORD),
                ("lpReserved", LPWSTR),
                ("lpDesktop", LPWSTR),
                ("lpTitle", LPWSTR),
                ("dwX", DWORD),
                ("dwY", DWORD),
                ("dwXSize", DWORD),
                ("dwYSize", DWORD),
                ("dwXCountChars", DWORD),
                ("dwYCountChars", DWORD),
                ("dwFillAttribute", DWORD),
                ("dwFlags", DWORD),
                ("wShowWindow", WORD),
                ("cbReserved2", WORD),
                ("lpReserved2", LPBYTE),
                ("hStdInput", HANDLE),
                ("hStdOutput", HANDLE),
                ("hStdError", HANDLE)
                ]
|
||||
LPSTARTUPINFO = POINTER(STARTUPINFO)

# STARTUPINFO.dwFlags bits: each one tells CreateProcess which of the
# STARTUPINFO fields to honour.
STARTF_USESHOWWINDOW = 0x01
STARTF_USESIZE = 0x02
STARTF_USEPOSITION = 0x04
STARTF_USECOUNTCHARS = 0x08
STARTF_USEFILLATTRIBUTE = 0x10
STARTF_RUNFULLSCREEN = 0x20
STARTF_FORCEONFEEDBACK = 0x40
STARTF_FORCEOFFFEEDBACK = 0x80
STARTF_USESTDHANDLES = 0x100
|
||||
|
||||
# EnvironmentBlock

class EnvironmentBlock:
    """An object which can be passed as the lpEnv parameter of CreateProcess.

    It is initialized with a dictionary.
    """

    def __init__(self, dict):
        # An empty/None mapping maps to a NULL lpEnvironment, which tells
        # CreateProcess to let the child inherit the parent's environment.
        if not dict:
            self._as_parameter_ = None
        else:
            # A Win32 environment block is a run of NUL-separated
            # "key=value" strings with an extra trailing NUL (produced by
            # joining with "\0" after appending an empty element).
            # Use items() rather than iteritems(): identical behavior on
            # Python 2, and it keeps this module importable on Python 3
            # where iteritems() no longer exists.
            values = ["%s=%s" % (key, value)
                      for (key, value) in dict.items()]
            values.append("")
            self._as_parameter_ = LPCWSTR("\0".join(values))
|
||||
|
||||
# CreateProcess()

# Prototype for kernel32!CreateProcessW (wide-character variant).
CreateProcessProto = WINFUNCTYPE(BOOL,                  # Return type
                                 LPCWSTR,               # lpApplicationName
                                 LPWSTR,                # lpCommandLine
                                 LPVOID,                # lpProcessAttributes
                                 LPVOID,                # lpThreadAttributes
                                 BOOL,                  # bInheritHandles
                                 DWORD,                 # dwCreationFlags
                                 LPVOID,                # lpEnvironment
                                 LPCWSTR,               # lpCurrentDirectory
                                 LPSTARTUPINFO,         # lpStartupInfo
                                 LPPROCESS_INFORMATION  # lpProcessInformation
                                 )

# ctypes paramflags: direction 1 = input (third element is a default
# value), direction 2 = output (allocated by ctypes and handed back to
# the caller / errcheck).
CreateProcessFlags = ((1, "lpApplicationName", None),
                      (1, "lpCommandLine"),
                      (1, "lpProcessAttributes", None),
                      (1, "lpThreadAttributes", None),
                      (1, "bInheritHandles", True),
                      (1, "dwCreationFlags", 0),
                      (1, "lpEnvironment", None),
                      (1, "lpCurrentDirectory", None),
                      (1, "lpStartupInfo"),
                      (2, "lpProcessInformation"))
|
||||
|
||||
def ErrCheckCreateProcess(result, func, args):
    # Validate the BOOL result first (raises WinError on failure).
    ErrCheckBool(result, func, args)
    # return a tuple (hProcess, hThread, dwProcessID, dwThreadID)
    # args[9] is the output PROCESS_INFORMATION (paramflags direction 2);
    # wrap both handles in AutoHANDLE so they close themselves.
    pi = args[9]
    return AutoHANDLE(pi.hProcess), AutoHANDLE(pi.hThread), pi.dwProcessID, pi.dwThreadID
|
||||
|
||||
CreateProcess = CreateProcessProto(("CreateProcessW", windll.kernel32),
                                   CreateProcessFlags)
CreateProcess.errcheck = ErrCheckCreateProcess

# dwCreationFlags values accepted by CreateProcess (winbase.h).
CREATE_BREAKAWAY_FROM_JOB = 0x01000000
CREATE_DEFAULT_ERROR_MODE = 0x04000000
CREATE_NEW_CONSOLE = 0x00000010
CREATE_NEW_PROCESS_GROUP = 0x00000200
CREATE_NO_WINDOW = 0x08000000
CREATE_SUSPENDED = 0x00000004
CREATE_UNICODE_ENVIRONMENT = 0x00000400
DEBUG_ONLY_THIS_PROCESS = 0x00000002
DEBUG_PROCESS = 0x00000001
DETACHED_PROCESS = 0x00000008
||||
|
||||
# CreateJobObject()

CreateJobObjectProto = WINFUNCTYPE(HANDLE,  # Return type
                                   LPVOID,  # lpJobAttributes
                                   LPCWSTR  # lpName
                                   )

# Both parameters default to NULL: default security attributes and an
# anonymous job object.
CreateJobObjectFlags = ((1, "lpJobAttributes", None),
                        (1, "lpName", None))

CreateJobObject = CreateJobObjectProto(("CreateJobObjectW", windll.kernel32),
                                       CreateJobObjectFlags)
# NULL return means failure; a valid handle is wrapped in AutoHANDLE.
CreateJobObject.errcheck = ErrCheckHandle
|
||||
|
||||
# AssignProcessToJobObject()

AssignProcessToJobObjectProto = WINFUNCTYPE(BOOL,    # Return type
                                            HANDLE,  # hJob
                                            HANDLE   # hProcess
                                            )
AssignProcessToJobObjectFlags = ((1, "hJob"),
                                 (1, "hProcess"))
AssignProcessToJobObject = AssignProcessToJobObjectProto(
    ("AssignProcessToJobObject", windll.kernel32),
    AssignProcessToJobObjectFlags)
AssignProcessToJobObject.errcheck = ErrCheckBool
|
||||
|
||||
# ResumeThread()

def ErrCheckResumeThread(result, func, args):
    """errcheck for ResumeThread, which returns the thread's previous
    suspend count on success and (DWORD)-1 on failure.

    Raises WinError() on failure; otherwise passes *args* through.
    """
    # The prototype's restype is DWORD (unsigned), so ctypes delivers the
    # failure sentinel as 0xFFFFFFFF, not -1 — comparing against -1 alone
    # silently ignored every error. Accept both spellings to be safe.
    if result == 0xFFFFFFFF or result == -1:
        raise WinError()

    return args
|
||||
|
||||
ResumeThreadProto = WINFUNCTYPE(DWORD,  # Return type
                                HANDLE  # hThread
                                )
ResumeThreadFlags = ((1, "hThread"),)
ResumeThread = ResumeThreadProto(("ResumeThread", windll.kernel32),
                                 ResumeThreadFlags)
# ResumeThread signals failure with (DWORD)-1 rather than FALSE, so it
# gets its own errcheck instead of ErrCheckBool.
ResumeThread.errcheck = ErrCheckResumeThread
|
||||
|
||||
# TerminateJobObject()

TerminateJobObjectProto = WINFUNCTYPE(BOOL,    # Return type
                                      HANDLE,  # hJob
                                      UINT     # uExitCode
                                      )
# uExitCode defaults to 127 — the exit code given to every process in
# the job when it is terminated.
TerminateJobObjectFlags = ((1, "hJob"),
                           (1, "uExitCode", 127))
TerminateJobObject = TerminateJobObjectProto(
    ("TerminateJobObject", windll.kernel32),
    TerminateJobObjectFlags)
TerminateJobObject.errcheck = ErrCheckBool
|
||||
|
||||
# WaitForSingleObject()

WaitForSingleObjectProto = WINFUNCTYPE(DWORD,   # Return type
                                       HANDLE,  # hHandle
                                       DWORD,   # dwMilliseconds
                                       )
# dwMilliseconds defaults to -1 (INFINITE; passed through the unsigned
# DWORD parameter as 0xFFFFFFFF), i.e. wait forever.
WaitForSingleObjectFlags = ((1, "hHandle"),
                            (1, "dwMilliseconds", -1))
WaitForSingleObject = WaitForSingleObjectProto(
    ("WaitForSingleObject", windll.kernel32),
    WaitForSingleObjectFlags)

INFINITE = -1
# WaitForSingleObject return values.
WAIT_TIMEOUT = 0x0102
WAIT_OBJECT_0 = 0x0
WAIT_ABANDONED = 0x0080
|
||||
|
||||
# GetExitCodeProcess()

GetExitCodeProcessProto = WINFUNCTYPE(BOOL,     # Return type
                                      HANDLE,   # hProcess
                                      LPDWORD,  # lpExitCode
                                      )
# lpExitCode is an output parameter (direction 2): ctypes allocates the
# DWORD and returns its value to the Python caller.
GetExitCodeProcessFlags = ((1, "hProcess"),
                           (2, "lpExitCode"))
GetExitCodeProcess = GetExitCodeProcessProto(
    ("GetExitCodeProcess", windll.kernel32),
    GetExitCodeProcessFlags)
GetExitCodeProcess.errcheck = ErrCheckBool
|
|
@ -0,0 +1,20 @@
|
|||
"""The namespace for the pavement to run in, also imports default tasks."""
|
||||
|
||||
import warnings
|
||||
|
||||
warnings.warn("""paver.defaults is deprecated. Import from paver.easy instead.
|
||||
Note that you will need to add additional declarations for exactly
|
||||
equivalent behavior. Specifically:
|
||||
|
||||
from paver.easy import *
|
||||
import paver.misctasks
|
||||
from paver import setuputils
|
||||
|
||||
setuputils.install_distutils_tasks()
|
||||
""", DeprecationWarning, 2)
|
||||
|
||||
from paver.easy import *
|
||||
from paver.misctasks import *
|
||||
from paver import setuputils
|
||||
|
||||
setuputils.install_distutils_tasks()
|
|
@ -0,0 +1,106 @@
|
|||
import subprocess
|
||||
import sys
|
||||
|
||||
from paver import tasks
|
||||
from paver.options import Bunch
|
||||
|
||||
def dry(message, func, *args, **kw):
    """Guard a destructive operation behind the dry-run flag.

    The message is always reported via info(). Unless the active
    environment is in dry-run mode, func(*args, **kw) is then executed
    and its result returned; in a dry run nothing is executed and None
    is returned.
    """
    info(message)
    if not tasks.environment.dry_run:
        return func(*args, **kw)
|
||||
|
||||
def error(message, *args):
    """Displays an error message to the user."""
    # Delegate to the active Environment so output honours the
    # command-line verbosity options.
    tasks.environment.error(message, *args)
|
||||
|
||||
def info(message, *args):
    """Displays a message to the user. If the quiet option is specified, the
    message will not be displayed."""
    # Delegate to the active Environment, which applies the quiet flag.
    tasks.environment.info(message, *args)
|
||||
|
||||
def debug(message, *args):
    """Displays a message to the user, but only if the verbose flag is
    set."""
    # Delegate to the active Environment, which applies the verbose flag.
    tasks.environment.debug(message, *args)
|
||||
|
||||
def sh(command, capture=False, ignore_error=False, cwd=None):
    """Runs an external command. If capture is True, the output of the
    command will be captured and returned as a string. If the command
    has a non-zero return code raise a BuildFailure. You can pass
    ignore_error=True to allow non-zero return codes to be allowed to
    pass silently, silently into the night. If you pass cwd='some/path'
    paver will chdir to 'some/path' before executing the command.

    If the dry_run option is True, the command will not
    actually be run."""
    def runpipe():
        kwargs = {'shell': True, 'stderr': subprocess.PIPE, 'cwd': cwd}
        if capture:
            kwargs['stdout'] = subprocess.PIPE
        p = subprocess.Popen(command, **kwargs)
        # communicate() drains both pipes while waiting. The previous
        # p.wait()-then-read() sequence could deadlock once the child
        # filled a pipe buffer, because nothing was consuming the pipes
        # while waiting for the process to exit.
        stdout, stderr = p.communicate()
        if p.returncode and not ignore_error:
            error(stderr)
            raise BuildFailure("Subprocess return code: %d" % p.returncode)

        if capture:
            return stdout

    # dry() logs the command and skips execution entirely on a dry run.
    return dry(command, runpipe)
|
||||
|
||||
|
||||
class _SimpleProxy(object):
|
||||
__initialized = False
|
||||
def __init__(self, rootobj, name):
|
||||
self.__rootobj = rootobj
|
||||
self.__name = name
|
||||
self.__initialized = True
|
||||
|
||||
def __get_object(self):
|
||||
return getattr(self.__rootobj, self.__name)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.__get_object(), attr)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
if self.__initialized:
|
||||
setattr(self.__get_object(), attr, value)
|
||||
else:
|
||||
super(_SimpleProxy, self).__setattr__(attr, value)
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
return self.__get_object()(*args, **kw)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.__get_object())
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self.__get_object())
|
||||
|
||||
# Late-bound proxies: code that does "from paver.easy import options"
# keeps seeing the live Environment state even if it is swapped out.
environment = _SimpleProxy(tasks, "environment")
options = _SimpleProxy(environment, "options")
call_task = _SimpleProxy(environment, "call_task")

# Direct re-exports from paver.tasks for pavement convenience.
call_pavement = tasks.call_pavement
task = tasks.task
needs = tasks.needs
cmdopts = tasks.cmdopts
consume_args = tasks.consume_args
no_auto = tasks.no_auto
BuildFailure = tasks.BuildFailure
PavementError = tasks.PavementError

# these are down here to avoid circular dependencies. Ideally, nothing would
# be using paver.easy other than pavements.
# path25 additionally provides pushd (a 2.5+ context manager).
if sys.version_info > (2,5):
    from paver.path25 import path, pushd
else:
    from paver.path import path

import paver.misctasks
|
|
@ -0,0 +1,67 @@
|
|||
"""Miscellaneous tasks that don't fit into one of the other groupings."""
|
||||
import os
|
||||
|
||||
from paver.easy import dry, path, task
|
||||
|
||||
_docsdir = os.path.join(os.path.dirname(__file__), "docs")
|
||||
if os.path.exists(_docsdir):
|
||||
@task
|
||||
def paverdocs():
|
||||
"""Open your web browser and display Paver's documentation."""
|
||||
import webbrowser
|
||||
webbrowser.open("file://"
|
||||
+ (os.path.join(os.path.abspath(_docsdir), 'index.html')))
|
||||
|
||||
@task
def minilib(options):
    """Create a Paver mini library that contains enough for a simple
    pavement.py to be installed using a generated setup.py. This
    is a good temporary measure until more people have deployed paver.
    The output file is 'paver-minilib.zip' in the current directory.

    Options:

    extra_files
        list of other paver modules to include (don't include the .py
        extension). By default, the following modules are included:
        defaults, path, release, setuputils, misctasks, options,
        tasks, easy
    """
    import paver
    # Locate the installed paver package so its sources can be zipped up.
    paverdir = path(paver.__file__).dirname()
    filelist = ['__init__', 'defaults', 'path', 'path25', 'release',
                'setuputils', "misctasks", "options", "tasks", "easy"]
    filelist.extend(options.get('extra_files', []))
    output_file = 'paver-minilib.zip'

    def generate_zip():
        # Deferred into a closure so that dry() can skip the actual zip
        # creation on a dry run.
        import zipfile
        destfile = zipfile.ZipFile(output_file, "w", zipfile.ZIP_DEFLATED)
        for filename in filelist:
            # Store each module under a "paver/" prefix inside the zip so
            # it is importable as the paver package from sys.path.
            destfile.write(
                paverdir / (filename + ".py"),
                "paver/" + (filename + ".py"))
        destfile.close()
    dry("Generate %s" % output_file, generate_zip)
|
||||
|
||||
@task
def generate_setup():
    """Generates a setup.py file that uses paver behind the scenes. This
    setup.py file will look in the directory that the user is running it
    in for a paver-minilib.zip and will add that to sys.path if available.
    Otherwise, it will just assume that paver is available."""
    from paver.easy import dry
    def write_setup():
        # Deferred into a closure so that dry() can skip touching the
        # filesystem on a dry run.
        setup = open("setup.py", "w")
        setup.write("""import os
if os.path.exists("paver-minilib.zip"):
    import sys
    sys.path.insert(0, "paver-minilib.zip")

import paver.tasks
paver.tasks.main()
""")
        setup.close()

    dry("Write setup.py", write_setup)
|
||||
|
|
@ -0,0 +1,171 @@
|
|||
class OptionsError(Exception):
    """Raised for structurally invalid option access (e.g. dotted
    assignment through a key that already holds a plain value)."""
    pass
|
||||
|
||||
class Bunch(dict):
    """A dictionary that provides attribute-style access.

    Callable values are invoked on item/attribute access, allowing
    lazily computed entries. Attribute reads/writes/deletes map onto
    item operations; missing attributes raise AttributeError (so
    hasattr() behaves correctly).
    """

    def __repr__(self):
        # sorted() instead of keys().sort(): identical result on
        # Python 2, and it also works on Python 3 where dict views have
        # no sort() method.
        keys = sorted(self.keys())
        args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
        return '%s(%s)' % (self.__class__.__name__, args)

    def __getitem__(self, key):
        # Callable values are evaluated on access (lazy options).
        item = dict.__getitem__(self, key)
        if callable(item):
            return item()
        return item

    def __getattr__(self, name):
        # Translate KeyError into AttributeError so attribute protocols
        # (hasattr, getattr with default) work as expected.
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    __setattr__ = dict.__setitem__

    def __delattr__(self, name):
        try:
            del self[name]
        except KeyError:
            raise AttributeError(name)
|
||||
|
||||
class Namespace(Bunch):
    """A Bunch that will search dictionaries contained within to find a value.
    The search order is set via the order() method. See the order method for
    more information about search order.
    """
    def __init__(self, d=None, **kw):
        # _sections tracks the keys of dict-valued entries (newest
        # first); _ordering, when not None, overrides it as the search
        # order. Both are set via object.__setattr__ (see __setattr__'s
        # underscore rule) before any items are stored.
        self._sections = []
        self._ordering = None
        self.update(d, **kw)

    def order(self, *keys, **kw):
        """Set the search order for this namespace. The arguments
        should be the list of keys in the order you wish to search,
        or a dictionary/Bunch that you want to search.
        Keys that are left out will not be searched. If you pass in
        no arguments, then the default ordering will be used. (The default
        is to search the global space first, then in the order in
        which the sections were created.)

        If you pass in a key name that is not a section, that
        key will be silently removed from the list.

        Keyword arguments are:

        add_rest=False
            put the sections you list at the front of the search
            and add the remaining sections to the end
        """
        if not keys:
            # No arguments: revert to the default (creation-order) search.
            self._ordering = None
            return

        order = []
        for item in keys:
            # Accept either a literal dict to search or the name of a
            # known section; anything else is silently dropped.
            if isinstance(item, dict) or item in self._sections:
                order.append(item)

        if kw.get('add_rest'):
            # this is not efficient. do we care? probably not.
            for item in self._sections:
                if item not in order:
                    order.append(item)
        self._ordering = order

    def clear(self):
        # Reset the search bookkeeping along with the stored items.
        self._ordering = None
        self._sections = []
        super(Namespace, self).clear()

    def setdotted(self, key, value):
        """Sets a namespace key, value pair where the key
        can use dotted notation to set sub-values. For example,
        the key "foo.bar" will set the "bar" value in the "foo"
        Bunch in this Namespace. If foo does not exist, it is created
        as a Bunch. If foo is a value, an OptionsError will be
        raised."""
        segments = key.split(".")
        obj = self
        segment = segments.pop(0)
        while segments:
            if segment not in obj:
                # Auto-create intermediate sections as Bunches.
                obj[segment] = Bunch()
            obj = obj[segment]
            if not isinstance(obj, dict):
                raise OptionsError("In setting option '%s', %s was already a value"
                                   % (key, segment))
            segment = segments.pop(0)
        obj[segment] = value

    def __setitem__(self, key, value):
        # Any dict-valued entry becomes a searchable section; newest
        # sections are searched first.
        if isinstance(value, dict):
            self._sections.insert(0, key)
        super(Namespace, self).__setitem__(key, value)

    def get(self, key, default=None):
        # Unlike dict.get, this honours the section search in
        # __getitem__.
        try:
            return self[key]
        except KeyError:
            return default

    def __getitem__(self, key):
        # Look in this namespace's own keys first, then in each section
        # (or literal dict) in the active search order.
        order = self._ordering
        if order is None:
            order = self._sections
        try:
            return super(Namespace, self).__getitem__(key)
        except KeyError:
            pass
        for section in order:
            if isinstance(section, dict):
                try:
                    return section[key]
                except KeyError:
                    pass
            else:
                try:
                    return self[section][key]
                except KeyError:
                    pass
        raise KeyError("Key %s not found in namespace" % key)

    def __setattr__(self, key, value):
        # Leading-underscore names are real attributes (internal
        # bookkeeping); everything else is stored as an item.
        if key.startswith("_"):
            object.__setattr__(self, key, value)
        else:
            self[key] = value

    def __delitem__(self, key):
        # Keep the section list in sync when a section is deleted.
        try:
            index = self._sections.index(key)
            del self._sections[index]
        except ValueError:
            pass
        super(Namespace, self).__delitem__(key)

    def update(self, d=None, **kw):
        """Update the namespace. This is less efficient than the standard
        dict.update but is necessary to keep track of the sections that we'll be
        searching."""
        items = []
        if d:
            # look up keys even though we call items
            # because that's what the dict.update
            # doc says
            if hasattr(d, 'keys'):
                items.extend(list(d.items()))
            else:
                items.extend(list(d))
        items.extend(list(kw.items()))
        # Route every assignment through __setitem__ so sections are
        # registered.
        for key, value in items:
            self[key] = value

    __call__ = update

    def setdefault(self, key, default):
        # Note: unlike dict.setdefault, the default argument is required.
        if not key in self:
            self[key] = default
            return default
        return self[key]
|
|
@ -0,0 +1,999 @@
|
|||
""" path.py - An object representing a path to a file or directory.
|
||||
|
||||
Example::
|
||||
|
||||
from path import path
|
||||
d = path('/home/guido/bin')
|
||||
for f in d.files('*.py'):
|
||||
f.chmod(0755)
|
||||
|
||||
This module requires Python 2.2 or later.
|
||||
|
||||
|
||||
:URL: http://www.jorendorff.com/articles/python/path
|
||||
:Author: Jason Orendorff <jason.orendorff\x40gmail\x2ecom> (and others - see the url!)
|
||||
:Date: 9 Mar 2007
|
||||
|
||||
This has been modified from the original to avoid dry run issues.
|
||||
"""
|
||||
|
||||
|
||||
# TODO
|
||||
# - Tree-walking functions don't avoid symlink loops. Matt Harrison
|
||||
# sent me a patch for this.
|
||||
# - Bug in write_text(). It doesn't support Universal newline mode.
|
||||
# - Better error message in listdir() when self isn't a
|
||||
# directory. (On Windows, the error message really sucks.)
|
||||
# - Make sure everything has a good docstring.
|
||||
# - Add methods for regex find and replace.
|
||||
# - guess_content_type() method?
|
||||
# - Perhaps support arguments to touch().
|
||||
|
||||
import sys, warnings, os, fnmatch, glob, shutil, codecs
|
||||
|
||||
try:
    from hashlib import md5
except ImportError:
    # compatibility for versions before 2.5
    import md5
    md5 = md5.new

__version__ = '2.2'
__all__ = ['path']

# Platform-specific support for path.owner
# (win32security is a third-party pywin32 module; pwd is POSIX-only.)
if os.name == 'nt':
    try:
        import win32security
    except ImportError:
        win32security = None
else:
    try:
        import pwd
    except ImportError:
        pwd = None

# Pre-2.3 support.  Are unicode filenames supported?
# On Python 2 builds with unicode filesystem support, path subclasses
# unicode instead of str and uses os.getcwdu.
_base = str
_getcwd = os.getcwd
try:
    if os.path.supports_unicode_filenames:
        _base = unicode
        _getcwd = os.getcwdu
except AttributeError:
    pass

# Pre-2.3 workaround for booleans
try:
    True, False
except NameError:
    True, False = 1, 0

# Pre-2.3 workaround for basestring.
try:
    basestring
except NameError:
    basestring = (str, unicode)

# Universal newline support
# NOTE(review): relies on the Python 2 'file' builtin; this probe does
# not exist on Python 3.
_textmode = 'r'
if hasattr(file, 'newlines'):
    _textmode = 'U'
|
||||
|
||||
|
||||
class TreeWalkWarning(Warning):
    """Warning emitted by the tree-walking methods (walk, walkdirs,
    walkfiles) when errors='warn' and a directory cannot be listed or
    accessed."""
    pass
|
||||
|
||||
class path(_base):
|
||||
""" Represents a filesystem path.
|
||||
|
||||
For documentation on individual methods, consult their
|
||||
counterparts in os.path.
|
||||
"""
|
||||
|
||||
# --- Special Python methods.
|
||||
|
||||
def __repr__(self):
|
||||
return 'path(%s)' % _base.__repr__(self)
|
||||
|
||||
# Adding a path and a string yields a path.
|
||||
def __add__(self, more):
|
||||
try:
|
||||
resultStr = _base.__add__(self, more)
|
||||
except TypeError: #Python bug
|
||||
resultStr = NotImplemented
|
||||
if resultStr is NotImplemented:
|
||||
return resultStr
|
||||
return self.__class__(resultStr)
|
||||
|
||||
def __radd__(self, other):
|
||||
if isinstance(other, basestring):
|
||||
return self.__class__(other.__add__(self))
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
# The / operator joins paths.
|
||||
def __div__(self, rel):
|
||||
""" fp.__div__(rel) == fp / rel == fp.joinpath(rel)
|
||||
|
||||
Join two path components, adding a separator character if
|
||||
needed.
|
||||
"""
|
||||
return self.__class__(os.path.join(self, rel))
|
||||
|
||||
# Make the / operator work even when true division is enabled.
|
||||
__truediv__ = __div__
|
||||
|
||||
def getcwd(cls):
|
||||
""" Return the current working directory as a path object. """
|
||||
return cls(_getcwd())
|
||||
getcwd = classmethod(getcwd)
|
||||
|
||||
def chdir(self):
|
||||
"""Change current directory."""
|
||||
os.chdir(self)
|
||||
|
||||
|
||||
# --- Operations on path strings.
|
||||
|
||||
isabs = os.path.isabs
|
||||
def abspath(self): return self.__class__(os.path.abspath(self))
|
||||
def normcase(self): return self.__class__(os.path.normcase(self))
|
||||
def normpath(self): return self.__class__(os.path.normpath(self))
|
||||
def realpath(self): return self.__class__(os.path.realpath(self))
|
||||
def expanduser(self): return self.__class__(os.path.expanduser(self))
|
||||
def expandvars(self): return self.__class__(os.path.expandvars(self))
|
||||
def dirname(self): return self.__class__(os.path.dirname(self))
|
||||
basename = os.path.basename
|
||||
|
||||
def expand(self):
|
||||
""" Clean up a filename by calling expandvars(),
|
||||
expanduser(), and normpath() on it.
|
||||
|
||||
This is commonly everything needed to clean up a filename
|
||||
read from a configuration file, for example.
|
||||
"""
|
||||
return self.expandvars().expanduser().normpath()
|
||||
|
||||
def _get_namebase(self):
|
||||
base, ext = os.path.splitext(self.name)
|
||||
return base
|
||||
|
||||
def _get_ext(self):
|
||||
f, ext = os.path.splitext(_base(self))
|
||||
return ext
|
||||
|
||||
def _get_drive(self):
|
||||
drive, r = os.path.splitdrive(self)
|
||||
return self.__class__(drive)
|
||||
|
||||
parent = property(
|
||||
dirname, None, None,
|
||||
""" This path's parent directory, as a new path object.
|
||||
|
||||
For example, path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib')
|
||||
""")
|
||||
|
||||
name = property(
|
||||
basename, None, None,
|
||||
""" The name of this file or directory without the full path.
|
||||
|
||||
For example, path('/usr/local/lib/libpython.so').name == 'libpython.so'
|
||||
""")
|
||||
|
||||
namebase = property(
|
||||
_get_namebase, None, None,
|
||||
""" The same as path.name, but with one file extension stripped off.
|
||||
|
||||
For example, path('/home/guido/python.tar.gz').name == 'python.tar.gz',
|
||||
but path('/home/guido/python.tar.gz').namebase == 'python.tar'
|
||||
""")
|
||||
|
||||
ext = property(
|
||||
_get_ext, None, None,
|
||||
""" The file extension, for example '.py'. """)
|
||||
|
||||
drive = property(
|
||||
_get_drive, None, None,
|
||||
""" The drive specifier, for example 'C:'.
|
||||
This is always empty on systems that don't use drive specifiers.
|
||||
""")
|
||||
|
||||
def splitpath(self):
|
||||
""" p.splitpath() -> Return (p.parent, p.name). """
|
||||
parent, child = os.path.split(self)
|
||||
return self.__class__(parent), child
|
||||
|
||||
def splitdrive(self):
|
||||
""" p.splitdrive() -> Return (p.drive, <the rest of p>).
|
||||
|
||||
Split the drive specifier from this path. If there is
|
||||
no drive specifier, p.drive is empty, so the return value
|
||||
is simply (path(''), p). This is always the case on Unix.
|
||||
"""
|
||||
drive, rel = os.path.splitdrive(self)
|
||||
return self.__class__(drive), rel
|
||||
|
||||
def splitext(self):
|
||||
""" p.splitext() -> Return (p.stripext(), p.ext).
|
||||
|
||||
Split the filename extension from this path and return
|
||||
the two parts. Either part may be empty.
|
||||
|
||||
The extension is everything from '.' to the end of the
|
||||
last path segment. This has the property that if
|
||||
(a, b) == p.splitext(), then a + b == p.
|
||||
"""
|
||||
filename, ext = os.path.splitext(self)
|
||||
return self.__class__(filename), ext
|
||||
|
||||
def stripext(self):
|
||||
""" p.stripext() -> Remove one file extension from the path.
|
||||
|
||||
For example, path('/home/guido/python.tar.gz').stripext()
|
||||
returns path('/home/guido/python.tar').
|
||||
"""
|
||||
return self.splitext()[0]
|
||||
|
||||
if hasattr(os.path, 'splitunc'):
|
||||
def splitunc(self):
|
||||
unc, rest = os.path.splitunc(self)
|
||||
return self.__class__(unc), rest
|
||||
|
||||
def _get_uncshare(self):
|
||||
unc, r = os.path.splitunc(self)
|
||||
return self.__class__(unc)
|
||||
|
||||
uncshare = property(
|
||||
_get_uncshare, None, None,
|
||||
""" The UNC mount point for this path.
|
||||
This is empty for paths on local drives. """)
|
||||
|
||||
def joinpath(self, *args):
|
||||
""" Join two or more path components, adding a separator
|
||||
character (os.sep) if needed. Returns a new path
|
||||
object.
|
||||
"""
|
||||
return self.__class__(os.path.join(self, *args))
|
||||
|
||||
def splitall(self):
|
||||
r""" Return a list of the path components in this path.
|
||||
|
||||
The first item in the list will be a path. Its value will be
|
||||
either os.curdir, os.pardir, empty, or the root directory of
|
||||
this path (for example, '/' or 'C:\\'). The other items in
|
||||
the list will be strings.
|
||||
|
||||
``path.path.joinpath(*result)`` will yield the original path.
|
||||
"""
|
||||
parts = []
|
||||
loc = self
|
||||
while loc != os.curdir and loc != os.pardir:
|
||||
prev = loc
|
||||
loc, child = prev.splitpath()
|
||||
if loc == prev:
|
||||
break
|
||||
parts.append(child)
|
||||
parts.append(loc)
|
||||
parts.reverse()
|
||||
return parts
|
||||
|
||||
def relpath(self):
|
||||
""" Return this path as a relative path,
|
||||
based from the current working directory.
|
||||
"""
|
||||
cwd = self.__class__(os.getcwd())
|
||||
return cwd.relpathto(self)
|
||||
|
||||
def relpathto(self, dest):
|
||||
""" Return a relative path from self to dest.
|
||||
|
||||
If there is no relative path from self to dest, for example if
|
||||
they reside on different drives in Windows, then this returns
|
||||
dest.abspath().
|
||||
"""
|
||||
origin = self.abspath()
|
||||
dest = self.__class__(dest).abspath()
|
||||
|
||||
orig_list = origin.normcase().splitall()
|
||||
# Don't normcase dest! We want to preserve the case.
|
||||
dest_list = dest.splitall()
|
||||
|
||||
if orig_list[0] != os.path.normcase(dest_list[0]):
|
||||
# Can't get here from there.
|
||||
return dest
|
||||
|
||||
# Find the location where the two paths start to differ.
|
||||
i = 0
|
||||
for start_seg, dest_seg in zip(orig_list, dest_list):
|
||||
if start_seg != os.path.normcase(dest_seg):
|
||||
break
|
||||
i += 1
|
||||
|
||||
# Now i is the point where the two paths diverge.
|
||||
# Need a certain number of "os.pardir"s to work up
|
||||
# from the origin to the point of divergence.
|
||||
segments = [os.pardir] * (len(orig_list) - i)
|
||||
# Need to add the diverging part of dest_list.
|
||||
segments += dest_list[i:]
|
||||
if len(segments) == 0:
|
||||
# If they happen to be identical, use os.curdir.
|
||||
relpath = os.curdir
|
||||
else:
|
||||
relpath = os.path.join(*segments)
|
||||
return self.__class__(relpath)
|
||||
|
||||
# --- Listing, searching, walking, and matching
|
||||
|
||||
def listdir(self, pattern=None):
|
||||
""" D.listdir() -> List of items in this directory.
|
||||
|
||||
Use D.files() or D.dirs() instead if you want a listing
|
||||
of just files or just subdirectories.
|
||||
|
||||
The elements of the list are path objects.
|
||||
|
||||
With the optional 'pattern' argument, this only lists
|
||||
items whose names match the given pattern.
|
||||
"""
|
||||
names = os.listdir(self)
|
||||
if pattern is not None:
|
||||
names = fnmatch.filter(names, pattern)
|
||||
return [self / child for child in names]
|
||||
|
||||
def dirs(self, pattern=None):
|
||||
""" D.dirs() -> List of this directory's subdirectories.
|
||||
|
||||
The elements of the list are path objects.
|
||||
This does not walk recursively into subdirectories
|
||||
(but see path.walkdirs).
|
||||
|
||||
With the optional 'pattern' argument, this only lists
|
||||
directories whose names match the given pattern. For
|
||||
example::
|
||||
d.dirs('build-*')
|
||||
"""
|
||||
return [p for p in self.listdir(pattern) if p.isdir()]
|
||||
|
||||
def files(self, pattern=None):
|
||||
""" D.files() -> List of the files in this directory.
|
||||
|
||||
The elements of the list are path objects.
|
||||
This does not walk into subdirectories (see path.walkfiles).
|
||||
|
||||
With the optional 'pattern' argument, this only lists files
|
||||
whose names match the given pattern. For example::
|
||||
d.files('*.pyc')
|
||||
"""
|
||||
|
||||
return [p for p in self.listdir(pattern) if p.isfile()]
|
||||
|
||||
def walk(self, pattern=None, errors='strict'):
|
||||
""" D.walk() -> iterator over files and subdirs, recursively.
|
||||
|
||||
The iterator yields path objects naming each child item of
|
||||
this directory and its descendants. This requires that
|
||||
D.isdir().
|
||||
|
||||
This performs a depth-first traversal of the directory tree.
|
||||
Each directory is returned just before all its children.
|
||||
|
||||
The errors= keyword argument controls behavior when an
|
||||
error occurs. The default is 'strict', which causes an
|
||||
exception. The other allowed values are 'warn', which
|
||||
reports the error via warnings.warn(), and 'ignore'.
|
||||
"""
|
||||
if errors not in ('strict', 'warn', 'ignore'):
|
||||
raise ValueError("invalid errors parameter")
|
||||
|
||||
try:
|
||||
childList = self.listdir()
|
||||
except Exception:
|
||||
if errors == 'ignore':
|
||||
return
|
||||
elif errors == 'warn':
|
||||
warnings.warn(
|
||||
"Unable to list directory '%s': %s"
|
||||
% (self, sys.exc_info()[1]),
|
||||
TreeWalkWarning)
|
||||
return
|
||||
else:
|
||||
raise
|
||||
|
||||
for child in childList:
|
||||
if pattern is None or child.fnmatch(pattern):
|
||||
yield child
|
||||
try:
|
||||
isdir = child.isdir()
|
||||
except Exception:
|
||||
if errors == 'ignore':
|
||||
isdir = False
|
||||
elif errors == 'warn':
|
||||
warnings.warn(
|
||||
"Unable to access '%s': %s"
|
||||
% (child, sys.exc_info()[1]),
|
||||
TreeWalkWarning)
|
||||
isdir = False
|
||||
else:
|
||||
raise
|
||||
|
||||
if isdir:
|
||||
for item in child.walk(pattern, errors):
|
||||
yield item
|
||||
|
||||
def walkdirs(self, pattern=None, errors='strict'):
    """ D.walkdirs() -> iterator over subdirectories, recursively.

    pattern - optional wildcard; only directories whose names match
              are yielded, e.g. ``mydir.walkdirs('*test')`` yields
              only directories with names ending in 'test'.
    errors  - 'strict' (default) raises on OS errors, 'warn' reports
              them via warnings.warn(TreeWalkWarning), 'ignore'
              skips them silently.
    """
    if errors not in ('strict', 'warn', 'ignore'):
        raise ValueError("invalid errors parameter")

    try:
        subdirs = self.dirs()
    except Exception:
        if errors == 'strict':
            raise
        if errors == 'warn':
            warnings.warn(
                "Unable to list directory '%s': %s"
                % (self, sys.exc_info()[1]),
                TreeWalkWarning)
        return

    for subdir in subdirs:
        if pattern is None or subdir.fnmatch(pattern):
            yield subdir
        for nested in subdir.walkdirs(pattern, errors):
            yield nested
|
||||
|
||||
def walkfiles(self, pattern=None, errors='strict'):
    """ D.walkfiles() -> iterator over files in D, recursively.

    pattern - optional wildcard; only files whose names match are
              yielded, e.g. ``mydir.walkfiles('*.tmp')`` yields only
              files with the .tmp extension.
    errors  - 'strict' (default) raises on OS errors, 'warn' reports
              them via warnings.warn(TreeWalkWarning), 'ignore'
              skips them silently.
    """
    if errors not in ('strict', 'warn', 'ignore'):
        raise ValueError("invalid errors parameter")

    try:
        childList = self.listdir()
    except Exception:
        if errors == 'ignore':
            return
        elif errors == 'warn':
            warnings.warn(
                "Unable to list directory '%s': %s"
                % (self, sys.exc_info()[1]),
                TreeWalkWarning)
            return
        else:
            raise

    for child in childList:
        try:
            isfile = child.isfile()
            isdir = not isfile and child.isdir()
        # BUG FIX: was a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception for
        # consistency with walk() and walkdirs().
        except Exception:
            if errors == 'ignore':
                continue
            elif errors == 'warn':
                warnings.warn(
                    # BUG FIX: the message previously interpolated
                    # 'self' (the directory being walked) instead of
                    # 'child', the entry that was actually
                    # inaccessible.
                    "Unable to access '%s': %s"
                    % (child, sys.exc_info()[1]),
                    TreeWalkWarning)
                continue
            else:
                raise

        if isfile:
            if pattern is None or child.fnmatch(pattern):
                yield child
        elif isdir:
            for f in child.walkfiles(pattern, errors):
                yield f
|
||||
|
||||
def fnmatch(self, pattern):
    """ Return True if self.name matches the wildcard *pattern*
    (e.g. ``'*.py'``), using fnmatch.fnmatch semantics.
    """
    own_name = self.name
    return fnmatch.fnmatch(own_name, pattern)
|
||||
|
||||
def glob(self, pattern):
    """ Return a list of path objects that match *pattern*.

    pattern - a path relative to this directory, with wildcards;
              e.g. path('/users').glob('*/bin/*') lists every file
              users have in their bin directories.
    """
    make = self.__class__
    matches = glob.glob(_base(self / pattern))
    return [make(match) for match in matches]
|
||||
|
||||
|
||||
# --- Reading or writing an entire file at once.
|
||||
|
||||
# TODO: file writing should not occur during dry runs XXX
|
||||
def open(self, mode='r'):
    """ Open this file. Return a file object. """
    # Thin wrapper over the Python 2 'file' builtin; the path object
    # itself is passed as the filename (it is a string subclass).
    return file(self, mode)
|
||||
|
||||
def bytes(self):
    """ Read this file in binary mode and return its entire
    content as a string. """
    handle = self.open('rb')
    try:
        return handle.read()
    finally:
        handle.close()
|
||||
|
||||
def write_bytes(self, bytes, append=False):
    """ Write the given bytes to this file.

    append - when True, add to the end of any existing file;
             when False (default), overwrite it.
    """
    handle = self.open('ab' if append else 'wb')
    try:
        handle.write(bytes)
    finally:
        handle.close()
|
||||
|
||||
def text(self, encoding=None, errors='strict'):
|
||||
r""" Open this file, read it in, return the content as a string.
|
||||
|
||||
This uses 'U' mode in Python 2.3 and later, so '\r\n' and '\r'
|
||||
are automatically translated to '\n'.
|
||||
|
||||
Optional arguments:
|
||||
|
||||
encoding - The Unicode encoding (or character set) of
|
||||
the file. If present, the content of the file is
|
||||
decoded and returned as a unicode object; otherwise
|
||||
it is returned as an 8-bit str.
|
||||
errors - How to handle Unicode errors; see help(str.decode)
|
||||
for the options. Default is 'strict'.
|
||||
"""
|
||||
if encoding is None:
|
||||
# 8-bit
|
||||
f = self.open(_textmode)
|
||||
try:
|
||||
return f.read()
|
||||
finally:
|
||||
f.close()
|
||||
else:
|
||||
# Unicode
|
||||
f = codecs.open(self, 'r', encoding, errors)
|
||||
# (Note - Can't use 'U' mode here, since codecs.open
|
||||
# doesn't support 'U' mode, even in Python 2.3.)
|
||||
try:
|
||||
t = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
return (t.replace(u'\r\n', u'\n')
|
||||
.replace(u'\r\x85', u'\n')
|
||||
.replace(u'\r', u'\n')
|
||||
.replace(u'\x85', u'\n')
|
||||
.replace(u'\u2028', u'\n'))
|
||||
|
||||
def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False):
    r""" Write the given text to this file.

    The default behavior is to overwrite any existing file; to
    append instead, use the 'append=True' keyword argument.

    text     - str or unicode content to write.
    encoding - the Unicode encoding used when 'text' is unicode;
               must be omitted (None) for 8-bit str input.
    errors   - Unicode encoding error handling ('strict' default;
               see help(unicode.encode)); only used when 'text' is
               unicode.
    linesep  - end-of-line sequence applied to every line: the
               standard endings in 'text' ('\n', '\r', '\r\n' — and
               for unicode also u'\x85', u'\r\x85', u'\u2028') are
               first normalized to '\n' and then replaced with this
               value.  Default is os.linesep.  Pass None to leave
               newlines exactly as given (like fopen(..., "w") in C).
    append   - True appends to an existing file; False (default)
               overwrites.  Warning: appending Unicode data encoded
               differently from the file's existing content yields
               mixed-encoding data.
    """
    if isinstance(text, unicode):
        if linesep is not None:
            # Normalize every standard end-of-line sequence to '\n',
            # then install the requested separator.
            text = (text.replace(u'\r\n', u'\n')
                        .replace(u'\r\x85', u'\n')
                        .replace(u'\r', u'\n')
                        .replace(u'\x85', u'\n')
                        .replace(u'\u2028', u'\n'))
            text = text.replace(u'\n', linesep)
        if encoding is None:
            encoding = sys.getdefaultencoding()
        bytes = text.encode(encoding, errors)
    else:
        # It is an error to specify an encoding if 'text' is
        # an 8-bit string.
        assert encoding is None
        if linesep is not None:
            text = (text.replace('\r\n', '\n')
                        .replace('\r', '\n'))
            text = text.replace('\n', linesep)
        # BUG FIX: previously 'bytes' was only assigned inside the
        # 'linesep is not None' branch, so write_text(str_data,
        # linesep=None) raised NameError; the (unmodified) text is
        # now written as-is in that case.
        bytes = text
    self.write_bytes(bytes, append)
|
||||
|
||||
def lines(self, encoding=None, errors='strict', retain=True):
    r""" Read this file and return its lines as a list.

    encoding - character set of the file; None (default) reads
               8-bit (non-Unicode) str lines.
    errors   - Unicode error handling; see help(str.decode).
               Default is 'strict'.
    retain   - keep trailing newline characters (normalized to
               '\n') when true; strip them when false.
               Default is True.
    """
    if encoding is None and retain:
        # Fast path: let the ('U'-mode) file object split the lines.
        handle = self.open(_textmode)
        try:
            return handle.readlines()
        finally:
            handle.close()
    # Otherwise reuse text(), which already normalizes newlines.
    return self.text(encoding, errors).splitlines(retain)
|
||||
|
||||
def write_lines(self, lines, encoding=None, errors='strict',
                linesep=os.linesep, append=False):
    r""" Write the given lines of text to this file.

    By default this overwrites any existing file at this path.

    lines    - A list of strings (str or unicode).
    encoding - A Unicode encoding to use; applies only to Unicode
               lines.
    errors   - How to handle errors in Unicode encoding; applies
               only to Unicode lines.
    linesep  - The desired line ending, applied to every line.  Any
               existing standard line ending ('\r', '\n', '\r\n',
               u'\x85', u'\r\x85', u'\u2028') is stripped first.
               Default is os.linesep.  Specify None to write the
               lines as-is, like file.writelines().
    append   - True appends to an existing file instead of
               overwriting it.  Warning: appending Unicode data
               encoded differently from the file's existing content
               yields mixed-encoding data.
    """
    if append:
        mode = 'ab'
    else:
        mode = 'wb'
    f = self.open(mode)
    try:
        for line in lines:
            isUnicode = isinstance(line, unicode)
            if linesep is not None:
                # Strip off any existing line-end and add the
                # specified linesep string.
                if isUnicode:
                    # Two-character Unicode line endings first.
                    if line[-2:] in (u'\r\n', u'\x0d\x85'):
                        line = line[:-2]
                    elif line[-1:] in (u'\r', u'\n',
                                       u'\x85', u'\u2028'):
                        line = line[:-1]
                else:
                    if line[-2:] == '\r\n':
                        line = line[:-2]
                    elif line[-1:] in ('\r', '\n'):
                        line = line[:-1]
                line += linesep
            if isUnicode:
                if encoding is None:
                    # NOTE: mutates the local 'encoding', so the
                    # default encoding chosen for the first Unicode
                    # line is reused for all later lines.
                    encoding = sys.getdefaultencoding()
                line = line.encode(encoding, errors)
            f.write(line)
    finally:
        f.close()
|
||||
|
||||
def read_md5(self):
    """ Compute and return the MD5 digest (raw bytes) of this file.

    The file is streamed in 8 KB chunks, so arbitrarily large
    files can be hashed without loading them fully into memory.
    """
    digest = md5()
    handle = self.open('rb')
    try:
        while True:
            chunk = handle.read(8192)
            if not chunk:
                break
            digest.update(chunk)
    finally:
        handle.close()
    return digest.digest()
|
||||
|
||||
# --- Methods for querying the filesystem.

# These delegate straight to os.path; the path object itself (a
# string subclass) is passed as the argument.
exists = os.path.exists
isdir = os.path.isdir
isfile = os.path.isfile
islink = os.path.islink
ismount = os.path.ismount

# os.path.samefile is not available on every platform.
if hasattr(os.path, 'samefile'):
    samefile = os.path.samefile

getatime = os.path.getatime
atime = property(
    getatime, None, None,
    """ Last access time of the file. """)

getmtime = os.path.getmtime
mtime = property(
    getmtime, None, None,
    """ Last-modified time of the file. """)

# os.path.getctime may be missing on some platforms, hence the guard.
if hasattr(os.path, 'getctime'):
    getctime = os.path.getctime
    ctime = property(
        getctime, None, None,
        """ Creation time of the file. """)

getsize = os.path.getsize
size = property(
    getsize, None, None,
    """ Size of the file, in bytes. """)
|
||||
|
||||
if hasattr(os, 'access'):
    def access(self, mode):
        """ Return True if the current user has access to this path.

        mode - One of the constants os.F_OK, os.R_OK, os.W_OK, os.X_OK
        """
        return os.access(self, mode)
|
||||
|
||||
def stat(self):
    """ Perform a stat() system call on this path. """
    return os.stat(self)

def lstat(self):
    """ Like path.stat(), but do not follow symbolic links. """
    return os.lstat(self)
|
||||
|
||||
def get_owner(self):
    r""" Return the name of the owner of this file or directory.

    This follows symbolic links.

    On Windows, this returns a name of the form ur'DOMAIN\User Name'.
    On Windows, a group can own a file or directory.
    """
    if os.name == 'nt':
        # win32security is presumably bound to None elsewhere in this
        # module when the win32all extensions are unavailable — TODO
        # confirm against the module's import section.
        if win32security is None:
            raise Exception("path.owner requires win32all to be installed")
        desc = win32security.GetFileSecurity(
            self, win32security.OWNER_SECURITY_INFORMATION)
        sid = desc.GetSecurityDescriptorOwner()
        account, domain, typecode = win32security.LookupAccountSid(None, sid)
        return domain + u'\\' + account
    else:
        # POSIX: resolve the stat uid through the password database.
        if pwd is None:
            raise NotImplementedError("path.owner is not implemented on this platform.")
        st = self.stat()
        return pwd.getpwuid(st.st_uid).pw_name

owner = property(
    get_owner, None, None,
    """ Name of the owner of this file or directory. """)
|
||||
|
||||
if hasattr(os, 'statvfs'):
    def statvfs(self):
        """ Perform a statvfs() system call on this path. """
        return os.statvfs(self)

if hasattr(os, 'pathconf'):
    def pathconf(self, name):
        """ Return the os.pathconf() configuration value *name*
        for this path. """
        return os.pathconf(self, name)
|
||||
|
||||
|
||||
# --- Modifying operations on files and directories
|
||||
|
||||
def utime(self, times):
    """ Set the access and modified times of this file. """
    os.utime(self, times)

def chmod(self, mode):
    """ Change the permission bits of this path to *mode*. """
    os.chmod(self, mode)

if hasattr(os, 'chown'):
    def chown(self, uid, gid):
        """ Change the owner and group ids of this path (POSIX only). """
        os.chown(self, uid, gid)

def rename(self, new):
    """ Rename this path to *new*; honors Paver's dry-run mode via dry(). """
    dry("rename %s to %s" % (self, new), os.rename, self, new)

def renames(self, new):
    """ Like rename(), but also creates intermediate directories and
    prunes emptied ones (os.renames); honors dry-run mode. """
    dry("renames %s to %s" % (self, new), os.renames, self, new)
|
||||
|
||||
|
||||
# --- Create/delete operations on directories
|
||||
|
||||
def mkdir(self, mode=0777):
    """ Create this directory; a no-op if it already exists.
    Honors dry-run mode via dry(). """
    if not self.exists():
        dry("mkdir %s (mode %s)" % (self, mode), os.mkdir, self, mode)

def makedirs(self, mode=0777):
    """ Create this directory and any missing parents; a no-op if
    it already exists.  Honors dry-run mode. """
    if not self.exists():
        dry("makedirs %s (mode %s)" % (self, mode), os.makedirs, self, mode)

def rmdir(self):
    """ Remove this (empty) directory if it exists.  Honors
    dry-run mode. """
    if self.exists():
        dry("rmdir %s" % (self), os.rmdir, self)

def removedirs(self):
    """ Remove this directory and any parents left empty
    (os.removedirs) if it exists.  Honors dry-run mode. """
    if self.exists():
        dry("removedirs %s" % (self), os.removedirs, self)
|
||||
|
||||
|
||||
# --- Modifying operations on files
|
||||
|
||||
def touch(self):
    """ Set the access/modified times of this file to the current time.
    Create the file if it does not exist.
    """
    def do_touch():
        # O_CREAT makes this create-if-missing; utime(None) stamps
        # the current time.
        fd = os.open(self, os.O_WRONLY | os.O_CREAT, 0666)
        os.close(fd)
        os.utime(self, None)
    dry("touch %s" % (self), do_touch)

def remove(self):
    """ Delete this file if it exists.  Honors dry-run mode. """
    if self.exists():
        dry("remove %s" % (self), os.remove, self)

def unlink(self):
    """ Delete this file if it exists (os.unlink variant of
    remove()).  Honors dry-run mode. """
    if self.exists():
        dry("unlink %s" % (self), os.unlink, self)
|
||||
|
||||
|
||||
# --- Links
|
||||
# TODO: mark these up for dry run XXX
|
||||
|
||||
if hasattr(os, 'link'):
    def link(self, newpath):
        """ Create a hard link at 'newpath', pointing to this file. """
        os.link(self, newpath)

if hasattr(os, 'symlink'):
    def symlink(self, newlink):
        """ Create a symbolic link at 'newlink', pointing here. """
        os.symlink(self, newlink)

if hasattr(os, 'readlink'):
    def readlink(self):
        """ Return the path to which this symbolic link points.

        The result may be an absolute or a relative path.
        """
        return self.__class__(os.readlink(self))

    def readlinkabs(self):
        """ Return the path to which this symbolic link points.

        The result is always an absolute path.
        """
        p = self.readlink()
        if p.isabs():
            return p
        else:
            # A relative target is resolved against this link's parent.
            return (self.parent / p).abspath()
|
||||
|
||||
|
||||
# --- High-level functions from shutil
|
||||
|
||||
def copy(self, dst):
    """ Copy this file to *dst* (shutil.copy).  Honors dry-run mode. """
    dry("copy %s %s" % (self, dst), shutil.copy, self, dst)

def copytree(self, dst, *args, **kw):
    """ Recursively copy this directory tree to *dst*
    (shutil.copytree); extra arguments are forwarded.  Honors
    dry-run mode. """
    dry("copytree %s %s" % (self, dst), shutil.copytree,
        self, dst, *args, **kw)

if hasattr(shutil, 'move'):
    def move(self, dst):
        """ Move this path to *dst* (shutil.move).  Honors dry-run mode. """
        dry("move %s %s" % (self, dst), shutil.move, self, dst)

def rmtree(self, *args, **kw):
    """ Recursively delete this directory tree if it exists
    (shutil.rmtree); extra arguments are forwarded.  Honors
    dry-run mode. """
    if self.exists():
        dry("rmtree %s %s %s" % (self, args, kw), shutil.rmtree,
            self, *args, **kw)
|
||||
|
||||
|
||||
# --- Special stuff from os
|
||||
|
||||
if hasattr(os, 'chroot'):
    def chroot(self):
        """ Change the process's root directory to this path (POSIX). """
        os.chroot(self)

if hasattr(os, 'startfile'):
    def startfile(self):
        """ Open this file with its associated application (Windows). """
        os.startfile(self)
|
||||
|
||||
from paver.easy import dry
|
|
@ -0,0 +1,35 @@
|
|||
"""Python 2.5+ path module that adds with-statement features."""
|
||||
from __future__ import with_statement
|
||||
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
|
||||
from paver.path import path
|
||||
from paver import tasks
|
||||
|
||||
__all__ = ['path', 'pushd']
|
||||
|
||||
@contextmanager
def pushd(dir):
    '''A context manager (Python 2.5+ only) for stepping into a
    directory and automatically coming back to the previous one.
    The original directory is returned.  Usage is like this::

        from __future__ import with_statement
        # the above line is only needed for Python 2.5

        from paver.easy import *

        @task
        def my_task():
            with pushd('new/directory') as old_dir:
                ...do stuff...
    '''
    starting_point = os.getcwd()
    tasks.environment.info('cd %s' % dir)
    os.chdir(dir)
    try:
        yield starting_point
        tasks.environment.info('cd %s' % starting_point)
    finally:
        # Always return to the original directory, even on error.
        os.chdir(starting_point)
|
|
@ -0,0 +1,19 @@
|
|||
"""Release metadata for Paver."""
|
||||
|
||||
from paver.options import Bunch
|
||||
from paver.tasks import VERSION
|
||||
|
||||
# Distutils/setuptools metadata describing the Paver distribution
# itself, packaged as a Bunch for use as setup() keyword arguments.
setup_meta=Bunch(
    name='Paver',
    version=VERSION,
    description='Easy build, distribution and deployment scripting',
    long_description="""Paver is a Python-based build/distribution/deployment scripting tool along the
lines of Make or Rake. What makes Paver unique is its integration with
commonly used Python libraries. Common tasks that were easy before remain
easy. More importantly, dealing with *your* applications specific needs and
requirements is also easy.""",
    author='Kevin Dangoor',
    author_email='dangoor+paver@gmail.com',
    url='http://www.blueskyonmars.com/projects/paver/',
    packages=['paver', 'paver.cog']
    )
|
|
@ -0,0 +1,245 @@
|
|||
"""Integrates distutils/setuptools with Paver."""
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import distutils
|
||||
from fnmatch import fnmatchcase
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
try:
|
||||
from setuptools import dist
|
||||
except ImportError:
|
||||
from distutils import dist
|
||||
from distutils.errors import DistutilsModuleError
|
||||
_Distribution = dist.Distribution
|
||||
|
||||
from distutils import debug
|
||||
# debug.DEBUG = True
|
||||
|
||||
from paver.options import Bunch
|
||||
|
||||
try:
|
||||
import setuptools
|
||||
import pkg_resources
|
||||
has_setuptools = True
|
||||
except ImportError:
|
||||
has_setuptools = False
|
||||
|
||||
# our commands can have '.' in them, so we'll monkeypatch this
# expression
dist.command_re = re.compile (r'^[a-zA-Z]([a-zA-Z0-9_\.]*)$')

from paver import tasks

# Public names of this module (extended further below when
# setuptools is available).
__ALL__ = ['find_package_data']

# find_package_data is an Ian Bicking creation.

# Provided as an attribute, so you can append to these instead
# of replicating them:
standard_exclude = ('*.py', '*.pyc', '*~', '.*', '*.bak', '*.swp*')
standard_exclude_directories = ('.*', 'CVS', '_darcs', './build',
                                './dist', 'EGG-INFO', '*.egg-info')
|
||||
|
||||
def find_package_data(
    where='.', package='',
    exclude=None,
    exclude_directories=None,
    only_in_packages=True,
    show_ignored=False):
    """
    Return a dictionary suitable for use in ``package_data``
    in a distutils ``setup.py`` file.

    The dictionary looks like::

        {'package': [files]}

    Where ``files`` is a list of all the files in that package that
    don't match anything in ``exclude`` (default:
    ``standard_exclude``; ``exclude_directories`` defaults to
    ``standard_exclude_directories``).

    If ``only_in_packages`` is true, then top-level directories that
    are not packages won't be included (but directories under packages
    will).

    Directories matching any pattern in ``exclude_directories`` will
    be ignored; by default directories with leading ``.``, ``CVS``,
    and ``_darcs`` will be ignored.

    If ``show_ignored`` is true, then all the files that aren't
    included in package data are shown on stderr (for debugging
    purposes).

    Note patterns use wildcards, or can be exact paths (including
    leading ``./``), and all searching is case-insensitive.

    This function is by Ian Bicking.
    """
    # Late-bound defaults so appending to the module-level tuples
    # (as the comment above them suggests) affects calls made later.
    if exclude is None:
        exclude = standard_exclude
    if exclude_directories is None:
        exclude_directories = standard_exclude_directories

    out = {}
    # Each stack entry: (directory to scan, file-name prefix, owning
    # dotted package, whether non-package dirs are still skipped).
    stack = [(convert_path(where), '', package, only_in_packages)]
    while stack:
        where, prefix, package, only_in_packages = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if os.path.isdir(fn):
                bad_name = False
                for pattern in exclude_directories:
                    if (fnmatchcase(name, pattern)
                        or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            # BUG FIX: was 'print >> sys.stderr, ...',
                            # Python-2-only syntax that made this
                            # module unimportable under Python 3;
                            # sys.stderr.write emits the same output
                            # on both.
                            sys.stderr.write(
                                "Directory %s ignored by pattern %s\n"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                if os.path.isfile(os.path.join(fn, '__init__.py')):
                    if not package:
                        new_package = name
                    else:
                        new_package = package + '.' + name
                    # Entering a package resets the prefix: its files
                    # are recorded relative to the package directory.
                    stack.append((fn, '', new_package, False))
                else:
                    stack.append((fn, prefix + name + '/', package, only_in_packages))
            elif package or not only_in_packages:
                # is a file
                bad_name = False
                for pattern in exclude:
                    if (fnmatchcase(name, pattern)
                        or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            sys.stderr.write(
                                "File %s ignored by pattern %s\n"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                out.setdefault(package, []).append(prefix+name)
    return out
|
||||
|
||||
class DistutilsTask(tasks.Task):
    """Adapts a distutils/setuptools command class so that it can be
    invoked as a Paver task."""

    def __init__(self, distribution, command_name, command_class):
        # Collapse a duplicated trailing module/class segment so the
        # dotted task name stays readable.
        segments = str(command_class).split(".")
        if segments[-2] == segments[-1]:
            del segments[-2]
        self.name = ".".join(segments)
        self.__name__ = self.name
        self.distribution = distribution
        self.command_name = command_name
        self.shortname = _get_shortname(command_name)
        self.command_class = command_class
        self.option_names = set()
        self.needs = []
        self.user_options = command_class.user_options
        # Parse distutils config files (setup.cfg and friends).
        distribution.parse_config_files()

    def __call__(self, *args, **kw):
        # Feed the Paver options for this command into the distutils
        # option machinery, then delegate to run_command.
        section = tasks.environment.options.get(self.shortname, {})
        opt_dict = self.distribution.get_option_dict(self.command_name)
        for key, value in section.items():
            opt_dict[key.replace('-', '_')] = ("command line", value)
        self.distribution.run_command(self.command_name)

    @property
    def description(self):
        """The underlying distutils command's description string."""
        return self.command_class.description
|
||||
|
||||
def _get_shortname(taskname):
|
||||
dotindex = taskname.rfind(".")
|
||||
if dotindex > -1:
|
||||
command_name = taskname[dotindex+1:]
|
||||
else:
|
||||
command_name = taskname
|
||||
return command_name
|
||||
|
||||
class DistutilsTaskFinder(object):
    """Task finder that exposes distutils/setuptools commands as
    Paver tasks."""

    def get_task(self, taskname):
        """Look up one distutils command by (possibly dotted) task
        name; return a DistutilsTask, or None if no such command
        exists."""
        dist = _get_distribution()
        command_name = _get_shortname(taskname)
        try:
            command_class = dist.get_command_class(command_name)
        except DistutilsModuleError:
            return None
        return DistutilsTask(dist, command_name, command_class)

    def get_tasks(self):
        """Return the set of all available distutils command tasks."""
        dist = _get_distribution()
        if has_setuptools:
            for ep in pkg_resources.iter_entry_points('distutils.commands'):
                try:
                    cmdclass = ep.load(False) # don't require extras, we're not running
                    dist.cmdclass[ep.name] = cmdclass
                # BUG FIX: was a bare 'except:', which also swallowed
                # KeyboardInterrupt/SystemExit; narrowed to Exception.
                # Load failures stay non-fatal because, on the Mac at
                # least, installing from the tarball via zc.buildout
                # fails due to a problem in the py2app command.
                except Exception:
                    tasks.environment.info("Could not load entry point: %s", ep)
        dist.get_command_list()
        return set(DistutilsTask(dist, key, value)
                   for key, value in dist.cmdclass.items())
|
||||
|
||||
def _get_distribution():
    """Return the Distribution object for this pavement, creating it
    from the 'setup' options section and caching it on the task
    environment the first time."""
    env = tasks.environment
    try:
        return env.distribution
    except AttributeError:
        new_dist = _Distribution(attrs=env.options.get('setup', {}))
        env.distribution = new_dist
        new_dist.script_name = env.pavement_file
        return new_dist
|
||||
|
||||
def install_distutils_tasks():
    """Makes distutils and setuptools commands available as Paver tasks."""
    env = tasks.environment
    # Idempotent: a marker attribute prevents double registration.
    if hasattr(env, "_distutils_tasks_installed"):
        return
    env.task_finders.append(DistutilsTaskFinder())
    env._distutils_tasks_installed = True
|
||||
|
||||
def setup(**kw):
    """Updates options.setup with the keyword arguments provided,
    and installs the distutils tasks for this pavement.  Usable as a
    drop-in replacement for distutils.core.setup or setuptools.setup
    in a traditional setup.py."""
    install_distutils_tasks()
    section = tasks.environment.options.setdefault("setup", Bunch())
    section.update(kw)
|
||||
|
||||
def _error(message, *args):
    """Displays an error message to the user."""
    tasks.environment.error(message, *args)

def _info(message, *args):
    """Displays a message to the user. If the quiet option is specified, the
    message will not be displayed."""
    tasks.environment.info(message, *args)

def _debug(message, *args):
    """Displays a message to the user, but only if the verbose flag is
    set."""
    tasks.environment.debug(message, *args)

def _base_log(level, message, *args):
    """Displays a message at the given log level"""
    # Forwards the collected *args as one tuple, matching
    # Environment._log's (level, message, args) signature.
    tasks.environment._log(level, message, args)
|
||||
|
||||
# monkeypatch the distutils logging to go through Paver's logging
log.log = _base_log
log.debug = _debug
log.info = _info
# warn/error/fatal all route to error-level output so they are
# always shown.
log.warn = _error
log.error = _error
log.fatal = _error
|
||||
|
||||
|
||||
if has_setuptools:
    # Re-export setuptools' find_packages as part of this module's API.
    __ALL__.extend(["find_packages"])

    from setuptools import find_packages
else:
    # Fallback: make sure the plain distutils machinery is loaded.
    import distutils.core
|
||||
|
|
@ -0,0 +1,618 @@
|
|||
import sys
|
||||
import os
|
||||
import optparse
|
||||
import types
|
||||
import inspect
|
||||
import itertools
|
||||
import traceback
|
||||
|
||||
VERSION = "1.0.1"
|
||||
|
||||
# Root exceptions for Paver's build machinery.
class PavementError(Exception):
    """Exception that represents a problem in the pavement.py file
    rather than the process of running a build."""
    pass

class BuildFailure(Exception):
    """Represents a problem with some part of the build's execution."""
    pass
|
||||
|
||||
|
||||
class Environment(object):
|
||||
# Class-level defaults; instances override them as needed.
_task_in_progress = None    # name of the task currently running, if any
_task_output = None         # list capturing log lines while a task runs
_all_tasks = None           # cache of discovered tasks
_dry_run = False            # backing store for the dry_run property
verbose = False             # when True, _log also prints level-1 output
interactive = False
quiet = False               # when True, _log suppresses level-2 output
_file = "pavement.py"       # backing store for the pavement_file property
|
||||
|
||||
def __init__(self, pavement=None):
    """Create a build environment, optionally bound to a pavement
    module; options are set up only when paver.options is
    importable."""
    self.pavement = pavement
    self.task_finders = []
    try:
        # for the time being, at least, tasks.py can be used on its
        # own!
        from paver import options
        self.options = options.Namespace()
        self.options.dry_run = False
        self.options.pavement_file = self.pavement_file
    except ImportError:
        pass
|
||||
|
||||
def info(self, message, *args):
    # Level 2: shown unless quiet is set.
    self._log(2, message, args)

def debug(self, message, *args):
    # Level 1: shown only when verbose is set.
    self._log(1, message, args)

def error(self, message, *args):
    # Level 3: always shown.
    self._log(3, message, args)

def _log(self, level, message, args):
    # Format, capture into the running task's output buffer (if a
    # task is in progress), and print according to level/flags.
    output = message % args
    if self._task_output is not None:
        self._task_output.append(output)
    if level > 2 or (level > 1 and not self.quiet) or \
        self.verbose:
        self._print(output)

def _print(self, output):
    # Isolated so subclasses/tests can redirect output.
    print output

def _exit(self, code):
    # NOTE(review): 'code' is ignored; this always exits with
    # status 1 — confirm whether that is intentional.
    sys.exit(1)
|
||||
|
||||
def _set_dry_run(self, dr):
    self._dry_run = dr
    try:
        # Mirror the flag into the options namespace when it exists
        # (it may be absent when paver.options failed to import).
        self.options.dry_run = dr
    except AttributeError:
        pass

def _get_dry_run(self):
    return self._dry_run

# When true, tasks report the actions they would take instead of
# performing them.
dry_run = property(_get_dry_run, _set_dry_run)
|
||||
|
||||
def _set_pavement_file(self, pavement_file):
    self._file = pavement_file
    try:
        # Mirror into the options namespace when it exists (it may
        # be absent when paver.options failed to import).
        self.options.pavement_file = pavement_file
    except AttributeError:
        pass

def _get_pavement_file(self):
    return self._file

# Path of the pavement file driving this environment.
pavement_file = property(_get_pavement_file, _set_pavement_file)

# Write-only alias kept for backward compatibility.
file = property(fset=_set_pavement_file)
|
||||
|
||||
def get_task(self, taskname):
|
||||
task = getattr(self.pavement, taskname, None)
|
||||
|
||||
# delegate to task finders next
|
||||
if not task:
|
||||
for finder in self.task_finders:
|
||||
task = finder.get_task(taskname)
|
||||
if task:
|
||||
break
|
||||
|
||||
# try to look up by full name
|
||||
if not task:
|
||||
task = _import_task(taskname)
|
||||
|
||||
# if there's nothing by full name, look up by
|
||||
# short name
|
||||
if not task:
|
||||
all_tasks = self.get_tasks()
|
||||
matches = [t for t in all_tasks
|
||||
if t.shortname == taskname]
|
||||
if len(matches) > 1:
|
||||
matched_names = [t.name for t in matches]
|
||||
raise BuildFailure("Ambiguous task name %s (%s)" %
|
||||
(taskname, matched_names))
|
||||
elif matches:
|
||||
task = matches[0]
|
||||
return task
|
||||
|
||||
def call_task(self, task_name):
|
||||
task = self.get_task(task_name)
|
||||
task()
|
||||
|
||||
def _run_task(self, task_name, needs, func):
|
||||
(funcargs, varargs, varkw, defaults) = inspect.getargspec(func)
|
||||
kw = dict()
|
||||
for i in xrange(0, len(funcargs)):
|
||||
arg = funcargs[i]
|
||||
if arg == 'env':
|
||||
kw['env'] = self
|
||||
# Keyword arguments do now need to be in the environment
|
||||
elif (defaults is not None and
|
||||
(i - (len(funcargs) - len(defaults))) >= 0):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
kw[arg] = getattr(self, arg)
|
||||
except AttributeError:
|
||||
raise PavementError("Task %s requires an argument (%s) that is "
|
||||
"not present in the environment" % (task_name, arg))
|
||||
|
||||
if not self._task_in_progress:
|
||||
self._task_in_progress = task_name
|
||||
self._task_output = []
|
||||
running_top_level = True
|
||||
else:
|
||||
running_top_level = False
|
||||
def do_task():
|
||||
self.info("---> " + task_name)
|
||||
for req in needs:
|
||||
task = self.get_task(req)
|
||||
if not task:
|
||||
raise PavementError("Requirement %s for task %s not found" %
|
||||
(req, task_name))
|
||||
if not isinstance(task, Task):
|
||||
raise PavementError("Requirement %s for task %s is not a Task"
|
||||
% (req, task_name))
|
||||
if not task.called:
|
||||
task()
|
||||
return func(**kw)
|
||||
if running_top_level:
|
||||
try:
|
||||
return do_task()
|
||||
except Exception, e:
|
||||
self._print("""
|
||||
|
||||
Captured Task Output:
|
||||
---------------------
|
||||
""")
|
||||
self._print("\n".join(self._task_output))
|
||||
if isinstance(e, BuildFailure):
|
||||
self._print("\nBuild failed running %s: %s" %
|
||||
(self._task_in_progress, e))
|
||||
else:
|
||||
self._print(traceback.format_exc())
|
||||
self._task_in_progress = None
|
||||
self._task_output = None
|
||||
self._exit(1)
|
||||
else:
|
||||
return do_task()
|
||||
|
||||
def get_tasks(self):
|
||||
if self._all_tasks:
|
||||
return self._all_tasks
|
||||
result = set()
|
||||
modules = set()
|
||||
def scan_module(module):
|
||||
modules.add(module)
|
||||
for name in dir(module):
|
||||
item = getattr(module, name, None)
|
||||
if isinstance(item, Task):
|
||||
result.add(item)
|
||||
if isinstance(item, types.ModuleType) and item not in modules:
|
||||
scan_module(item)
|
||||
scan_module(self.pavement)
|
||||
for finder in self.task_finders:
|
||||
result.update(finder.get_tasks())
|
||||
self._all_tasks = result
|
||||
return result
|
||||
|
||||
# Stack of saved Environments used by call_pavement() for nested runs.
environment_stack = []

# The Environment currently in effect; rebound by main() and call_pavement().
environment = Environment()
|
||||
|
||||
def _import_task(taskname):
|
||||
"""Looks up a dotted task name and imports the module as necessary
|
||||
to get at the task."""
|
||||
parts = taskname.split('.')
|
||||
if len(parts) < 2:
|
||||
return None
|
||||
func_name = parts[-1]
|
||||
full_mod_name = ".".join(parts[:-1])
|
||||
mod_name = parts[-2]
|
||||
try:
|
||||
module = __import__(full_mod_name, globals(), locals(), [mod_name])
|
||||
except ImportError:
|
||||
return None
|
||||
return getattr(module, func_name, None)
|
||||
|
||||
class Task(object):
|
||||
called = False
|
||||
consume_args = False
|
||||
no_auto = False
|
||||
|
||||
__doc__ = ""
|
||||
|
||||
def __init__(self, func):
|
||||
self.func = func
|
||||
self.needs = []
|
||||
self.__name__ = func.__name__
|
||||
self.shortname = func.__name__
|
||||
self.name = "%s.%s" % (func.__module__, func.__name__)
|
||||
self.option_names = set()
|
||||
self.user_options = []
|
||||
try:
|
||||
self.__doc__ = func.__doc__
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
retval = environment._run_task(self.name, self.needs, self.func)
|
||||
self.called = True
|
||||
return retval
|
||||
|
||||
def __repr__(self):
|
||||
return "Task: " + self.__name__
|
||||
|
||||
@property
|
||||
def parser(self):
|
||||
options = self.user_options
|
||||
parser = optparse.OptionParser(add_help_option=False,
|
||||
usage="%%prog %s [options]" % (self.name))
|
||||
parser.disable_interspersed_args()
|
||||
parser.add_option('-h', '--help', action="store_true",
|
||||
help="display this help information")
|
||||
|
||||
needs_tasks = [(environment.get_task(task), task) for task in self.needs]
|
||||
for task, task_name in itertools.chain([(self, self.name)], needs_tasks):
|
||||
if not task:
|
||||
raise PavementError("Task %s needed by %s does not exist"
|
||||
% (task_name, self))
|
||||
for option in task.user_options:
|
||||
try:
|
||||
longname = option[0]
|
||||
if longname.endswith('='):
|
||||
action = "store"
|
||||
longname = longname[:-1]
|
||||
else:
|
||||
action = "store_true"
|
||||
|
||||
environment.debug("Task %s: adding option %s (%s)" %
|
||||
(self.name, longname, option[1]))
|
||||
try:
|
||||
if option[1] is None:
|
||||
parser.add_option("--" + longname, action=action,
|
||||
dest=longname.replace('-', '_'),
|
||||
help=option[2])
|
||||
else:
|
||||
parser.add_option("-" + option[1],
|
||||
"--" + longname, action=action,
|
||||
dest=longname.replace('-', '_'),
|
||||
help=option[2])
|
||||
except optparse.OptionConflictError:
|
||||
raise PavementError("""In setting command options for %r,
|
||||
option %s for %r is already in use
|
||||
by another task in the dependency chain.""" % (self, option, task))
|
||||
self.option_names.add((task.shortname, longname))
|
||||
except IndexError:
|
||||
raise PavementError("Invalid option format provided for %r: %s"
|
||||
% (self, option))
|
||||
return parser
|
||||
|
||||
def display_help(self, parser=None):
|
||||
if not parser:
|
||||
parser = self.parser
|
||||
|
||||
name = self.name
|
||||
print "\n%s" % name
|
||||
print "-" * (len(name))
|
||||
parser.print_help()
|
||||
print
|
||||
print self.__doc__
|
||||
print
|
||||
|
||||
def parse_args(self, args):
|
||||
import paver.options
|
||||
environment.debug("Task %s: Parsing args %s" % (self.name, args))
|
||||
optholder = environment.options.setdefault(self.shortname,
|
||||
paver.options.Bunch())
|
||||
parser = self.parser
|
||||
options, args = parser.parse_args(args)
|
||||
if options.help:
|
||||
self.display_help(parser)
|
||||
sys.exit(0)
|
||||
|
||||
for task_name, option_name in self.option_names:
|
||||
option_name = option_name.replace('-', '_')
|
||||
try:
|
||||
optholder = environment.options[task_name]
|
||||
except KeyError:
|
||||
optholder = paver.options.Bunch()
|
||||
environment.options[task_name] = optholder
|
||||
value = getattr(options, option_name)
|
||||
if value is not None:
|
||||
optholder[option_name] = getattr(options, option_name)
|
||||
return args
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
doc = self.__doc__
|
||||
if doc:
|
||||
period = doc.find(".")
|
||||
if period > -1:
|
||||
doc = doc[0:period]
|
||||
else:
|
||||
doc = ""
|
||||
return doc
|
||||
|
||||
|
||||
def task(func):
    """Mark ``func`` as a build task.

    Wraps the function in a Task object; idempotent, so an already
    wrapped Task is returned unchanged.
    """
    if isinstance(func, Task):
        return func
    return Task(func)
|
||||
|
||||
def needs(*args):
    """Declare tasks that must run before the decorated task.

    Accepts task names as multiple string arguments, a single string,
    or a single list/tuple of strings. Requirements accumulate across
    repeated decoration and run in the order given.
    """
    def entangle(func):
        func = task(func)
        req = args
        if len(req) == 1:
            req = req[0]
        if isinstance(req, basestring):
            func.needs.append(req)
        elif isinstance(req, (list, tuple)):
            func.needs.extend(req)
        else:
            raise PavementError("'needs' decorator requires a list or string "
                                "but got %s" % req)
        return func
    return entangle
|
||||
|
||||
def cmdopts(options):
    """Declare distutils-style command line options for a task.

    ``options`` is a list of (long_name, short_name, description)
    tuples. A trailing '=' on long_name means the option takes a value;
    parsed values are stored in the options dict under the task's name,
    keyed by the long name with '-' replaced by '_'.
    """
    def entangle(func):
        wrapped = task(func)
        wrapped.user_options = options
        return wrapped
    return entangle
|
||||
|
||||
def consume_args(func):
    """Mark a task as consuming every remaining command line argument.

    The leftover arguments are delivered via options.args instead of
    being parsed as further tasks or options.
    """
    wrapped = task(func)
    wrapped.consume_args = True
    return wrapped
|
||||
|
||||
def no_auto(func):
    """Mark a task as independent of the 'auto' task.

    The 'auto' task will not be run automatically just for this task.
    """
    wrapped = task(func)
    wrapped.no_auto = True
    return wrapped
|
||||
|
||||
def _preparse(args):
    """Consume leading key=value settings and an optional task name.

    Returns (task, taskname, remaining_args). Iteration stops at the
    first global option (leading '-', pushed back for the option
    parser) or at the first task name found. Raises BuildFailure for an
    unknown task, or for key=value settings in a standalone tasks.py
    without an options namespace.
    """
    task = None
    taskname = None
    while args:
        arg = args.pop(0)
        if '=' in arg:
            # Bug fix: split only on the first '=' so option values may
            # themselves contain '=' (e.g. flags=a=b previously raised
            # "too many values to unpack").
            key, value = arg.split("=", 1)
            try:
                environment.options.setdotted(key, value)
            except AttributeError:
                raise BuildFailure("""This appears to be a standalone Paver
tasks.py, so the build environment does not support options. The command
line (%s) attempts to set an option.""" % (args))
        elif arg.startswith('-'):
            args.insert(0, arg)
            break
        else:
            taskname = arg
            task = environment.get_task(taskname)
            if task is None:
                raise BuildFailure("Unknown task: %s" % taskname)
            break
    return task, taskname, args
|
||||
|
||||
def _parse_global_options(args):
    """Parse paver's own global options; return the unconsumed arguments.

    Every parsed value is copied onto the active environment as an
    attribute; -h/--help is translated into a leading 'help' task.
    """
    # this is where global options should be dealt with
    parser = optparse.OptionParser(usage=
        """Usage: %prog [global options] taskname [task options] """
        """[taskname [taskoptions]]""", version="Paver %s" % (VERSION),
        add_help_option=False)

    environment.help_function = parser.print_help

    # simple boolean flags, declared as (short, long, help)
    for short_opt, long_opt, help_text in (
            ('-n', '--dry-run', "don't actually do anything"),
            ('-v', '--verbose', "display all logging output"),
            ('-q', '--quiet', "display only errors"),
            ('-i', '--interactive', "enable prompting")):
        parser.add_option(short_opt, long_opt, action="store_true",
                          help=help_text)
    parser.add_option("-f", "--file", metavar="FILE",
                      help="read tasks from FILE [%default]")
    parser.add_option('-h', "--help", action="store_true",
                      help="display this help information")
    parser.set_defaults(file=environment.pavement_file)

    parser.disable_interspersed_args()
    options, args = parser.parse_args(args)
    if options.help:
        args.insert(0, "help")
    for key, value in vars(options).items():
        setattr(environment, key, value)

    return args
|
||||
|
||||
def _parse_command_line(args):
    """Resolve the next task on the command line and its arguments.

    Returns (task, remaining_args); task is None when nothing is left
    to run. Raises BuildFailure for unknown names or non-Task objects.
    """
    task, taskname, args = _preparse(args)

    if not task:
        args = _parse_global_options(args)
        if not args:
            return None, []

        taskname = args.pop(0)
        task = environment.get_task(taskname)

    if not task:
        raise BuildFailure("Unknown task: %s" % taskname)

    if not isinstance(task, Task):
        raise BuildFailure("%s is not a Task" % taskname)

    if task.consume_args:
        # hand every remaining argument over to the task itself
        try:
            environment.options.args = args
        except AttributeError:
            # standalone tasks.py: no options namespace
            pass
        environment.args = args
        args = []
    else:
        args = task.parse_args(args)

    return task, args
|
||||
|
||||
def _cmp_task_names(a, b):
    """Sort comparator: pavement-local tasks sort after all others,
    then alphabetically by full name."""
    a, b = a.name, b.name
    a_local = a.startswith("pavement.")
    b_local = b.startswith("pavement.")
    if a_local and not b_local:
        return 1
    if b_local and not a_local:
        return -1
    return cmp(a, b)
|
||||
|
||||
def _group_by_module(items):
|
||||
groups = []
|
||||
current_group_name = None
|
||||
current_group = None
|
||||
maxlen = 5
|
||||
for item in items:
|
||||
name = item.name
|
||||
dotpos = name.rfind(".")
|
||||
group_name = name[:dotpos]
|
||||
maxlen = max(len(item.shortname), maxlen)
|
||||
if current_group_name != group_name:
|
||||
current_group = []
|
||||
current_group_name = group_name
|
||||
groups.append([group_name, current_group])
|
||||
current_group.append(item)
|
||||
return maxlen, groups
|
||||
|
||||
@task
|
||||
@no_auto
|
||||
@consume_args
|
||||
def help(args, help_function):
|
||||
"""This help display."""
|
||||
if args:
|
||||
task_name = args[0]
|
||||
task = environment.get_task(task_name)
|
||||
if not task:
|
||||
print "Task not found: %s" % (task_name)
|
||||
return
|
||||
|
||||
task.display_help()
|
||||
return
|
||||
|
||||
help_function()
|
||||
|
||||
task_list = environment.get_tasks()
|
||||
task_list = sorted(task_list, cmp=_cmp_task_names)
|
||||
maxlen, task_list = _group_by_module(task_list)
|
||||
fmt = " %-" + str(maxlen) + "s - %s"
|
||||
for group_name, group in task_list:
|
||||
print "\nTasks from %s:" % (group_name)
|
||||
for task in group:
|
||||
print(fmt % (task.shortname, task.description))
|
||||
|
||||
def _process_commands(args, auto_pending=False):
    """Execute tasks from the command line until arguments run out.

    Runs the 'auto' task once, before the first task that permits it,
    and falls back to the 'default' task when no task is named at all.
    """
    first_loop = True
    while True:
        task, args = _parse_command_line(args)
        if auto_pending and (not task or not task.no_auto):
            environment.call_task('auto')
            auto_pending = False
        if task is None:
            if not first_loop:
                break
            task = environment.get_task('default')
            if not task:
                break
        task()
        first_loop = False
|
||||
|
||||
def call_pavement(new_pavement, args):
    """Run another pavement file with the given args, then restore state.

    A fresh Environment is swapped in and the working directory changed
    to the pavement's directory for the duration of the run; both are
    restored afterwards. ``args`` may be a string or a list.
    """
    if isinstance(args, basestring):
        args = args.split()
    global environment
    environment_stack.append(environment)
    environment = Environment()
    cwd = os.getcwd()
    dirname, basename = os.path.split(new_pavement)
    environment.pavement_file = basename
    try:
        if dirname:
            os.chdir(dirname)
        _launch_pavement(args)
    finally:
        os.chdir(cwd)
    environment = environment_stack.pop()
|
||||
|
||||
def _launch_pavement(args):
|
||||
mod = types.ModuleType("pavement")
|
||||
environment.pavement = mod
|
||||
|
||||
if not os.path.exists(environment.pavement_file):
|
||||
environment.pavement_file = None
|
||||
exec "from paver.easy import *\n" in mod.__dict__
|
||||
_process_commands(args)
|
||||
return
|
||||
|
||||
mod.__file__ = environment.pavement_file
|
||||
try:
|
||||
execfile(environment.pavement_file, mod.__dict__)
|
||||
auto_task = getattr(mod, 'auto', None)
|
||||
auto_pending = isinstance(auto_task, Task)
|
||||
_process_commands(args, auto_pending=auto_pending)
|
||||
except PavementError, e:
|
||||
print "\n\n*** Problem with pavement:\n%s\n%s\n\n" % (
|
||||
os.path.abspath(environment.pavement_file), e)
|
||||
|
||||
def main(args=None):
|
||||
global environment
|
||||
if args is None:
|
||||
if len(sys.argv) > 1:
|
||||
args = sys.argv[1:]
|
||||
else:
|
||||
args = []
|
||||
environment = Environment()
|
||||
|
||||
# need to parse args to recover pavement-file to read before executing
|
||||
try:
|
||||
args = _parse_global_options(args)
|
||||
_launch_pavement(args)
|
||||
except BuildFailure, e:
|
||||
environment.error("Build failed: %s", e)
|
||||
sys.exit(1)
|
|
@ -0,0 +1,376 @@
|
|||
r"""
|
||||
A simple, fast, extensible JSON encoder and decoder
|
||||
|
||||
JSON (JavaScript Object Notation) <http://json.org> is a subset of
|
||||
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
||||
interchange format.
|
||||
|
||||
simplejson exposes an API familiar to users of the standard library
|
||||
marshal and pickle modules.
|
||||
|
||||
Encoding basic Python object hierarchies::
|
||||
|
||||
>>> import simplejson
|
||||
>>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
||||
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
||||
>>> print simplejson.dumps("\"foo\bar")
|
||||
"\"foo\bar"
|
||||
>>> print simplejson.dumps(u'\u1234')
|
||||
"\u1234"
|
||||
>>> print simplejson.dumps('\\')
|
||||
"\\"
|
||||
>>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
|
||||
{"a": 0, "b": 0, "c": 0}
|
||||
>>> from StringIO import StringIO
|
||||
>>> io = StringIO()
|
||||
>>> simplejson.dump(['streaming API'], io)
|
||||
>>> io.getvalue()
|
||||
'["streaming API"]'
|
||||
|
||||
Compact encoding::
|
||||
|
||||
>>> import simplejson
|
||||
>>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
|
||||
'[1,2,3,{"4":5,"6":7}]'
|
||||
|
||||
Pretty printing::
|
||||
|
||||
>>> import simplejson
|
||||
>>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
|
||||
{
|
||||
"4": 5,
|
||||
"6": 7
|
||||
}
|
||||
|
||||
Decoding JSON::
|
||||
|
||||
>>> import simplejson
|
||||
>>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
|
||||
[u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
|
||||
>>> simplejson.loads('"\\"foo\\bar"')
|
||||
u'"foo\x08ar'
|
||||
>>> from StringIO import StringIO
|
||||
>>> io = StringIO('["streaming API"]')
|
||||
>>> simplejson.load(io)
|
||||
[u'streaming API']
|
||||
|
||||
Specializing JSON object decoding::
|
||||
|
||||
>>> import simplejson
|
||||
>>> def as_complex(dct):
|
||||
... if '__complex__' in dct:
|
||||
... return complex(dct['real'], dct['imag'])
|
||||
... return dct
|
||||
...
|
||||
>>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
||||
... object_hook=as_complex)
|
||||
(1+2j)
|
||||
>>> import decimal
|
||||
>>> simplejson.loads('1.1', parse_float=decimal.Decimal)
|
||||
Decimal("1.1")
|
||||
|
||||
Extending JSONEncoder::
|
||||
|
||||
>>> import simplejson
|
||||
>>> class ComplexEncoder(simplejson.JSONEncoder):
|
||||
... def default(self, obj):
|
||||
... if isinstance(obj, complex):
|
||||
... return [obj.real, obj.imag]
|
||||
... return simplejson.JSONEncoder.default(self, obj)
|
||||
...
|
||||
>>> dumps(2 + 1j, cls=ComplexEncoder)
|
||||
'[2.0, 1.0]'
|
||||
>>> ComplexEncoder().encode(2 + 1j)
|
||||
'[2.0, 1.0]'
|
||||
>>> list(ComplexEncoder().iterencode(2 + 1j))
|
||||
['[', '2.0', ', ', '1.0', ']']
|
||||
|
||||
|
||||
Using simplejson from the shell to validate and
|
||||
pretty-print::
|
||||
|
||||
$ echo '{"json":"obj"}' | python -msimplejson.tool
|
||||
{
|
||||
"json": "obj"
|
||||
}
|
||||
$ echo '{ 1.2:3.4}' | python -msimplejson.tool
|
||||
Expecting property name: line 1 column 2 (char 2)
|
||||
|
||||
Note that the JSON produced by this module's default settings
|
||||
is a subset of YAML, so it may be used as a serializer for that as well.
|
||||
"""
|
||||
# Package version and the names re-exported via ``from simplejson import *``.
__version__ = '1.9.2'
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONEncoder',
]
|
||||
|
||||
# When run as a script, warn and fall through to absolute imports; as a
# package module, use the (Python 2) implicit-relative imports.
if __name__ == '__main__':
    import warnings
    # Message fix: the suggested replacement is simplejson.tool
    # ("msiplejson" was a typo in the warning text).
    warnings.warn('python -msimplejson is deprecated, use python -msimplejson.tool',
                  DeprecationWarning)
    from simplejson.decoder import JSONDecoder
    from simplejson.encoder import JSONEncoder
else:
    from decoder import JSONDecoder
    from encoder import JSONEncoder
|
||||
|
||||
# Shared encoder instance used by dump()/dumps() whenever every argument
# has its default value, so the common case skips building a new encoder.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
|
||||
|
||||
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON stream written to ``fp``.

    ``fp`` is any ``.write()``-supporting file-like object.

    ``skipkeys``: skip non-basic dict keys instead of raising TypeError.
    ``ensure_ascii``: when False, chunks may be ``unicode`` instances --
    ``fp.write()`` must cope with that (e.g. ``codecs.getwriter()``).
    ``check_circular``: when False, skip the circular reference check;
    a cycle then raises ``OverflowError`` (or worse).
    ``allow_nan``: when False, out-of-range floats (``nan``, ``inf``,
    ``-inf``) raise ``ValueError`` instead of emitting the JavaScript
    equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    ``indent``: non-negative integer pretty-print indent; 0 inserts only
    newlines; None is most compact.
    ``separators``: an ``(item_separator, dict_separator)`` tuple
    overriding the default ``(', ', ': ')``; ``(',', ':')`` is most
    compact.
    ``encoding``: character encoding for ``str`` instances (UTF-8).
    ``default(obj)``: returns a serializable version of obj or raises
    TypeError; the default simply raises TypeError.
    ``cls``: a ``JSONEncoder`` subclass to use instead of JSONEncoder.
    """
    # Fast path: with all-default arguments, reuse the cached encoder.
    if (skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw):
        iterable = _default_encoder.iterencode(obj)
    else:
        encoder_cls = JSONEncoder if cls is None else cls
        iterable = encoder_cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan, indent=indent,
            separators=separators, encoding=encoding,
            default=default, **kw).iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for chunk in iterable:
        fp.write(chunk)
|
||||
|
||||
|
||||
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    ``skipkeys``: skip non-basic dict keys instead of raising TypeError.
    ``ensure_ascii``: when False, return a ``unicode`` instance instead
    of an ASCII-escaped ``str``.
    ``check_circular``: when False, skip the circular reference check;
    a cycle then raises ``OverflowError`` (or worse).
    ``allow_nan``: when False, out-of-range floats (``nan``, ``inf``,
    ``-inf``) raise ``ValueError`` instead of emitting the JavaScript
    equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    ``indent``: non-negative integer pretty-print indent; 0 inserts only
    newlines; None is most compact.
    ``separators``: an ``(item_separator, dict_separator)`` tuple
    overriding the default ``(', ', ': ')``; ``(',', ':')`` is most
    compact.
    ``encoding``: character encoding for ``str`` instances (UTF-8).
    ``default(obj)``: returns a serializable version of obj or raises
    TypeError; the default simply raises TypeError.
    ``cls``: a ``JSONEncoder`` subclass to use instead of JSONEncoder.
    """
    # Fast path: with all-default arguments, reuse the cached encoder.
    if (skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw):
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    return cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        **kw).encode(obj)
|
||||
|
||||
|
||||
# Shared decoder instance reused by loads() when every argument has its
# default value (the common case).
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
|
||||
|
||||
|
||||
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize the JSON document read from ``fp`` to a Python object.

    ``fp`` is a ``.read()``-supporting file-like object. ``encoding``
    names an ASCII-based codec for non-utf-8 content; non-ASCII-based
    encodings (such as UCS-2) are not allowed and should be wrapped with
    ``codecs.getreader(fp)(encoding)`` or decoded to ``unicode`` and
    passed to ``loads()``. ``object_hook`` is called with each decoded
    dict and its return value is used instead (e.g. JSON-RPC class
    hinting). Use ``cls`` for a ``JSONDecoder`` subclass.
    """
    # Everything is delegated to loads() on the full file contents.
    return loads(fp.read(),
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, **kw)
|
||||
|
||||
|
||||
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize the JSON document ``s`` (str or unicode) to a Python object.

    ``encoding`` names an ASCII-based codec when ``s`` is a non-utf-8
    ``str``; non-ASCII-based encodings must be decoded to ``unicode``
    first. ``object_hook`` is called with each decoded dict and its
    return value used instead. ``parse_float`` / ``parse_int`` are
    called with the string of each JSON float/int (defaults are
    equivalent to ``float``/``int``). ``parse_constant`` is called for
    -Infinity, Infinity, NaN, null, true, false (e.g. to reject invalid
    JSON numbers). Use ``cls`` for a ``JSONDecoder`` subclass.
    """
    # Fast path: with all-default arguments, reuse the cached decoder.
    if (cls is None and encoding is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks that were actually supplied.
    for name, hook in (('object_hook', object_hook),
                       ('parse_float', parse_float),
                       ('parse_int', parse_int),
                       ('parse_constant', parse_constant)):
        if hook is not None:
            kw[name] = hook
    return cls(encoding=encoding, **kw).decode(s)
|
||||
|
||||
|
||||
#
|
||||
# Compatibility cruft from other libraries
|
||||
#
|
||||
|
||||
|
||||
def decode(s):
    """demjson / python-cjson API compatibility hook. Use loads(s) instead."""
    import warnings
    warnings.warn("simplejson.loads(s) should be used instead of decode(s)",
                  DeprecationWarning)
    return loads(s)
|
||||
|
||||
|
||||
def encode(obj):
    """demjson / python-cjson API compatibility hook. Use dumps(s) instead."""
    import warnings
    warnings.warn("simplejson.dumps(s) should be used instead of encode(s)",
                  DeprecationWarning)
    return dumps(obj)
|
||||
|
||||
|
||||
def read(s):
    """jsonlib / JsonUtils / python-json / json-py API compatibility hook.

    Use loads(s) instead.
    """
    import warnings
    warnings.warn("simplejson.loads(s) should be used instead of read(s)",
                  DeprecationWarning)
    return loads(s)
|
||||
|
||||
|
||||
def write(obj):
    """jsonlib / JsonUtils / python-json / json-py API compatibility hook.

    Use dumps(s) instead.
    """
    import warnings
    warnings.warn("simplejson.dumps(s) should be used instead of write(s)",
                  DeprecationWarning)
    return dumps(obj)
|
||||
|
||||
|
||||
# Running the package directly delegates to the validating pretty-printer.
if __name__ == '__main__':
    import simplejson.tool
    simplejson.tool.main()
|
|
@ -0,0 +1,343 @@
|
|||
"""
|
||||
Implementation of JSONDecoder
|
||||
"""
|
||||
import re
|
||||
import sys
|
||||
|
||||
from simplejson.scanner import Scanner, pattern
|
||||
try:
|
||||
from simplejson._speedups import scanstring as c_scanstring
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
||||
|
||||
def _floatconstants():
|
||||
import struct
|
||||
import sys
|
||||
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
|
||||
if sys.byteorder != 'big':
|
||||
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
|
||||
nan, inf = struct.unpack('dd', _BYTES)
|
||||
return nan, inf, -inf
|
||||
|
||||
NaN, PosInf, NegInf = _floatconstants()
|
||||
|
||||
|
||||
def linecol(doc, pos):
    """Return the 1-based (line, column) of character offset *pos* in *doc*."""
    newlines = doc.count('\n', 0, pos)
    if newlines:
        # Column is measured from the last newline before *pos*.
        column = pos - doc.rindex('\n', 0, pos)
    else:
        # First line: the offset itself is the column.
        column = pos
    return newlines + 1, column
|
||||
|
||||
|
||||
def errmsg(msg, doc, pos, end=None):
    """Format *msg* with line/column location info for a decode error."""
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    span_fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
    return span_fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
|
||||
|
||||
|
||||
# Literal tokens and the (non-spec) IEEE special values they decode to.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
    'true': True,
    'false': False,
    'null': None,
}

def JSONConstant(match, context, c=_CONSTANTS):
    """Scanner action for JSON literals and the IEEE special values."""
    text = match.group(0)
    hook = getattr(context, 'parse_constant', None)
    if hook is not None:
        # The context supplies a custom constant parser.
        return hook(text), None
    return c[text], None
pattern('(-?Infinity|NaN|true|false|null)')(JSONConstant)
|
||||
|
||||
|
||||
def JSONNumber(match, context):
    """Scanner action for JSON numbers (integers and reals)."""
    # Re-match with the capturing regex to split the number into parts.
    match = JSONNumber.regex.match(match.string, *match.span())
    integer, frac, exp = match.groups()
    if not (frac or exp):
        # No fractional or exponent part: this is an integer.
        convert = getattr(context, 'parse_int', None) or int
        return convert(integer), None
    convert = getattr(context, 'parse_float', None) or float
    return convert(integer + (frac or '') + (exp or '')), None
pattern(r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?')(JSONNumber)
|
||||
|
||||
|
||||
# Matches a run of ordinary characters followed by the character that
# ends it: a closing quote, a backslash, or a raw control character.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character backslash escapes and the characters they denote.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

# Encoding assumed for str input when the caller does not specify one.
DEFAULT_ENCODING = "utf-8"
|
||||
|
||||
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    # Scan a JSON string in ``s``; ``end`` is the index just past the
    # opening quote.  Returns (decoded unicode string, index past the
    # closing quote).  ``strict`` rejects raw control characters inside
    # the string.  ``_b``/``_m`` are bound as defaults for lookup speed.
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1  # opening quote position, for error messages
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        if content:
            # Plain (unescaped) content; decode str input to unicode.
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        if terminator == '"':
            break  # closing quote: string complete
        elif terminator != '\\':
            # Raw control character inside the string.
            if strict:
                raise ValueError(errmsg("Invalid control character %r at", s, end))
            else:
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        if esc != 'u':
            # Single-character escape such as \n or \".
            try:
                m = _b[esc]
            except KeyError:
                raise ValueError(
                    errmsg("Invalid \\escape: %r" % (esc,), s, end))
            end += 1
        else:
            # \uXXXX escape, possibly the first half of a surrogate pair.
            esc = s[end + 1:end + 5]
            next_end = end + 5
            msg = "Invalid \\uXXXX escape"
            try:
                if len(esc) != 4:
                    raise ValueError
                uni = int(esc, 16)
                if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                    # High surrogate on a wide build: a low surrogate
                    # escape must follow; combine them into one code point.
                    msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                    if not s[end + 5:end + 7] == '\\u':
                        raise ValueError
                    esc2 = s[end + 7:end + 11]
                    if len(esc2) != 4:
                        raise ValueError
                    uni2 = int(esc2, 16)
                    uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                    next_end += 6
                m = unichr(uni)
            except ValueError:
                raise ValueError(errmsg(msg, s, end))
            end = next_end
        _append(m)
    return u''.join(chunks), end
|
||||
|
||||
|
||||
# Use speedup
try:
    # c_scanstring is only bound when the _speedups extension imported.
    scanstring = c_scanstring
except NameError:
    scanstring = py_scanstring
|
||||
|
||||
def JSONString(match, context):
    """Scanner action for a JSON string; delegates to scanstring()."""
    strict = getattr(context, 'strict', True)
    encoding = getattr(context, 'encoding', None)
    return scanstring(match.string, match.end(), encoding, strict)
pattern(r'"')(JSONString)
|
||||
|
||||
|
||||
# Matches (possibly empty) runs of whitespace between JSON tokens.
WHITESPACE = re.compile(r'\s*', FLAGS)
|
||||
|
||||
def JSONObject(match, context, _w=WHITESPACE.match):
    # Scanner action for '{': parse the members of a JSON object starting
    # just past the opening brace; return (dict, index past the '}').
    pairs = {}
    s = match.string
    end = _w(s, match.end()).end()
    nextchar = s[end:end + 1]
    # Trivial empty object
    if nextchar == '}':
        return pairs, end + 1
    if nextchar != '"':
        raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    encoding = getattr(context, 'encoding', None)
    strict = getattr(context, 'strict', True)
    iterscan = JSONScanner.iterscan
    while True:
        key, end = scanstring(s, end, encoding, strict)
        end = _w(s, end).end()
        if s[end:end + 1] != ':':
            raise ValueError(errmsg("Expecting : delimiter", s, end))
        end = _w(s, end + 1).end()
        try:
            # Recursively scan the member's value.
            value, end = iterscan(s, idx=end, context=context).next()
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar == '}':
            break
        if nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    object_hook = getattr(context, 'object_hook', None)
    if object_hook is not None:
        # Let the hook replace the dict (e.g. JSON-RPC class hinting).
        pairs = object_hook(pairs)
    return pairs, end
pattern(r'{')(JSONObject)
|
||||
|
||||
|
||||
def JSONArray(match, context, _w=WHITESPACE.match):
    # Scanner action for '[': parse elements after the opening bracket;
    # return (list, index past the ']').
    values = []
    s = match.string
    end = _w(s, match.end()).end()
    # Look-ahead for trivial empty array
    nextchar = s[end:end + 1]
    if nextchar == ']':
        return values, end + 1
    iterscan = JSONScanner.iterscan
    while True:
        try:
            # Recursively scan the next element.
            value, end = iterscan(s, idx=end, context=context).next()
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        values.append(value)
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        if nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        end = _w(s, end).end()
    return values, end
pattern(r'\[')(JSONArray)
|
||||
|
||||
|
||||
# Ordered set of scanner actions tried wherever a JSON value may appear.
ANYTHING = [
    JSONObject,
    JSONArray,
    JSONString,
    JSONConstant,
    JSONNumber,
]

# Module-level scanner used recursively by the container actions above.
JSONScanner = Scanner(ANYTHING)
|
||||
|
||||
|
||||
class JSONDecoder(object):
    """
    Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """

    # Shared scanner over all value types; reused by every instance.
    _scanner = Scanner(ANYTHING)
    __all__ = ['__init__', 'decode', 'raw_decode']

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True):
        """
        ``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default).  It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded.  By default this is equivalent to
        float(num_str).  This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded.  By default this is equivalent to
        int(num_str).  This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN, null, true, false.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        ``strict``, when true, rejects raw control characters inside
        strings (per the JSON spec).
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.parse_float = parse_float
        self.parse_int = parse_int
        self.parse_constant = parse_constant
        self.strict = strict

    def decode(self, s, _w=WHITESPACE.match):
        """
        Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Anything after the document besides trailing whitespace is an error.
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, **kw):
        """
        Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        kw.setdefault('context', self)
        try:
            obj, end = self._scanner.iterscan(s, **kw).next()
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end
|
||||
|
||||
__all__ = ['JSONDecoder']  # public API of this module
|
|
@ -0,0 +1,385 @@
|
|||
"""
|
||||
Implementation of JSONEncoder
|
||||
"""
|
||||
import re
|
||||
|
||||
try:
|
||||
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Characters that must always be escaped in a JSON string literal.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# ASCII-only output: additionally escape everything outside printable ASCII.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects str input containing high bytes (treated as UTF-8 by the encoder).
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Short-form escapes; control characters not listed here fall back to \u00XX.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
# Hook for float-to-string conversion; repr preserves the value exactly.
FLOAT_REPR = repr
|
||||
|
||||
def floatstr(o, allow_nan=True):
    """
    Serialize the float *o* as a JSON number string.  The IEEE specials
    are emitted by their JavaScript names when *allow_nan* is true and
    raise ValueError otherwise.
    """
    # Compare against self / INFINITY rather than poking at the float's
    # bit pattern, since that would be processor/platform specific.
    if o != o:
        special = 'NaN'
    elif o == INFINITY:
        special = 'Infinity'
    elif o == -INFINITY:
        special = '-Infinity'
    else:
        return FLOAT_REPR(o)

    if not allow_nan:
        raise ValueError("Out of range float values are not JSON compliant: %r"
            % (o,))

    return special
|
||||
|
||||
|
||||
def encode_basestring(s):
    """
    Return a JSON representation of a Python string
    """
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return '"%s"' % escaped
|
||||
|
||||
|
||||
def py_encode_basestring_ascii(s):
    # Return an ASCII-only JSON string literal for ``s``; non-ASCII
    # characters become \uXXXX escapes (surrogate pairs above U+FFFF).
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        # str input with high bytes is assumed to be UTF-8 encoded.
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
|
||||
|
||||
|
||||
try:
    # Prefer the C speedup when the extension import at the top succeeded.
    encode_basestring_ascii = c_encode_basestring_ascii
except NameError:
    encode_basestring_ascii = py_encode_basestring_ascii
|
||||
|
||||
|
||||
class JSONEncoder(object):
    """
    Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    __all__ = ['__init__', 'default', 'encode', 'iterencode']
    # Default separators; overridden via the ``separators`` argument.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """
        Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is False, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is True, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is True, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is True, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is True, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level.  An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        # Tracks the nesting depth while pretty-printing.
        self.current_indent_level = 0
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            self.default = default
        self.encoding = encoding

    def _newline_indent(self):
        # Newline plus indentation for the current nesting depth.
        return '\n' + (' ' * (self.indent * self.current_indent_level))

    def _iterencode_list(self, lst, markers=None):
        # Yield the JSON chunks for a list/tuple.  ``markers`` maps id() of
        # in-progress containers to detect circular references.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        yield '['
        if self.indent is not None:
            self.current_indent_level += 1
            newline_indent = self._newline_indent()
            separator = self.item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            separator = self.item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                yield separator
            for chunk in self._iterencode(value, markers):
                yield chunk
        if newline_indent is not None:
            self.current_indent_level -= 1
            yield self._newline_indent()
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _iterencode_dict(self, dct, markers=None):
        # Yield the JSON chunks for a dict; keys are coerced to strings
        # where JavaScript would accept them, else skipped or a TypeError.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        key_separator = self.key_separator
        if self.indent is not None:
            self.current_indent_level += 1
            newline_indent = self._newline_indent()
            item_separator = self.item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = self.item_separator
        first = True
        if self.ensure_ascii:
            encoder = encode_basestring_ascii
        else:
            encoder = encode_basestring
        allow_nan = self.allow_nan
        if self.sort_keys:
            keys = dct.keys()
            keys.sort()
            items = [(k, dct[k]) for k in keys]
        else:
            items = dct.iteritems()
        _encoding = self.encoding
        # utf-8 needs no pre-decode because the encoders handle it natively.
        _do_decode = (_encoding is not None
            and not (_encoding == 'utf-8'))
        for key, value in items:
            if isinstance(key, str):
                if _do_decode:
                    key = key.decode(_encoding)
            elif isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = floatstr(key, allow_nan)
            elif isinstance(key, (int, long)):
                key = str(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif self.skipkeys:
                continue
            else:
                raise TypeError("key %r is not a string" % (key,))
            if first:
                first = False
            else:
                yield item_separator
            yield encoder(key)
            yield key_separator
            for chunk in self._iterencode(value, markers):
                yield chunk
        if newline_indent is not None:
            self.current_indent_level -= 1
            yield self._newline_indent()
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(self, o, markers=None):
        # Dispatch on type and yield the JSON chunks for any value.
        if isinstance(o, basestring):
            if self.ensure_ascii:
                encoder = encode_basestring_ascii
            else:
                encoder = encode_basestring
            _encoding = self.encoding
            if (_encoding is not None and isinstance(o, str)
                    and not (_encoding == 'utf-8')):
                o = o.decode(_encoding)
            yield encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield floatstr(o, self.allow_nan)
        elif isinstance(o, (list, tuple)):
            for chunk in self._iterencode_list(o, markers):
                yield chunk
        elif isinstance(o, dict):
            for chunk in self._iterencode_dict(o, markers):
                yield chunk
        else:
            # Unknown type: let default() turn it into something encodable,
            # guarding against circular references through it.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            for chunk in self._iterencode_default(o, markers):
                yield chunk
            if markers is not None:
                del markers[markerid]

    def _iterencode_default(self, o, markers=None):
        # Encode the substitute object returned by default().
        newobj = self.default(o)
        return self._iterencode(newobj, markers)

    def default(self, o):
        """
        Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError("%r is not JSON serializable" % (o,))

    def encode(self, o):
        """
        Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = list(self.iterencode(o))
        return ''.join(chunks)

    def iterencode(self, o):
        """
        Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        return self._iterencode(o, markers)
|
||||
|
||||
__all__ = ['JSONEncoder']  # public API of this module
|
|
@ -0,0 +1,67 @@
|
|||
"""
|
||||
Iterator based sre token scanner
|
||||
"""
|
||||
import re
|
||||
from re import VERBOSE, MULTILINE, DOTALL
|
||||
import sre_parse
|
||||
import sre_compile
|
||||
import sre_constants
|
||||
from sre_constants import BRANCH, SUBPATTERN
|
||||
|
||||
# Public API of this module.
__all__ = ['Scanner', 'pattern']

# Default regex flags applied to every token pattern.
FLAGS = (VERBOSE | MULTILINE | DOTALL)
|
||||
|
||||
class Scanner(object):
    # Combines the per-token regexes of ``lexicon`` into one alternation
    # and dispatches each match to its token callable by group index.
    def __init__(self, lexicon, flags=FLAGS):
        self.actions = [None]  # index 0 unused; match group indexes are 1-based
        # Combine phrases into a compound pattern
        s = sre_parse.Pattern()
        s.flags = flags
        p = []
        for idx, token in enumerate(lexicon):
            phrase = token.pattern
            try:
                subpattern = sre_parse.SubPattern(s,
                    [(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))])
            except sre_constants.error:
                raise
            p.append(subpattern)
            self.actions.append(token)

        s.groups = len(p) + 1 # NOTE(guido): Added to make SRE validation work
        p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
        self.scanner = sre_compile.compile(p)

    def iterscan(self, string, idx=0, context=None):
        """
        Yield match, end_idx for each match
        """
        match = self.scanner.scanner(string, idx).match
        actions = self.actions
        lastend = idx
        end = len(string)
        while True:
            m = match()
            if m is None:
                break
            matchbegin, matchend = m.span()
            if lastend == matchend:
                # Zero-width match: stop to avoid looping forever.
                break
            action = actions[m.lastindex]
            if action is not None:
                rval, next_pos = action(m, context)
                if next_pos is not None and next_pos != matchend:
                    # "fast forward" the scanner
                    matchend = next_pos
                    match = self.scanner.scanner(string, matchend).match
                yield rval, matchend
            lastend = matchend
|
||||
|
||||
|
||||
def pattern(pattern, flags=FLAGS):
    """Decorator attaching *pattern* and its compiled regex to a scanner action."""
    compiled = re.compile(pattern, flags)
    def decorator(fn):
        fn.regex = compiled
        fn.pattern = pattern
        return fn
    return decorator
|
|
@ -0,0 +1,22 @@
|
|||
import unittest
|
||||
import doctest
|
||||
|
||||
def additional_tests():
    """Collect the doctests of the simplejson modules into a test suite."""
    import simplejson
    import simplejson.encoder
    import simplejson.decoder
    suite = unittest.TestSuite()
    modules = (simplejson, simplejson.encoder, simplejson.decoder)
    for module in modules:
        suite.addTest(doctest.DocTestSuite(module))
    return suite
|
||||
|
||||
def main():
    """Run the doctest suite with a text test runner."""
    unittest.TextTestRunner().run(additional_tests())
|
||||
|
||||
if __name__ == '__main__':
    import os
    import sys
    # Make the package importable when run directly from a source checkout
    # (the package root is three directories above this file).
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
    main()
|
|
@ -0,0 +1,15 @@
|
|||
import decimal
|
||||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
class TestDecode(TestCase):
    """Tests for the loads() parse_float / parse_int hooks."""

    def test_decimal(self):
        # parse_float should route JSON reals through decimal.Decimal.
        rval = S.loads('1.1', parse_float=decimal.Decimal)
        # assertTrue/assertEqual replace the deprecated assert_/assertEquals.
        self.assertTrue(isinstance(rval, decimal.Decimal))
        self.assertEqual(rval, decimal.Decimal('1.1'))

    def test_float(self):
        # parse_int should route JSON integers through float.
        rval = S.loads('1', parse_int=float)
        self.assertTrue(isinstance(rval, float))
        self.assertEqual(rval, 1.0)
|
|
@ -0,0 +1,9 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
class TestDefault(TestCase):
    """Ensure the dumps() default= hook is applied to unserializable objects."""

    def test_default(self):
        # default=repr must serialize `type` identically to dumping repr(type).
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(
            S.dumps(type, default=repr),
            S.dumps(repr(type)))
|
|
@ -0,0 +1,13 @@
|
|||
from unittest import TestCase
|
||||
from cStringIO import StringIO
|
||||
|
||||
import simplejson as S
|
||||
|
||||
class TestDump(TestCase):
    """Smoke tests for dump() (file-like output) and dumps() (string output)."""

    def test_dump(self):
        sio = StringIO()
        S.dump({}, sio)
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(sio.getvalue(), '{}')

    def test_dumps(self):
        self.assertEqual(S.dumps({}), '{}')
|
|
@ -0,0 +1,36 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson.encoder
|
||||
|
||||
# (input, expected output) pairs for encode_basestring_ascii: unicode and
# UTF-8 str inputs must produce identical ASCII-escaped JSON literals.
CASES = [
    (u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
    (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
    (u'controls', '"controls"'),
    (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
    (u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
    (u' s p a c e d ', '" s p a c e d "'),
    (u'\U0001d120', '"\\ud834\\udd20"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    ('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    ('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    (u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
    (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
    (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
]
|
||||
|
||||
class TestEncodeBaseStringAscii(TestCase):
    """Both implementations of encode_basestring_ascii must agree on CASES."""

    def test_py_encode_basestring_ascii(self):
        self._test_encode_basestring_ascii(simplejson.encoder.py_encode_basestring_ascii)

    def test_c_encode_basestring_ascii(self):
        self._test_encode_basestring_ascii(simplejson.encoder.c_encode_basestring_ascii)

    def _test_encode_basestring_ascii(self, encode_basestring_ascii):
        # Shared driver: every (input, expected) pair must round through
        # the given implementation unchanged.
        fname = encode_basestring_ascii.__name__
        for input_string, expect in CASES:
            result = encode_basestring_ascii(input_string)
            # assertEqual replaces the deprecated assertEquals alias.
            self.assertEqual(result, expect,
                '%r != %r for %s(%r)' % (result, expect, fname, input_string))
|
|
@ -0,0 +1,76 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
# Fri Dec 30 18:57:26 2005
|
||||
# Invalid JSON documents from the json.org JSON_checker suite; each entry
# (except those listed in SKIPS) must raise ValueError when loaded.
JSONDOCS = [
    # http://json.org/JSON_checker/test/fail1.json
    '"A JSON payload should be an object or array, not a string."',
    # http://json.org/JSON_checker/test/fail2.json
    '["Unclosed array"',
    # http://json.org/JSON_checker/test/fail3.json
    '{unquoted_key: "keys must be quoted}',
    # http://json.org/JSON_checker/test/fail4.json
    '["extra comma",]',
    # http://json.org/JSON_checker/test/fail5.json
    '["double extra comma",,]',
    # http://json.org/JSON_checker/test/fail6.json
    '[ , "<-- missing value"]',
    # http://json.org/JSON_checker/test/fail7.json
    '["Comma after the close"],',
    # http://json.org/JSON_checker/test/fail8.json
    '["Extra close"]]',
    # http://json.org/JSON_checker/test/fail9.json
    '{"Extra comma": true,}',
    # http://json.org/JSON_checker/test/fail10.json
    '{"Extra value after close": true} "misplaced quoted value"',
    # http://json.org/JSON_checker/test/fail11.json
    '{"Illegal expression": 1 + 2}',
    # http://json.org/JSON_checker/test/fail12.json
    '{"Illegal invocation": alert()}',
    # http://json.org/JSON_checker/test/fail13.json
    '{"Numbers cannot have leading zeroes": 013}',
    # http://json.org/JSON_checker/test/fail14.json
    '{"Numbers cannot be hex": 0x14}',
    # http://json.org/JSON_checker/test/fail15.json
    '["Illegal backslash escape: \\x15"]',
    # http://json.org/JSON_checker/test/fail16.json
    '["Illegal backslash escape: \\\'"]',
    # http://json.org/JSON_checker/test/fail17.json
    '["Illegal backslash escape: \\017"]',
    # http://json.org/JSON_checker/test/fail18.json
    '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
    # http://json.org/JSON_checker/test/fail19.json
    '{"Missing colon" null}',
    # http://json.org/JSON_checker/test/fail20.json
    '{"Double colon":: null}',
    # http://json.org/JSON_checker/test/fail21.json
    '{"Comma instead of colon", null}',
    # http://json.org/JSON_checker/test/fail22.json
    '["Colon instead of comma": false]',
    # http://json.org/JSON_checker/test/fail23.json
    '["Bad value", truth]',
    # http://json.org/JSON_checker/test/fail24.json
    "['single quote']",
    # http://code.google.com/p/simplejson/issues/detail?id=3
    u'["A\u001FZ control characters in string"]',
]

# 1-based indexes of documents simplejson deliberately accepts anyway.
SKIPS = {
    1: "why not have a string payload?",
    18: "spec doesn't specify any nesting limitations",
}
|
||||
|
||||
class TestFail(TestCase):
    """Every JSONDOCS document must be rejected unless listed in SKIPS."""

    def test_failures(self):
        for zero_based, doc in enumerate(JSONDOCS):
            case_no = zero_based + 1
            if case_no in SKIPS:
                # Known-accepted document: just make sure it still parses.
                S.loads(doc)
                continue
            try:
                S.loads(doc)
            except ValueError:
                continue
            self.fail("Expected failure for fail%d.json: %r" % (case_no, doc))
|
|
@ -0,0 +1,9 @@
|
|||
import math
|
||||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
class TestFloat(TestCase):
    """Floats must survive a dumps round-trip without losing precision."""

    def test_floats(self):
        cases = (1617161771.7650001, math.pi, math.pi ** 100, math.pi ** -100)
        for value in cases:
            encoded = S.dumps(value)
            self.assertEquals(float(encoded), value)
|
|
@ -0,0 +1,41 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
import textwrap
|
||||
|
||||
class TestIndent(TestCase):
    """dumps(indent=2) must match a hand-written pretty layout exactly and
    still round-trip through loads."""

    def test_indent(self):
        payload = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh',
                   'i-vhbjkhnth', {'nifty': 87},
                   {'field': 'yes', 'morefield': False}]

        expect = textwrap.dedent("""\
        [
          [
            "blorpie"
          ],
          [
            "whoops"
          ],
          [],
          "d-shtaeou",
          "d-nthiouh",
          "i-vhbjkhnth",
          {
            "nifty": 87
          },
          {
            "field": "yes",
            "morefield": false
          }
        ]""")

        compact = S.dumps(payload)
        pretty = S.dumps(payload, indent=2, sort_keys=True,
                         separators=(',', ': '))

        # Both encodings must decode back to the original structure.
        self.assertEquals(S.loads(compact), payload)
        self.assertEquals(S.loads(pretty), payload)
        # And the indented form must match the expected layout byte-for-byte.
        self.assertEquals(pretty, expect)
|
|
@ -0,0 +1,76 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
# from http://json.org/JSON_checker/test/pass1.json
# NOTE: the "quotes" member below had lost its \" escapes in transit,
# which made the whole document invalid JSON (and the round-trip test
# below impossible to pass).  The escapes are restored here to match the
# canonical JSON_checker pass1.json.
JSON = r'''
[
    "JSON Test Pattern pass1",
    {"object with 1 member":["array with 1 element"]},
    {},
    [],
    -42,
    true,
    false,
    null,
    {
        "integer": 1234567890,
        "real": -9876.543210,
        "e": 0.123456789e-12,
        "E": 1.234567890E+34,
        "": 23456789012E666,
        "zero": 0,
        "one": 1,
        "space": " ",
        "quote": "\"",
        "backslash": "\\",
        "controls": "\b\f\n\r\t",
        "slash": "/ & \/",
        "alpha": "abcdefghijklmnopqrstuvwyz",
        "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ",
        "digit": "0123456789",
        "special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?",
        "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A",
        "true": true,
        "false": false,
        "null": null,
        "array":[ ],
        "object":{ },
        "address": "50 St. James Street",
        "url": "http://www.JSON.org/",
        "comment": "// /* <!-- --",
        "# -- --> */": " ",
        " s p a c e d " :[1,2 , 3

,

4 , 5 , 6 ,7 ],
        "compact": [1,2,3,4,5,6,7],
        "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}",
        "quotes": "\" \u0022 %22 0x22 034 \"",
        "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"
: "A key can be any string"
    },
    0.5 ,98.6
,
99.44
,

1066


,"rosebud"]
'''
|
||||
|
||||
class TestPass1(TestCase):
    """pass1.json must round-trip, and its out-of-range float must be
    rejected when allow_nan is disabled."""

    def test_parse(self):
        # in/out equivalence and parsing
        decoded = S.loads(JSON)
        reencoded = S.dumps(decoded)
        self.assertEquals(decoded, S.loads(reencoded))
        # The document contains 23456789012E666 (an infinity), which dumps
        # must refuse to emit when allow_nan=False.
        try:
            S.dumps(decoded, allow_nan=False)
        except ValueError:
            pass
        else:
            self.fail("23456789012E666 should be out of range")
|
|
@ -0,0 +1,14 @@
|
|||
from unittest import TestCase
|
||||
import simplejson as S
|
||||
|
||||
# from http://json.org/JSON_checker/test/pass2.json
# 19 levels of list nesting -- one short of the JSON_checker "Too deep"
# failure case (fail18.json) -- so a compliant parser must accept it.
JSON = r'''
[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]
'''
|
||||
|
||||
class TestPass2(TestCase):
    """pass2.json (deeply nested but legal) must round-trip."""

    def test_parse(self):
        # in/out equivalence and parsing
        decoded = S.loads(JSON)
        self.assertEquals(decoded, S.loads(S.dumps(decoded)))
|
|
@ -0,0 +1,20 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
# from http://json.org/JSON_checker/test/pass3.json
# A minimal legal document whose top-level value is an object.
JSON = r'''
{
    "JSON Test Pattern pass3": {
        "The outermost value": "must be an object or array.",
        "In this test": "It is an object."
    }
}
'''
|
||||
|
||||
class TestPass3(TestCase):
    """pass3.json (top-level object) must round-trip."""

    def test_parse(self):
        # in/out equivalence and parsing
        decoded = S.loads(JSON)
        self.assertEquals(decoded, S.loads(S.dumps(decoded)))
|
|
@ -0,0 +1,65 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
class JSONTestObject:
    """Sentinel: the class object itself is fed to the encoder in the
    recursion tests below."""
    pass
|
||||
|
||||
class RecursiveJSONEncoder(S.JSONEncoder):
    """Encoder whose default() can be switched into an infinitely
    recursive mode to exercise circular-reference detection."""

    # When True, default() returns a structure containing the sentinel
    # itself, forcing default() to be re-entered forever.
    recurse = False

    def default(self, o):
        if o is JSONTestObject:
            if self.recurse:
                return [JSONTestObject]
            else:
                return 'JSONTestObject'
        # Bug fix: the superclass method must be called with self.
        # The original `S.JSONEncoder.default(o)` passed only `o`, so any
        # non-sentinel object raised TypeError (bad call) instead of the
        # encoder's own "not serializable" error.
        return S.JSONEncoder.default(self, o)
|
||||
|
||||
class TestRecursion(TestCase):
    """Circular structures must raise ValueError; shared (non-circular)
    references must not, and must not leave stale recursion markers."""

    def test_listrecursion(self):
        x = []
        x.append(x)  # directly self-referential list
        try:
            S.dumps(x)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on list recursion")
        x = []
        y = [x]
        x.append(y)  # two lists referring to each other
        try:
            S.dumps(x)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on alternating list recursion")
        y = []
        x = [y, y]
        # ensure that the marker is cleared: the same sub-list appears
        # twice, which must encode fine after the first visit's marker
        # has been removed.
        S.dumps(x)

    def test_dictrecursion(self):
        x = {}
        x["test"] = x  # directly self-referential dict
        try:
            S.dumps(x)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on dict recursion")
        x = {}
        y = {"a": x, "b": x}
        # ensure that the marker is cleared.
        # Bug fix: the original called S.dumps(x) -- the empty inner dict --
        # which never exercises the shared-reference path.  Serialize the
        # container instead, mirroring test_listrecursion above.
        S.dumps(y)

    def test_defaultrecursion(self):
        enc = RecursiveJSONEncoder()
        self.assertEquals(enc.encode(JSONTestObject), '"JSONTestObject"')
        enc.recurse = True
        try:
            enc.encode(JSONTestObject)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on default recursion")
|
|
@ -0,0 +1,102 @@
|
|||
import sys
|
||||
import decimal
|
||||
from unittest import TestCase
|
||||
|
||||
import simplejson.decoder
|
||||
|
||||
class TestScanString(TestCase):
    """Drive both scanstring implementations over the same corpus.

    scanstring(s, end, encoding, strict) parses a JSON string literal
    starting just after its opening quote and returns the tuple
    (decoded_unicode, index_one_past_the_closing_quote).
    """

    def test_py_scanstring(self):
        # Pure-Python reference implementation.
        self._test_scanstring(simplejson.decoder.py_scanstring)

    def test_c_scanstring(self):
        # C-accelerated implementation must behave identically.
        self._test_scanstring(simplejson.decoder.c_scanstring)

    def _test_scanstring(self, scanstring):
        # A \ud834\udd20 surrogate-pair escape decodes to one astral char.
        self.assertEquals(
            scanstring('"z\\ud834\\udd20x"', 1, None, True),
            (u'z\U0001d120x', 16))

        # The end index for a *literal* astral char depends on whether this
        # is a narrow (UTF-16 storage, 2 code units) or wide unicode build.
        if sys.maxunicode == 65535:
            self.assertEquals(
                scanstring(u'"z\U0001d120x"', 1, None, True),
                (u'z\U0001d120x', 6))
        else:
            self.assertEquals(
                scanstring(u'"z\U0001d120x"', 1, None, True),
                (u'z\U0001d120x', 5))

        self.assertEquals(
            scanstring('"\\u007b"', 1, None, True),
            (u'{', 8))

        self.assertEquals(
            scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True),
            (u'A JSON payload should be an object or array, not a string.', 60))

        # The remaining inputs come from the JSON_checker fail*.json corpus.
        # scanstring only consumes the first string literal it is pointed at,
        # so each call succeeds even though the full document is invalid.
        self.assertEquals(
            scanstring('["Unclosed array"', 2, None, True),
            (u'Unclosed array', 17))

        self.assertEquals(
            scanstring('["extra comma",]', 2, None, True),
            (u'extra comma', 14))

        self.assertEquals(
            scanstring('["double extra comma",,]', 2, None, True),
            (u'double extra comma', 21))

        self.assertEquals(
            scanstring('["Comma after the close"],', 2, None, True),
            (u'Comma after the close', 24))

        self.assertEquals(
            scanstring('["Extra close"]]', 2, None, True),
            (u'Extra close', 14))

        self.assertEquals(
            scanstring('{"Extra comma": true,}', 2, None, True),
            (u'Extra comma', 14))

        self.assertEquals(
            scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True),
            (u'Extra value after close', 26))

        self.assertEquals(
            scanstring('{"Illegal expression": 1 + 2}', 2, None, True),
            (u'Illegal expression', 21))

        self.assertEquals(
            scanstring('{"Illegal invocation": alert()}', 2, None, True),
            (u'Illegal invocation', 21))

        self.assertEquals(
            scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True),
            (u'Numbers cannot have leading zeroes', 37))

        self.assertEquals(
            scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True),
            (u'Numbers cannot be hex', 24))

        self.assertEquals(
            scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True),
            (u'Too deep', 30))

        self.assertEquals(
            scanstring('{"Missing colon" null}', 2, None, True),
            (u'Missing colon', 16))

        self.assertEquals(
            scanstring('{"Double colon":: null}', 2, None, True),
            (u'Double colon', 15))

        self.assertEquals(
            scanstring('{"Comma instead of colon", null}', 2, None, True),
            (u'Comma instead of colon', 25))

        self.assertEquals(
            scanstring('["Colon instead of comma": false]', 2, None, True),
            (u'Colon instead of comma', 25))

        self.assertEquals(
            scanstring('["Bad value", truth]', 2, None, True),
            (u'Bad value', 12))
|
|
@ -0,0 +1,42 @@
|
|||
import textwrap
|
||||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
|
||||
class TestSeparators(TestCase):
    # Custom separators (' ,' between items, ' : ' between key and value)
    # must be honoured verbatim in indented output, and the result must
    # still round-trip through loads.
    def test_separators(self):
        h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
             {'nifty': 87}, {'field': 'yes', 'morefield': False} ]

        # Note the space *before* each comma and around each colon, coming
        # from the separators argument below.
        expect = textwrap.dedent("""\
        [
          [
            "blorpie"
          ] ,
          [
            "whoops"
          ] ,
          [] ,
          "d-shtaeou" ,
          "d-nthiouh" ,
          "i-vhbjkhnth" ,
          {
            "nifty" : 87
          } ,
          {
            "field" : "yes" ,
            "morefield" : false
          }
        ]""")

        d1 = S.dumps(h)
        d2 = S.dumps(h, indent=2, sort_keys=True, separators=(' ,', ' : '))

        h1 = S.loads(d1)
        h2 = S.loads(d2)

        self.assertEquals(h1, h)
        self.assertEquals(h2, h)
        self.assertEquals(d2, expect)
|
|
@ -0,0 +1,55 @@
|
|||
from unittest import TestCase
|
||||
|
||||
import simplejson as S
|
||||
|
||||
class TestUnicode(TestCase):
    """Unicode handling: the encoding parameter, \\uXXXX escaping,
    surrogate pairs, and decode round-trips."""

    def test_encoding1(self):
        encoder = S.JSONEncoder(encoding='utf-8')
        text = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
        raw = text.encode('utf-8')
        # Encoding the unicode object and its UTF-8 bytes must agree.
        self.assertEquals(encoder.encode(text), encoder.encode(raw))

    def test_encoding2(self):
        text = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
        raw = text.encode('utf-8')
        self.assertEquals(S.dumps(text, encoding='utf-8'),
                          S.dumps(raw, encoding='utf-8'))

    def test_encoding3(self):
        text = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
        self.assertEquals(S.dumps(text), '"\\u03b1\\u03a9"')

    def test_encoding4(self):
        text = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
        self.assertEquals(S.dumps([text]), '["\\u03b1\\u03a9"]')

    def test_encoding5(self):
        text = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
        self.assertEquals(S.dumps(text, ensure_ascii=False), u'"%s"' % (text,))

    def test_encoding6(self):
        text = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
        self.assertEquals(S.dumps([text], ensure_ascii=False),
                          u'["%s"]' % (text,))

    def test_big_unicode_encode(self):
        astral = u'\U0001d120'
        self.assertEquals(S.dumps(astral), '"\\ud834\\udd20"')
        self.assertEquals(S.dumps(astral, ensure_ascii=False),
                          u'"\U0001d120"')

    def test_big_unicode_decode(self):
        expected = u'z\U0001d120x'
        self.assertEquals(S.loads('"' + expected + '"'), expected)
        self.assertEquals(S.loads('"z\\ud834\\udd20x"'), expected)

    def test_unicode_decode(self):
        # NOTE(review): range(0, 0xd7ff) stops at U+D7FE, so U+D7FF itself
        # is never tested -- confirm whether that is intentional.
        for i in range(0, 0xd7ff):
            u = unichr(i)
            json = '"\\u%04x"' % (i,)
            self.assertEquals(S.loads(json), u)
|
|
@ -0,0 +1,44 @@
|
|||
r"""
|
||||
Using simplejson from the shell to validate and
|
||||
pretty-print::
|
||||
|
||||
$ echo '{"json":"obj"}' | python -msimplejson
|
||||
{
|
||||
"json": "obj"
|
||||
}
|
||||
$ echo '{ 1.2:3.4}' | python -msimplejson
|
||||
Expecting property name: line 1 column 2 (char 2)
|
||||
|
||||
Note that the JSON produced by this module's default settings
|
||||
is a subset of YAML, so it may be used as a serializer for that as well.
|
||||
"""
|
||||
import simplejson
|
||||
|
||||
#
|
||||
# Pretty printer:
|
||||
# curl http://mochikit.com/examples/ajax_tables/domains.json | python -msimplejson.tool
|
||||
#
|
||||
|
||||
def main():
    """Read a JSON document and pretty-print it, command-line style.

    Usage: python -msimplejson.tool [infile [outfile]]
    With no arguments reads stdin and writes stdout; one argument names
    the input file; two arguments name input and output files.
    Exits via SystemExit with the decoder's message on invalid JSON.
    """
    import sys
    if len(sys.argv) == 1:
        infile = sys.stdin
        outfile = sys.stdout
    elif len(sys.argv) == 2:
        infile = open(sys.argv[1], 'rb')
        outfile = sys.stdout
    elif len(sys.argv) == 3:
        infile = open(sys.argv[1], 'rb')
        outfile = open(sys.argv[2], 'wb')
    else:
        # Wrong arity: print a usage line and exit non-zero.
        raise SystemExit("%s [infile [outfile]]" % (sys.argv[0],))
    try:
        obj = simplejson.load(infile)
    except ValueError, e:
        # Invalid JSON: exit with the decoder's message, not a traceback.
        raise SystemExit(e)
    simplejson.dump(obj, outfile, sort_keys=True, indent=4)
    outfile.write('\n')


if __name__ == '__main__':
    main()
|
Загрузка…
Ссылка в новой задаче