зеркало из https://github.com/mozilla/gecko-dev.git
Bug 922190 - Remove bundled copy of simplejson. r=gps
--HG-- extra : rebase_source : 5d33bafacd732e6bfb7acb6bb4d171eabb7d258a
This commit is contained in:
Родитель
0e7c9d39cd
Коммит
d0f5dea659
|
@ -22,16 +22,13 @@
|
||||||
|
|
||||||
__version__ = '1'
|
__version__ = '1'
|
||||||
|
|
||||||
|
import json
|
||||||
import os
|
import os
|
||||||
import optparse
|
import optparse
|
||||||
import logging
|
import logging
|
||||||
import hashlib
|
import hashlib
|
||||||
import urllib2
|
import urllib2
|
||||||
import ConfigParser
|
import ConfigParser
|
||||||
try:
|
|
||||||
import simplejson as json # I hear simplejson is faster
|
|
||||||
except ImportError:
|
|
||||||
import json
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
simplejson.pth:python/simplejson-2.1.1
|
|
||||||
marionette.pth:testing/marionette/client
|
marionette.pth:testing/marionette/client
|
||||||
blessings.pth:python/blessings
|
blessings.pth:python/blessings
|
||||||
configobj.pth:python/configobj
|
configobj.pth:python/configobj
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
# Works with python2.6
|
# Works with python2.6
|
||||||
|
|
||||||
import datetime, os, re, sys, traceback
|
import datetime, os, re, sys, traceback
|
||||||
import math, string, copy
|
import math, string, copy, json
|
||||||
import subprocess
|
import subprocess
|
||||||
from subprocess import *
|
from subprocess import *
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
|
@ -117,17 +117,6 @@ def compare(current, baseline):
|
||||||
if percent_speedups:
|
if percent_speedups:
|
||||||
print 'Average speedup: %.2f%%' % avg(percent_speedups)
|
print 'Average speedup: %.2f%%' % avg(percent_speedups)
|
||||||
|
|
||||||
def try_import_json():
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
return json
|
|
||||||
except ImportError:
|
|
||||||
try:
|
|
||||||
import simplejson as json
|
|
||||||
return json
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
script_path = os.path.abspath(__file__)
|
script_path = os.path.abspath(__file__)
|
||||||
script_dir = os.path.dirname(script_path)
|
script_dir = os.path.dirname(script_path)
|
||||||
|
@ -157,11 +146,6 @@ if __name__ == '__main__':
|
||||||
|
|
||||||
test_list = [ Test.from_file(tst, name, OPTIONS) for tst, name in test_list ]
|
test_list = [ Test.from_file(tst, name, OPTIONS) for tst, name in test_list ]
|
||||||
|
|
||||||
if OPTIONS.baseline_path:
|
|
||||||
json = try_import_json()
|
|
||||||
if not json:
|
|
||||||
print('You need a json lib for baseline comparison')
|
|
||||||
sys.exit(1)
|
|
||||||
try:
|
try:
|
||||||
print("{")
|
print("{")
|
||||||
bench_map = run_tests(test_list, test_dir)
|
bench_map = run_tests(test_list, test_dir)
|
||||||
|
|
|
@ -1,18 +1,12 @@
|
||||||
#!/usr/bin/env python2.4
|
#!/usr/bin/env python2.4
|
||||||
"""usage: %progname candidate_path baseline_path
|
"""usage: %progname candidate_path baseline_path
|
||||||
|
|
||||||
:warning: May raise ImportError on import if JSON support is missing.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
import optparse
|
import optparse
|
||||||
from contextlib import nested
|
from contextlib import nested
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
except ImportError:
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
|
|
||||||
def avg(seq):
|
def avg(seq):
|
||||||
return sum(seq) / len(seq)
|
return sum(seq) / len(seq)
|
||||||
|
|
|
@ -1,183 +0,0 @@
|
||||||
Version 2.1.1 released 2010-03-31
|
|
||||||
|
|
||||||
* Change how setup.py imports ez_setup.py to try and workaround old versions
|
|
||||||
of setuptools.
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=75
|
|
||||||
* Fix compilation on Windows platform (and other platforms with very
|
|
||||||
picky compilers)
|
|
||||||
* Corrected simplejson.__version__ and other minor doc changes.
|
|
||||||
* Do not fail speedups tests if speedups could not be built.
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=73
|
|
||||||
|
|
||||||
Version 2.1.0 released 2010-03-10
|
|
||||||
|
|
||||||
* Decimal serialization officially supported for encoding with
|
|
||||||
use_decimal=True. For encoding this encodes Decimal objects and
|
|
||||||
for decoding it implies parse_float=Decimal
|
|
||||||
* Python 2.4 no longer supported (may still work, but no longer tested)
|
|
||||||
* Decoding performance and memory utilization enhancements
|
|
||||||
http://bugs.python.org/issue7451
|
|
||||||
* JSONEncoderForHTML class for escaping &, <, >
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=66
|
|
||||||
* Memoization of object keys during encoding (when using speedups)
|
|
||||||
* Encoder changed to use PyIter_Next for list iteration to avoid
|
|
||||||
potential threading issues
|
|
||||||
* Encoder changed to use iteritems rather than PyDict_Next in order to
|
|
||||||
support dict subclasses that have a well defined ordering
|
|
||||||
http://bugs.python.org/issue6105
|
|
||||||
* indent encoding parameter changed to be a string rather than an integer
|
|
||||||
(integer use still supported for backwards compatibility)
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=56
|
|
||||||
* Test suite (python setup.py test) now automatically runs with and without
|
|
||||||
speedups
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=55
|
|
||||||
* Fixed support for older versions of easy_install (e.g. stock Mac OS X config)
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=54
|
|
||||||
* Fixed str/unicode mismatches when using ensure_ascii=False
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=48
|
|
||||||
* Fixed error message when parsing an array with trailing comma with speedups
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=46
|
|
||||||
* Refactor decoder errors to raise JSONDecodeError instead of ValueError
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=45
|
|
||||||
* New ordered_pairs_hook feature in decoder which makes it possible to
|
|
||||||
preserve key order. http://bugs.python.org/issue5381
|
|
||||||
* Fixed containerless unicode float decoding (same bug as 2.0.4, oops!)
|
|
||||||
http://code.google.com/p/simplejson/issues/detail?id=43
|
|
||||||
* Share PosInf definition between encoder and decoder
|
|
||||||
* Minor reformatting to make it easier to backport simplejson changes
|
|
||||||
to Python 2.7/3.1 json module
|
|
||||||
|
|
||||||
Version 2.0.9 released 2009-02-18
|
|
||||||
|
|
||||||
* Adds cyclic GC to the Encoder and Scanner speedups, which could've
|
|
||||||
caused uncollectible cycles in some cases when using custom parser
|
|
||||||
or encoder functions
|
|
||||||
|
|
||||||
Version 2.0.8 released 2009-02-15
|
|
||||||
|
|
||||||
* Documentation fixes
|
|
||||||
* Fixes encoding True and False as keys
|
|
||||||
* Fixes checking for True and False by identity for several parameters
|
|
||||||
|
|
||||||
Version 2.0.7 released 2009-01-04
|
|
||||||
|
|
||||||
* Documentation fixes
|
|
||||||
* C extension now always returns unicode strings when the input string is
|
|
||||||
unicode, even for empty strings
|
|
||||||
|
|
||||||
Version 2.0.6 released 2008-12-19
|
|
||||||
|
|
||||||
* Windows build fixes
|
|
||||||
|
|
||||||
Version 2.0.5 released 2008-11-23
|
|
||||||
|
|
||||||
* Fixes a segfault in the C extension when using check_circular=False and
|
|
||||||
encoding an invalid document
|
|
||||||
|
|
||||||
Version 2.0.4 released 2008-10-24
|
|
||||||
|
|
||||||
* Fixes a parsing error in the C extension when the JSON document is (only)
|
|
||||||
a floating point number. It would consume one too few characters in that
|
|
||||||
case, and claim the document invalid.
|
|
||||||
|
|
||||||
Version 2.0.3 released 2008-10-11
|
|
||||||
|
|
||||||
* Fixes reference leaks in the encoding speedups (sorry about that!)
|
|
||||||
* Fixes doctest suite for Python 2.6
|
|
||||||
* More optimizations for the decoder
|
|
||||||
|
|
||||||
Version 2.0.2 released 2008-10-06
|
|
||||||
|
|
||||||
* Fixes MSVC2003 build regression
|
|
||||||
* Fixes Python 2.4 compatibility in _speedups.c
|
|
||||||
|
|
||||||
Version 2.0.1 released 2008-09-29
|
|
||||||
|
|
||||||
* Fixes long encoding regression introduced in 2.0.0
|
|
||||||
* Fixes MinGW build regression introduced in 2.0.0
|
|
||||||
|
|
||||||
Version 2.0.0 released 2008-09-27
|
|
||||||
|
|
||||||
* optimized Python encoding path
|
|
||||||
* optimized Python decoding path
|
|
||||||
* optimized C encoding path
|
|
||||||
* optimized C decoding path
|
|
||||||
* switched to sphinx docs (nearly the same as the json module in python 2.6)
|
|
||||||
|
|
||||||
Version 1.9.3 released 2008-09-23
|
|
||||||
|
|
||||||
* Decoding is significantly faster (for our internal benchmarks)
|
|
||||||
* Pretty-printing tool changed from simplejson to simplejson.tool for better
|
|
||||||
Python 2.6 comaptibility
|
|
||||||
* Misc. bug fixes
|
|
||||||
|
|
||||||
Version 1.9 released 2008-05-03
|
|
||||||
|
|
||||||
* Rewrote test suite with unittest and doctest (no more nosetest dependency)
|
|
||||||
* Better PEP 7 and PEP 8 source compliance
|
|
||||||
* Removed simplejson.jsonfilter demo module
|
|
||||||
* simplejson.jsonfilter is no longer included
|
|
||||||
|
|
||||||
Version 1.8.1 released 2008-03-24
|
|
||||||
|
|
||||||
* Optional C extension for accelerating the decoding of JSON strings
|
|
||||||
* Command line interface for pretty-printing JSON (via python -msimplejson)
|
|
||||||
* Decoding of integers and floats is now extensible (e.g. to use Decimal) via
|
|
||||||
parse_int, parse_float options.
|
|
||||||
* Subversion and issue tracker moved to google code:
|
|
||||||
http://code.google.com/p/simplejson/
|
|
||||||
* "/" is no longer escaped, so if you're embedding JSON directly in HTML
|
|
||||||
you'll want to use .replace("/", "\\/") to prevent a close-tag attack.
|
|
||||||
|
|
||||||
Version 1.7 released 2007-03-18
|
|
||||||
|
|
||||||
* Improves encoding performance with an optional C extension to speed up
|
|
||||||
str/unicode encoding (by 10-150x or so), which yields an overall speed
|
|
||||||
boost of 2x+ (JSON is string-heavy).
|
|
||||||
* Support for encoding unicode code points outside the BMP to UTF-16
|
|
||||||
surrogate code pairs (specified by the Strings section of RFC 4627).
|
|
||||||
|
|
||||||
Version 1.6 released 2007-03-03
|
|
||||||
|
|
||||||
* Improved str support for encoding. Previous versions of simplejson
|
|
||||||
integrated strings directly into the output stream, this version ensures
|
|
||||||
they're of a particular encoding (default is UTF-8) so that the output
|
|
||||||
stream is valid.
|
|
||||||
|
|
||||||
Version 1.5 released 2007-01-18
|
|
||||||
|
|
||||||
* Better Python 2.5 compatibility
|
|
||||||
* Better Windows compatibility
|
|
||||||
* indent encoding parameter for pretty printing
|
|
||||||
* separators encoding parameter for generating optimally compact JSON
|
|
||||||
|
|
||||||
Version 1.3 released 2006-04-01
|
|
||||||
|
|
||||||
* The optional object_hook function is called upon decoding of any JSON
|
|
||||||
object literal, and its return value is used instead of the dict that
|
|
||||||
would normally be used. This can be used to efficiently implement
|
|
||||||
features such as JSON-RPC class hinting, or other custom decodings of
|
|
||||||
JSON. See the documentation for more information.
|
|
||||||
|
|
||||||
Version 1.1 released 2005-12-31
|
|
||||||
|
|
||||||
* Renamed from simple_json to simplejson to comply with PEP 8 module naming
|
|
||||||
guidelines
|
|
||||||
* Full set of documentation
|
|
||||||
* More tests
|
|
||||||
* The encoder and decoder have been extended to understand NaN, Infinity, and
|
|
||||||
-Infinity (but this can be turned off via allow_nan=False for strict JSON
|
|
||||||
compliance)
|
|
||||||
* The decoder's scanner has been fixed so that it no longer accepts invalid
|
|
||||||
JSON documents
|
|
||||||
* The decoder now reports line and column information as well as character
|
|
||||||
numbers for easier debugging
|
|
||||||
* The encoder now has a circular reference checker, which can be optionally
|
|
||||||
disabled with check_circular=False
|
|
||||||
* dump, dumps, load, loads now accept an optional cls kwarg to use an
|
|
||||||
alternate JSONEncoder or JSONDecoder class for convenience.
|
|
||||||
* The read/write compatibility shim for json-py now have deprecation warnings
|
|
||||||
|
|
||||||
Version 1.0 released 2005-12-25
|
|
||||||
|
|
||||||
* Initial release
|
|
|
@ -1,19 +0,0 @@
|
||||||
Copyright (c) 2006 Bob Ippolito
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
|
||||||
of the Software, and to permit persons to whom the Software is furnished to do
|
|
||||||
so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
|
@ -1,30 +0,0 @@
|
||||||
Metadata-Version: 1.0
|
|
||||||
Name: simplejson
|
|
||||||
Version: 2.1.1
|
|
||||||
Summary: Simple, fast, extensible JSON encoder/decoder for Python
|
|
||||||
Home-page: http://undefined.org/python/#simplejson
|
|
||||||
Author: Bob Ippolito
|
|
||||||
Author-email: bob@redivi.com
|
|
||||||
License: MIT License
|
|
||||||
Description:
|
|
||||||
simplejson is a simple, fast, complete, correct and extensible
|
|
||||||
JSON <http://json.org> encoder and decoder for Python 2.5+. It is
|
|
||||||
pure Python code with no dependencies, but includes an optional C
|
|
||||||
extension for a serious speed boost.
|
|
||||||
|
|
||||||
simplejson is the externally maintained development version of the
|
|
||||||
json library included with Python 2.6 and Python 3.0, but maintains
|
|
||||||
backwards compatibility with Python 2.5.
|
|
||||||
|
|
||||||
The encoder may be subclassed to provide serialization in any kind of
|
|
||||||
situation, without any special support by the objects to be serialized
|
|
||||||
(somewhat like pickle).
|
|
||||||
|
|
||||||
The decoder can handle incoming JSON strings of any specified encoding
|
|
||||||
(UTF-8 by default).
|
|
||||||
|
|
||||||
Platform: any
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: License :: OSI Approved :: MIT License
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
|
@ -1,179 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# simplejson documentation build configuration file, created by
|
|
||||||
# sphinx-quickstart on Fri Sep 26 18:58:30 2008.
|
|
||||||
#
|
|
||||||
# This file is execfile()d with the current directory set to its containing dir.
|
|
||||||
#
|
|
||||||
# The contents of this file are pickled, so don't put values in the namespace
|
|
||||||
# that aren't pickleable (module imports are okay, they're removed automatically).
|
|
||||||
#
|
|
||||||
# All configuration values have a default value; values that are commented out
|
|
||||||
# serve to show the default value.
|
|
||||||
|
|
||||||
import sys, os
|
|
||||||
|
|
||||||
# If your extensions are in another directory, add it here. If the directory
|
|
||||||
# is relative to the documentation root, use os.path.abspath to make it
|
|
||||||
# absolute, like shown here.
|
|
||||||
#sys.path.append(os.path.abspath('some/directory'))
|
|
||||||
|
|
||||||
# General configuration
|
|
||||||
# ---------------------
|
|
||||||
|
|
||||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
|
||||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
|
||||||
extensions = []
|
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
|
||||||
templates_path = ['_templates']
|
|
||||||
|
|
||||||
# The suffix of source filenames.
|
|
||||||
source_suffix = '.rst'
|
|
||||||
|
|
||||||
# The master toctree document.
|
|
||||||
master_doc = 'index'
|
|
||||||
|
|
||||||
# General substitutions.
|
|
||||||
project = 'simplejson'
|
|
||||||
copyright = '2008, Bob Ippolito'
|
|
||||||
|
|
||||||
# The default replacements for |version| and |release|, also used in various
|
|
||||||
# other places throughout the built documents.
|
|
||||||
#
|
|
||||||
# The short X.Y version.
|
|
||||||
version = '2.1'
|
|
||||||
# The full version, including alpha/beta/rc tags.
|
|
||||||
release = '2.1.1'
|
|
||||||
|
|
||||||
# There are two options for replacing |today|: either, you set today to some
|
|
||||||
# non-false value, then it is used:
|
|
||||||
#today = ''
|
|
||||||
# Else, today_fmt is used as the format for a strftime call.
|
|
||||||
today_fmt = '%B %d, %Y'
|
|
||||||
|
|
||||||
# List of documents that shouldn't be included in the build.
|
|
||||||
#unused_docs = []
|
|
||||||
|
|
||||||
# List of directories, relative to source directories, that shouldn't be searched
|
|
||||||
# for source files.
|
|
||||||
#exclude_dirs = []
|
|
||||||
|
|
||||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
|
||||||
#default_role = None
|
|
||||||
|
|
||||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
|
||||||
#add_function_parentheses = True
|
|
||||||
|
|
||||||
# If true, the current module name will be prepended to all description
|
|
||||||
# unit titles (such as .. function::).
|
|
||||||
#add_module_names = True
|
|
||||||
|
|
||||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
|
||||||
# output. They are ignored by default.
|
|
||||||
#show_authors = False
|
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
|
||||||
pygments_style = 'sphinx'
|
|
||||||
|
|
||||||
|
|
||||||
# Options for HTML output
|
|
||||||
# -----------------------
|
|
||||||
|
|
||||||
# The style sheet to use for HTML and HTML Help pages. A file of that name
|
|
||||||
# must exist either in Sphinx' static/ path, or in one of the custom paths
|
|
||||||
# given in html_static_path.
|
|
||||||
html_style = 'default.css'
|
|
||||||
|
|
||||||
# The name for this set of Sphinx documents. If None, it defaults to
|
|
||||||
# "<project> v<release> documentation".
|
|
||||||
#html_title = None
|
|
||||||
|
|
||||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
|
||||||
#html_short_title = None
|
|
||||||
|
|
||||||
# The name of an image file (within the static path) to place at the top of
|
|
||||||
# the sidebar.
|
|
||||||
#html_logo = None
|
|
||||||
|
|
||||||
# The name of an image file (within the static path) to use as favicon of the
|
|
||||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
|
||||||
# pixels large.
|
|
||||||
#html_favicon = None
|
|
||||||
|
|
||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
|
||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
|
||||||
html_static_path = ['_static']
|
|
||||||
|
|
||||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
|
||||||
# using the given strftime format.
|
|
||||||
html_last_updated_fmt = '%b %d, %Y'
|
|
||||||
|
|
||||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
|
||||||
# typographically correct entities.
|
|
||||||
#html_use_smartypants = True
|
|
||||||
|
|
||||||
# Custom sidebar templates, maps document names to template names.
|
|
||||||
#html_sidebars = {}
|
|
||||||
|
|
||||||
# Additional templates that should be rendered to pages, maps page names to
|
|
||||||
# template names.
|
|
||||||
#html_additional_pages = {}
|
|
||||||
|
|
||||||
# If false, no module index is generated.
|
|
||||||
html_use_modindex = False
|
|
||||||
|
|
||||||
# If false, no index is generated.
|
|
||||||
#html_use_index = True
|
|
||||||
|
|
||||||
# If true, the index is split into individual pages for each letter.
|
|
||||||
#html_split_index = False
|
|
||||||
|
|
||||||
# If true, the reST sources are included in the HTML build as _sources/<name>.
|
|
||||||
#html_copy_source = True
|
|
||||||
|
|
||||||
# If true, an OpenSearch description file will be output, and all pages will
|
|
||||||
# contain a <link> tag referring to it. The value of this option must be the
|
|
||||||
# base URL from which the finished HTML is served.
|
|
||||||
#html_use_opensearch = ''
|
|
||||||
|
|
||||||
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
|
|
||||||
html_file_suffix = '.html'
|
|
||||||
|
|
||||||
# Output file base name for HTML help builder.
|
|
||||||
htmlhelp_basename = 'simplejsondoc'
|
|
||||||
|
|
||||||
|
|
||||||
# Options for LaTeX output
|
|
||||||
# ------------------------
|
|
||||||
|
|
||||||
# The paper size ('letter' or 'a4').
|
|
||||||
#latex_paper_size = 'letter'
|
|
||||||
|
|
||||||
# The font size ('10pt', '11pt' or '12pt').
|
|
||||||
#latex_font_size = '10pt'
|
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
|
||||||
# (source start file, target name, title, author, document class [howto/manual]).
|
|
||||||
latex_documents = [
|
|
||||||
('index', 'simplejson.tex', 'simplejson Documentation',
|
|
||||||
'Bob Ippolito', 'manual'),
|
|
||||||
]
|
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top of
|
|
||||||
# the title page.
|
|
||||||
#latex_logo = None
|
|
||||||
|
|
||||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
|
||||||
# not chapters.
|
|
||||||
#latex_use_parts = False
|
|
||||||
|
|
||||||
# Additional stuff for the LaTeX preamble.
|
|
||||||
#latex_preamble = ''
|
|
||||||
|
|
||||||
# Documents to append as an appendix to all manuals.
|
|
||||||
#latex_appendices = []
|
|
||||||
|
|
||||||
# If false, no module index is generated.
|
|
||||||
#latex_use_modindex = True
|
|
|
@ -1,503 +0,0 @@
|
||||||
:mod:`simplejson` --- JSON encoder and decoder
|
|
||||||
==============================================
|
|
||||||
|
|
||||||
.. module:: simplejson
|
|
||||||
:synopsis: Encode and decode the JSON format.
|
|
||||||
.. moduleauthor:: Bob Ippolito <bob@redivi.com>
|
|
||||||
.. sectionauthor:: Bob Ippolito <bob@redivi.com>
|
|
||||||
|
|
||||||
JSON (JavaScript Object Notation) <http://json.org> is a subset of JavaScript
|
|
||||||
syntax (ECMA-262 3rd edition) used as a lightweight data interchange format.
|
|
||||||
|
|
||||||
:mod:`simplejson` exposes an API familiar to users of the standard library
|
|
||||||
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
|
|
||||||
version of the :mod:`json` library contained in Python 2.6, but maintains
|
|
||||||
compatibility with Python 2.5 and (currently) has
|
|
||||||
significant performance advantages, even without using the optional C
|
|
||||||
extension for speedups.
|
|
||||||
|
|
||||||
Encoding basic Python object hierarchies::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
|
||||||
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
|
||||||
>>> print json.dumps("\"foo\bar")
|
|
||||||
"\"foo\bar"
|
|
||||||
>>> print json.dumps(u'\u1234')
|
|
||||||
"\u1234"
|
|
||||||
>>> print json.dumps('\\')
|
|
||||||
"\\"
|
|
||||||
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
|
|
||||||
{"a": 0, "b": 0, "c": 0}
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO()
|
|
||||||
>>> json.dump(['streaming API'], io)
|
|
||||||
>>> io.getvalue()
|
|
||||||
'["streaming API"]'
|
|
||||||
|
|
||||||
Compact encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
|
|
||||||
'[1,2,3,{"4":5,"6":7}]'
|
|
||||||
|
|
||||||
Pretty printing::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4 * ' ')
|
|
||||||
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
|
|
||||||
{
|
|
||||||
"4": 5,
|
|
||||||
"6": 7
|
|
||||||
}
|
|
||||||
|
|
||||||
Decoding JSON::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
|
|
||||||
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
|
||||||
True
|
|
||||||
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
|
|
||||||
True
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO('["streaming API"]')
|
|
||||||
>>> json.load(io)[0] == 'streaming API'
|
|
||||||
True
|
|
||||||
|
|
||||||
Using Decimal instead of float::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> from decimal import Decimal
|
|
||||||
>>> json.loads('1.1', use_decimal=True) == Decimal('1.1')
|
|
||||||
True
|
|
||||||
>>> json.dumps(Decimal('1.1'), use_decimal=True) == '1.1'
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object decoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def as_complex(dct):
|
|
||||||
... if '__complex__' in dct:
|
|
||||||
... return complex(dct['real'], dct['imag'])
|
|
||||||
... return dct
|
|
||||||
...
|
|
||||||
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
|
||||||
... object_hook=as_complex)
|
|
||||||
(1+2j)
|
|
||||||
>>> import decimal
|
|
||||||
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def encode_complex(obj):
|
|
||||||
... if isinstance(obj, complex):
|
|
||||||
... return [obj.real, obj.imag]
|
|
||||||
... raise TypeError(repr(o) + " is not JSON serializable")
|
|
||||||
...
|
|
||||||
>>> json.dumps(2 + 1j, default=encode_complex)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
|
|
||||||
|
|
||||||
.. highlight:: none
|
|
||||||
|
|
||||||
Using :mod:`simplejson.tool` from the shell to validate and pretty-print::
|
|
||||||
|
|
||||||
$ echo '{"json":"obj"}' | python -m simplejson.tool
|
|
||||||
{
|
|
||||||
"json": "obj"
|
|
||||||
}
|
|
||||||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
|
|
||||||
Expecting property name: line 1 column 2 (char 2)
|
|
||||||
|
|
||||||
.. highlight:: python
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
The JSON produced by this module's default settings is a subset of
|
|
||||||
YAML, so it may be used as a serializer for that as well.
|
|
||||||
|
|
||||||
|
|
||||||
Basic Usage
|
|
||||||
-----------
|
|
||||||
|
|
||||||
.. function:: dump(obj, fp[, skipkeys[, ensure_ascii[, check_circular[, allow_nan[, cls[, indent[, separators[, encoding[, default[, use_decimal[, **kw]]]]]]]]]]])
|
|
||||||
|
|
||||||
Serialize *obj* as a JSON formatted stream to *fp* (a ``.write()``-supporting
|
|
||||||
file-like object).
|
|
||||||
|
|
||||||
If *skipkeys* is true (default: ``False``), then dict keys that are not
|
|
||||||
of a basic type (:class:`str`, :class:`unicode`, :class:`int`, :class:`long`,
|
|
||||||
:class:`float`, :class:`bool`, ``None``) will be skipped instead of raising a
|
|
||||||
:exc:`TypeError`.
|
|
||||||
|
|
||||||
If *ensure_ascii* is false (default: ``True``), then some chunks written
|
|
||||||
to *fp* may be :class:`unicode` instances, subject to normal Python
|
|
||||||
:class:`str` to :class:`unicode` coercion rules. Unless ``fp.write()``
|
|
||||||
explicitly understands :class:`unicode` (as in :func:`codecs.getwriter`) this
|
|
||||||
is likely to cause an error. It's best to leave the default settings, because
|
|
||||||
they are safe and it is highly optimized.
|
|
||||||
|
|
||||||
If *check_circular* is false (default: ``True``), then the circular
|
|
||||||
reference check for container types will be skipped and a circular reference
|
|
||||||
will result in an :exc:`OverflowError` (or worse).
|
|
||||||
|
|
||||||
If *allow_nan* is false (default: ``True``), then it will be a
|
|
||||||
:exc:`ValueError` to serialize out of range :class:`float` values (``nan``,
|
|
||||||
``inf``, ``-inf``) in strict compliance of the JSON specification.
|
|
||||||
If *allow_nan* is true, their JavaScript equivalents will be used
|
|
||||||
(``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If *indent* is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. ``None`` (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Changed *indent* from an integer number of spaces to a string.
|
|
||||||
|
|
||||||
If specified, *separators* should be an ``(item_separator, dict_separator)``
|
|
||||||
tuple. By default, ``(', ', ': ')`` are used. To get the most compact JSON
|
|
||||||
representation, you should specify ``(',', ':')`` to eliminate whitespace.
|
|
||||||
|
|
||||||
*encoding* is the character encoding for str instances, default is
|
|
||||||
``'utf-8'``.
|
|
||||||
|
|
||||||
*default(obj)* is a function that should return a serializable version of
|
|
||||||
*obj* or raise :exc:`TypeError`. The default simply raises :exc:`TypeError`.
|
|
||||||
|
|
||||||
To use a custom :class:`JSONEncoder` subclass (e.g. one that overrides the
|
|
||||||
:meth:`default` method to serialize additional types), specify it with the
|
|
||||||
*cls* kwarg.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then :class:`decimal.Decimal`
|
|
||||||
will be natively serialized to JSON with full precision.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
*use_decimal* is new in 2.1.0.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
JSON is not a framed protocol so unlike :mod:`pickle` or :mod:`marshal` it
|
|
||||||
does not make sense to serialize more than one JSON document without some
|
|
||||||
container protocol to delimit them.
|
|
||||||
|
|
||||||
|
|
||||||
.. function:: dumps(obj[, skipkeys[, ensure_ascii[, check_circular[, allow_nan[, cls[, indent[, separators[, encoding[, default[, use_decimal[, **kw]]]]]]]]]]])
|
|
||||||
|
|
||||||
Serialize *obj* to a JSON formatted :class:`str`.
|
|
||||||
|
|
||||||
If *ensure_ascii* is false, then the return value will be a
|
|
||||||
:class:`unicode` instance. The other arguments have the same meaning as in
|
|
||||||
:func:`dump`. Note that the default *ensure_ascii* setting has much
|
|
||||||
better performance.
|
|
||||||
|
|
||||||
|
|
||||||
.. function:: load(fp[, encoding[, cls[, object_hook[, parse_float[, parse_int[, parse_constant[, object_pairs_hook[, use_decimal[, **kw]]]]]]]]])
|
|
||||||
|
|
||||||
Deserialize *fp* (a ``.read()``-supporting file-like object containing a JSON
|
|
||||||
document) to a Python object.
|
|
||||||
|
|
||||||
If the contents of *fp* are encoded with an ASCII based encoding other than
|
|
||||||
UTF-8 (e.g. latin-1), then an appropriate *encoding* name must be specified.
|
|
||||||
Encodings that are not ASCII based (such as UCS-2) are not allowed, and
|
|
||||||
should be wrapped with ``codecs.getreader(fp)(encoding)``, or simply decoded
|
|
||||||
to a :class:`unicode` object and passed to :func:`loads`. The default
|
|
||||||
setting of ``'utf-8'`` is fastest and should be using whenever possible.
|
|
||||||
|
|
||||||
If *fp.read()* returns :class:`str` then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as :class:`str` for performance and
|
|
||||||
memory reasons. If your code expects only :class:`unicode` the appropriate
|
|
||||||
solution is to wrap fp with a reader as demonstrated above.
|
|
||||||
|
|
||||||
*object_hook* is an optional function that will be called with the result of
|
|
||||||
any object literal decode (a :class:`dict`). The return value of
|
|
||||||
*object_hook* will be used instead of the :class:`dict`. This feature can be used
|
|
||||||
to implement custom decoders (e.g. JSON-RPC class hinting).
|
|
||||||
|
|
||||||
*object_pairs_hook* is an optional function that will be called with the
|
|
||||||
result of any object literal decode with an ordered list of pairs. The
|
|
||||||
return value of *object_pairs_hook* will be used instead of the
|
|
||||||
:class:`dict`. This feature can be used to implement custom decoders that
|
|
||||||
rely on the order that the key and value pairs are decoded (for example,
|
|
||||||
:class:`collections.OrderedDict` will remember the order of insertion). If
|
|
||||||
*object_hook* is also defined, the *object_pairs_hook* takes priority.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Added support for *object_pairs_hook*.
|
|
||||||
|
|
||||||
*parse_float*, if specified, will be called with the string of every JSON
|
|
||||||
float to be decoded. By default, this is equivalent to ``float(num_str)``.
|
|
||||||
This can be used to use another datatype or parser for JSON floats
|
|
||||||
(e.g. :class:`decimal.Decimal`).
|
|
||||||
|
|
||||||
*parse_int*, if specified, will be called with the string of every JSON int
|
|
||||||
to be decoded. By default, this is equivalent to ``int(num_str)``. This can
|
|
||||||
be used to use another datatype or parser for JSON integers
|
|
||||||
(e.g. :class:`float`).
|
|
||||||
|
|
||||||
*parse_constant*, if specified, will be called with one of the following
|
|
||||||
strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be used to
|
|
||||||
raise an exception if invalid JSON numbers are encountered.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then *parse_float* is set to
|
|
||||||
:class:`decimal.Decimal`. This is a convenience for parity with the
|
|
||||||
:func:`dump` parameter.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
*use_decimal* is new in 2.1.0.
|
|
||||||
|
|
||||||
To use a custom :class:`JSONDecoder` subclass, specify it with the ``cls``
|
|
||||||
kwarg. Additional keyword arguments will be passed to the constructor of the
|
|
||||||
class.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
:func:`load` will read the rest of the file-like object as a string and
|
|
||||||
then call :func:`loads`. It does not stop at the end of the first valid
|
|
||||||
JSON document it finds and it will raise an error if there is anything
|
|
||||||
other than whitespace after the document. Except for files containing
|
|
||||||
only one JSON document, it is recommended to use :func:`loads`.
|
|
||||||
|
|
||||||
|
|
||||||
.. function:: loads(s[, encoding[, cls[, object_hook[, parse_float[, parse_int[, parse_constant[, object_pairs_hook[, use_decimal[, **kw]]]]]]]]])
|
|
||||||
|
|
||||||
Deserialize *s* (a :class:`str` or :class:`unicode` instance containing a JSON
|
|
||||||
document) to a Python object.
|
|
||||||
|
|
||||||
If *s* is a :class:`str` instance and is encoded with an ASCII based encoding
|
|
||||||
other than UTF-8 (e.g. latin-1), then an appropriate *encoding* name must be
|
|
||||||
specified. Encodings that are not ASCII based (such as UCS-2) are not
|
|
||||||
allowed and should be decoded to :class:`unicode` first.
|
|
||||||
|
|
||||||
If *s* is a :class:`str` then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as :class:`str` for performance and
|
|
||||||
memory reasons. If your code expects only :class:`unicode` the appropriate
|
|
||||||
solution is decode *s* to :class:`unicode` prior to calling loads.
|
|
||||||
|
|
||||||
The other arguments have the same meaning as in :func:`load`.
|
|
||||||
|
|
||||||
|
|
||||||
Encoders and decoders
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
.. class:: JSONDecoder([encoding[, object_hook[, parse_float[, parse_int[, parse_constant[, object_pairs_hook[, strict]]]]]]])
|
|
||||||
|
|
||||||
Simple JSON decoder.
|
|
||||||
|
|
||||||
Performs the following translations in decoding by default:
|
|
||||||
|
|
||||||
+---------------+-------------------+
|
|
||||||
| JSON | Python |
|
|
||||||
+===============+===================+
|
|
||||||
| object | dict |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| array | list |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| string | unicode |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (int) | int, long |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (real) | float |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| true | True |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| false | False |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| null | None |
|
|
||||||
+---------------+-------------------+
|
|
||||||
|
|
||||||
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as their
|
|
||||||
corresponding ``float`` values, which is outside the JSON spec.
|
|
||||||
|
|
||||||
*encoding* determines the encoding used to interpret any :class:`str` objects
|
|
||||||
decoded by this instance (``'utf-8'`` by default). It has no effect when decoding
|
|
||||||
:class:`unicode` objects.
|
|
||||||
|
|
||||||
Note that currently only encodings that are a superset of ASCII work, strings
|
|
||||||
of other encodings should be passed in as :class:`unicode`.
|
|
||||||
|
|
||||||
*object_hook* is an optional function that will be called with the result of
|
|
||||||
every JSON object decoded and its return value will be used in place of the
|
|
||||||
given :class:`dict`. This can be used to provide custom deserializations
|
|
||||||
(e.g. to support JSON-RPC class hinting).
|
|
||||||
|
|
||||||
*object_pairs_hook* is an optional function that will be called with the
|
|
||||||
result of any object literal decode with an ordered list of pairs. The
|
|
||||||
return value of *object_pairs_hook* will be used instead of the
|
|
||||||
:class:`dict`. This feature can be used to implement custom decoders that
|
|
||||||
rely on the order that the key and value pairs are decoded (for example,
|
|
||||||
:class:`collections.OrderedDict` will remember the order of insertion). If
|
|
||||||
*object_hook* is also defined, the *object_pairs_hook* takes priority.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Added support for *object_pairs_hook*.
|
|
||||||
|
|
||||||
*parse_float*, if specified, will be called with the string of every JSON
|
|
||||||
float to be decoded. By default, this is equivalent to ``float(num_str)``.
|
|
||||||
This can be used to use another datatype or parser for JSON floats
|
|
||||||
(e.g. :class:`decimal.Decimal`).
|
|
||||||
|
|
||||||
*parse_int*, if specified, will be called with the string of every JSON int
|
|
||||||
to be decoded. By default, this is equivalent to ``int(num_str)``. This can
|
|
||||||
be used to use another datatype or parser for JSON integers
|
|
||||||
(e.g. :class:`float`).
|
|
||||||
|
|
||||||
*parse_constant*, if specified, will be called with one of the following
|
|
||||||
strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be used to
|
|
||||||
raise an exception if invalid JSON numbers are encountered.
|
|
||||||
|
|
||||||
*strict* controls the parser's behavior when it encounters an invalid
|
|
||||||
control character in a string. The default setting of ``True`` means that
|
|
||||||
unescaped control characters are parse errors, if ``False`` then control
|
|
||||||
characters will be allowed in strings.
|
|
||||||
|
|
||||||
.. method:: decode(s)
|
|
||||||
|
|
||||||
Return the Python representation of *s* (a :class:`str` or
|
|
||||||
:class:`unicode` instance containing a JSON document)
|
|
||||||
|
|
||||||
If *s* is a :class:`str` then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as :class:`str` for performance and
|
|
||||||
memory reasons. If your code expects only :class:`unicode` the
|
|
||||||
appropriate solution is decode *s* to :class:`unicode` prior to calling
|
|
||||||
decode.
|
|
||||||
|
|
||||||
.. method:: raw_decode(s)
|
|
||||||
|
|
||||||
Decode a JSON document from *s* (a :class:`str` or :class:`unicode`
|
|
||||||
beginning with a JSON document) and return a 2-tuple of the Python
|
|
||||||
representation and the index in *s* where the document ended.
|
|
||||||
|
|
||||||
This can be used to decode a JSON document from a string that may have
|
|
||||||
extraneous data at the end.
|
|
||||||
|
|
||||||
|
|
||||||
.. class:: JSONEncoder([skipkeys[, ensure_ascii[, check_circular[, allow_nan[, sort_keys[, indent[, separators[, encoding[, default]]]]]]]]])
|
|
||||||
|
|
||||||
Extensible JSON encoder for Python data structures.
|
|
||||||
|
|
||||||
Supports the following objects and types by default:
|
|
||||||
|
|
||||||
+-------------------+---------------+
|
|
||||||
| Python | JSON |
|
|
||||||
+===================+===============+
|
|
||||||
| dict | object |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| list, tuple | array |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| str, unicode | string |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| int, long, float | number |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| True | true |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| False | false |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| None | null |
|
|
||||||
+-------------------+---------------+
|
|
||||||
|
|
||||||
To extend this to recognize other objects, subclass and implement a
|
|
||||||
:meth:`default` method with another method that returns a serializable object
|
|
||||||
for ``o`` if possible, otherwise it should call the superclass implementation
|
|
||||||
(to raise :exc:`TypeError`).
|
|
||||||
|
|
||||||
If *skipkeys* is false (the default), then it is a :exc:`TypeError` to
|
|
||||||
attempt encoding of keys that are not str, int, long, float or None. If
|
|
||||||
*skipkeys* is true, such items are simply skipped.
|
|
||||||
|
|
||||||
If *ensure_ascii* is true (the default), the output is guaranteed to be
|
|
||||||
:class:`str` objects with all incoming unicode characters escaped. If
|
|
||||||
*ensure_ascii* is false, the output will be a unicode object.
|
|
||||||
|
|
||||||
If *check_circular* is false (the default), then lists, dicts, and custom
|
|
||||||
encoded objects will be checked for circular references during encoding to
|
|
||||||
prevent an infinite recursion (which would cause an :exc:`OverflowError`).
|
|
||||||
Otherwise, no such check takes place.
|
|
||||||
|
|
||||||
If *allow_nan* is true (the default), then ``NaN``, ``Infinity``, and
|
|
||||||
``-Infinity`` will be encoded as such. This behavior is not JSON
|
|
||||||
specification compliant, but is consistent with most JavaScript based
|
|
||||||
encoders and decoders. Otherwise, it will be a :exc:`ValueError` to encode
|
|
||||||
such floats.
|
|
||||||
|
|
||||||
If *sort_keys* is true (not the default), then the output of dictionaries
|
|
||||||
will be sorted by key; this is useful for regression tests to ensure that
|
|
||||||
JSON serializations can be compared on a day-to-day basis.
|
|
||||||
|
|
||||||
If *indent* is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. ``None`` (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Changed *indent* from an integer number of spaces to a string.
|
|
||||||
|
|
||||||
If specified, *separators* should be an ``(item_separator, key_separator)``
|
|
||||||
tuple. By default, ``(', ', ': ')`` are used. To get the most compact JSON
|
|
||||||
representation, you should specify ``(',', ':')`` to eliminate whitespace.
|
|
||||||
|
|
||||||
If specified, *default* should be a function that gets called for objects
|
|
||||||
that can't otherwise be serialized. It should return a JSON encodable
|
|
||||||
version of the object or raise a :exc:`TypeError`.
|
|
||||||
|
|
||||||
If *encoding* is not ``None``, then all input strings will be transformed
|
|
||||||
into unicode using that encoding prior to JSON-encoding. The default is
|
|
||||||
``'utf-8'``.
|
|
||||||
|
|
||||||
|
|
||||||
.. method:: default(o)
|
|
||||||
|
|
||||||
Implement this method in a subclass such that it returns a serializable
|
|
||||||
object for *o*, or calls the base implementation (to raise a
|
|
||||||
:exc:`TypeError`).
|
|
||||||
|
|
||||||
For example, to support arbitrary iterators, you could implement default
|
|
||||||
like this::
|
|
||||||
|
|
||||||
def default(self, o):
|
|
||||||
try:
|
|
||||||
iterable = iter(o)
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return list(iterable)
|
|
||||||
return JSONEncoder.default(self, o)
|
|
||||||
|
|
||||||
|
|
||||||
.. method:: encode(o)
|
|
||||||
|
|
||||||
Return a JSON string representation of a Python data structure, *o*. For
|
|
||||||
example::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.JSONEncoder().encode({"foo": ["bar", "baz"]})
|
|
||||||
'{"foo": ["bar", "baz"]}'
|
|
||||||
|
|
||||||
|
|
||||||
.. method:: iterencode(o)
|
|
||||||
|
|
||||||
Encode the given object, *o*, and yield each string representation as
|
|
||||||
available. For example::
|
|
||||||
|
|
||||||
for chunk in JSONEncoder().iterencode(bigobject):
|
|
||||||
mysocket.write(chunk)
|
|
||||||
|
|
||||||
Note that :meth:`encode` has much better performance than
|
|
||||||
:meth:`iterencode`.
|
|
||||||
|
|
||||||
.. class:: JSONEncoderForHTML([skipkeys[, ensure_ascii[, check_circular[, allow_nan[, sort_keys[, indent[, separators[, encoding[, default]]]]]]]]])
|
|
||||||
|
|
||||||
Subclass of :class:`JSONEncoder` that escapes &, <, and > for embedding in HTML.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
New in 2.1.0
|
|
Двоичный файл не отображается.
До Ширина: | Высота: | Размер: 202 B |
|
@ -1,833 +0,0 @@
|
||||||
/**
|
|
||||||
* Sphinx Doc Design
|
|
||||||
*/
|
|
||||||
|
|
||||||
body {
|
|
||||||
font-family: sans-serif;
|
|
||||||
font-size: 100%;
|
|
||||||
background-color: #11303d;
|
|
||||||
color: #000;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* :::: LAYOUT :::: */
|
|
||||||
|
|
||||||
div.document {
|
|
||||||
background-color: #1c4e63;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.documentwrapper {
|
|
||||||
float: left;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.bodywrapper {
|
|
||||||
margin: 0 0 0 230px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.body {
|
|
||||||
background-color: white;
|
|
||||||
padding: 0 20px 30px 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebarwrapper {
|
|
||||||
padding: 10px 5px 0 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar {
|
|
||||||
float: left;
|
|
||||||
width: 230px;
|
|
||||||
margin-left: -100%;
|
|
||||||
font-size: 90%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.clearer {
|
|
||||||
clear: both;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.footer {
|
|
||||||
color: #fff;
|
|
||||||
width: 100%;
|
|
||||||
padding: 9px 0 9px 0;
|
|
||||||
text-align: center;
|
|
||||||
font-size: 75%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.footer a {
|
|
||||||
color: #fff;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related {
|
|
||||||
background-color: #133f52;
|
|
||||||
color: #fff;
|
|
||||||
width: 100%;
|
|
||||||
height: 30px;
|
|
||||||
line-height: 30px;
|
|
||||||
font-size: 90%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related h3 {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related ul {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0 0 0 10px;
|
|
||||||
list-style: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related li {
|
|
||||||
display: inline;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related li.right {
|
|
||||||
float: right;
|
|
||||||
margin-right: 5px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related a {
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ::: TOC :::: */
|
|
||||||
div.sphinxsidebar h3 {
|
|
||||||
font-family: 'Trebuchet MS', sans-serif;
|
|
||||||
color: white;
|
|
||||||
font-size: 1.4em;
|
|
||||||
font-weight: normal;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar h4 {
|
|
||||||
font-family: 'Trebuchet MS', sans-serif;
|
|
||||||
color: white;
|
|
||||||
font-size: 1.3em;
|
|
||||||
font-weight: normal;
|
|
||||||
margin: 5px 0 0 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar p {
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar p.topless {
|
|
||||||
margin: 5px 10px 10px 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar ul {
|
|
||||||
margin: 10px;
|
|
||||||
padding: 0;
|
|
||||||
list-style: none;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar ul ul,
|
|
||||||
div.sphinxsidebar ul.want-points {
|
|
||||||
margin-left: 20px;
|
|
||||||
list-style: square;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar ul ul {
|
|
||||||
margin-top: 0;
|
|
||||||
margin-bottom: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar a {
|
|
||||||
color: #98dbcc;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar form {
|
|
||||||
margin-top: 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar input {
|
|
||||||
border: 1px solid #98dbcc;
|
|
||||||
font-family: sans-serif;
|
|
||||||
font-size: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* :::: MODULE CLOUD :::: */
|
|
||||||
div.modulecloud {
|
|
||||||
margin: -5px 10px 5px 10px;
|
|
||||||
padding: 10px;
|
|
||||||
line-height: 160%;
|
|
||||||
border: 1px solid #cbe7e5;
|
|
||||||
background-color: #f2fbfd;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.modulecloud a {
|
|
||||||
padding: 0 5px 0 5px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* :::: SEARCH :::: */
|
|
||||||
ul.search {
|
|
||||||
margin: 10px 0 0 20px;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul.search li {
|
|
||||||
padding: 5px 0 5px 20px;
|
|
||||||
background-image: url(file.png);
|
|
||||||
background-repeat: no-repeat;
|
|
||||||
background-position: 0 7px;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul.search li a {
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul.search li div.context {
|
|
||||||
color: #888;
|
|
||||||
margin: 2px 0 0 30px;
|
|
||||||
text-align: left;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul.keywordmatches li.goodmatch a {
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* :::: COMMON FORM STYLES :::: */
|
|
||||||
|
|
||||||
div.actions {
|
|
||||||
padding: 5px 10px 5px 10px;
|
|
||||||
border-top: 1px solid #cbe7e5;
|
|
||||||
border-bottom: 1px solid #cbe7e5;
|
|
||||||
background-color: #e0f6f4;
|
|
||||||
}
|
|
||||||
|
|
||||||
form dl {
|
|
||||||
color: #333;
|
|
||||||
}
|
|
||||||
|
|
||||||
form dt {
|
|
||||||
clear: both;
|
|
||||||
float: left;
|
|
||||||
min-width: 110px;
|
|
||||||
margin-right: 10px;
|
|
||||||
padding-top: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
input#homepage {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.error {
|
|
||||||
margin: 5px 20px 0 0;
|
|
||||||
padding: 5px;
border: 1px solid #d00;
font-weight: bold;
}

/* :::: INLINE COMMENTS :::: */

div.inlinecomments {
position: absolute;
right: 20px;
}

div.inlinecomments a.bubble {
display: block;
float: right;
background-image: url(style/comment.png);
background-repeat: no-repeat;
width: 25px;
height: 25px;
text-align: center;
padding-top: 3px;
font-size: 0.9em;
line-height: 14px;
font-weight: bold;
color: black;
}

div.inlinecomments a.bubble span {
display: none;
}

div.inlinecomments a.emptybubble {
background-image: url(style/nocomment.png);
}

div.inlinecomments a.bubble:hover {
background-image: url(style/hovercomment.png);
text-decoration: none;
color: #3ca0a4;
}

div.inlinecomments div.comments {
float: right;
margin: 25px 5px 0 0;
max-width: 50em;
min-width: 30em;
border: 1px solid #2eabb0;
background-color: #f2fbfd;
z-index: 150;
}

div#comments {
border: 1px solid #2eabb0;
margin-top: 20px;
}

div#comments div.nocomments {
padding: 10px;
font-weight: bold;
}

div.inlinecomments div.comments h3,
div#comments h3 {
margin: 0;
padding: 0;
background-color: #2eabb0;
color: white;
border: none;
padding: 3px;
}

div.inlinecomments div.comments div.actions {
padding: 4px;
margin: 0;
border-top: none;
}

div#comments div.comment {
margin: 10px;
border: 1px solid #2eabb0;
}

div.inlinecomments div.comment h4,
div.commentwindow div.comment h4,
div#comments div.comment h4 {
margin: 10px 0 0 0;
background-color: #2eabb0;
color: white;
border: none;
padding: 1px 4px 1px 4px;
}

div#comments div.comment h4 {
margin: 0;
}

div#comments div.comment h4 a {
color: #d5f4f4;
}

div.inlinecomments div.comment div.text,
div.commentwindow div.comment div.text,
div#comments div.comment div.text {
margin: -5px 0 -5px 0;
padding: 0 10px 0 10px;
}

div.inlinecomments div.comment div.meta,
div.commentwindow div.comment div.meta,
div#comments div.comment div.meta {
text-align: right;
padding: 2px 10px 2px 0;
font-size: 95%;
color: #538893;
border-top: 1px solid #cbe7e5;
background-color: #e0f6f4;
}

div.commentwindow {
position: absolute;
width: 500px;
border: 1px solid #cbe7e5;
background-color: #f2fbfd;
display: none;
z-index: 130;
}

div.commentwindow h3 {
margin: 0;
background-color: #2eabb0;
color: white;
border: none;
padding: 5px;
font-size: 1.5em;
cursor: pointer;
}

div.commentwindow div.actions {
margin: 10px -10px 0 -10px;
padding: 4px 10px 4px 10px;
color: #538893;
}

div.commentwindow div.actions input {
border: 1px solid #2eabb0;
background-color: white;
color: #135355;
cursor: pointer;
}

div.commentwindow div.form {
padding: 0 10px 0 10px;
}

div.commentwindow div.form input,
div.commentwindow div.form textarea {
border: 1px solid #3c9ea2;
background-color: white;
color: black;
}

div.commentwindow div.error {
margin: 10px 5px 10px 5px;
background-color: #fbe5dc;
display: none;
}

div.commentwindow div.form textarea {
width: 99%;
}

div.commentwindow div.preview {
margin: 10px 0 10px 0;
background-color: #70d0d4;
padding: 0 1px 1px 25px;
}

div.commentwindow div.preview h4 {
margin: 0 0 -5px -20px;
padding: 4px 0 0 4px;
color: white;
font-size: 1.3em;
}

div.commentwindow div.preview div.comment {
background-color: #f2fbfd;
}

div.commentwindow div.preview div.comment h4 {
margin: 10px 0 0 0!important;
padding: 1px 4px 1px 4px!important;
font-size: 1.2em;
}

/* :::: SUGGEST CHANGES :::: */
div#suggest-changes-box input, div#suggest-changes-box textarea {
border: 1px solid #ccc;
background-color: white;
color: black;
}

div#suggest-changes-box textarea {
width: 99%;
height: 400px;
}

/* :::: PREVIEW :::: */
div.preview {
background-image: url(style/preview.png);
padding: 0 20px 20px 20px;
margin-bottom: 30px;
}

/* :::: INDEX PAGE :::: */

table.contentstable {
width: 90%;
}

table.contentstable p.biglink {
line-height: 150%;
}

a.biglink {
font-size: 1.3em;
}

span.linkdescr {
font-style: italic;
padding-top: 5px;
font-size: 90%;
}

/* :::: INDEX STYLES :::: */

table.indextable td {
text-align: left;
vertical-align: top;
}

table.indextable dl, table.indextable dd {
margin-top: 0;
margin-bottom: 0;
}

table.indextable tr.pcap {
height: 10px;
}

table.indextable tr.cap {
margin-top: 10px;
background-color: #f2f2f2;
}

img.toggler {
margin-right: 3px;
margin-top: 3px;
cursor: pointer;
}

form.pfform {
margin: 10px 0 20px 0;
}

/* :::: GLOBAL STYLES :::: */

.docwarning {
background-color: #ffe4e4;
padding: 10px;
margin: 0 -20px 0 -20px;
border-bottom: 1px solid #f66;
}

p.subhead {
font-weight: bold;
margin-top: 20px;
}

a {
color: #355f7c;
text-decoration: none;
}

a:hover {
text-decoration: underline;
}

div.body h1,
div.body h2,
div.body h3,
div.body h4,
div.body h5,
div.body h6 {
font-family: 'Trebuchet MS', sans-serif;
background-color: #f2f2f2;
font-weight: normal;
color: #20435c;
border-bottom: 1px solid #ccc;
margin: 20px -20px 10px -20px;
padding: 3px 0 3px 10px;
}

div.body h1 { margin-top: 0; font-size: 200%; }
div.body h2 { font-size: 160%; }
div.body h3 { font-size: 140%; }
div.body h4 { font-size: 120%; }
div.body h5 { font-size: 110%; }
div.body h6 { font-size: 100%; }

a.headerlink {
color: #c60f0f;
font-size: 0.8em;
padding: 0 4px 0 4px;
text-decoration: none;
visibility: hidden;
}

h1:hover > a.headerlink,
h2:hover > a.headerlink,
h3:hover > a.headerlink,
h4:hover > a.headerlink,
h5:hover > a.headerlink,
h6:hover > a.headerlink,
dt:hover > a.headerlink {
visibility: visible;
}

a.headerlink:hover {
background-color: #c60f0f;
color: white;
}

div.body p, div.body dd, div.body li {
text-align: justify;
line-height: 130%;
}

div.body p.caption {
text-align: inherit;
}

div.body td {
text-align: left;
}

ul.fakelist {
list-style: none;
margin: 10px 0 10px 20px;
padding: 0;
}

.field-list ul {
padding-left: 1em;
}

.first {
margin-top: 0 !important;
}

/* "Footnotes" heading */
p.rubric {
margin-top: 30px;
font-weight: bold;
}

/* "Topics" */

div.topic {
background-color: #eee;
border: 1px solid #ccc;
padding: 0 7px 0 7px;
margin: 10px 0 10px 0;
}

p.topic-title {
font-size: 1.1em;
font-weight: bold;
margin-top: 10px;
}

/* Admonitions */

div.admonition {
margin-top: 10px;
margin-bottom: 10px;
padding: 7px;
}

div.admonition dt {
font-weight: bold;
}

div.admonition dl {
margin-bottom: 0;
}

div.admonition p {
display: inline;
}

div.seealso {
background-color: #ffc;
border: 1px solid #ff6;
}

div.warning {
background-color: #ffe4e4;
border: 1px solid #f66;
}

div.note {
background-color: #eee;
border: 1px solid #ccc;
}

p.admonition-title {
margin: 0px 10px 5px 0px;
font-weight: bold;
display: inline;
}

p.admonition-title:after {
content: ":";
}

div.body p.centered {
text-align: center;
margin-top: 25px;
}

table.docutils {
border: 0;
}

table.docutils td, table.docutils th {
padding: 1px 8px 1px 0;
border-top: 0;
border-left: 0;
border-right: 0;
border-bottom: 1px solid #aaa;
}

table.field-list td, table.field-list th {
border: 0 !important;
}

table.footnote td, table.footnote th {
border: 0 !important;
}

.field-list ul {
margin: 0;
padding-left: 1em;
}

.field-list p {
margin: 0;
}

dl {
margin-bottom: 15px;
clear: both;
}

dd p {
margin-top: 0px;
}

dd ul, dd table {
margin-bottom: 10px;
}

dd {
margin-top: 3px;
margin-bottom: 10px;
margin-left: 30px;
}

.refcount {
color: #060;
}

dt:target,
.highlight {
background-color: #fbe54e;
}

dl.glossary dt {
font-weight: bold;
font-size: 1.1em;
}

th {
text-align: left;
padding-right: 5px;
}

pre {
padding: 5px;
background-color: #efc;
color: #333;
border: 1px solid #ac9;
border-left: none;
border-right: none;
overflow: auto;
}

td.linenos pre {
padding: 5px 0px;
border: 0;
background-color: transparent;
color: #aaa;
}

table.highlighttable {
margin-left: 0.5em;
}

table.highlighttable td {
padding: 0 0.5em 0 0.5em;
}

tt {
background-color: #ecf0f3;
padding: 0 1px 0 1px;
font-size: 0.95em;
}

tt.descname {
background-color: transparent;
font-weight: bold;
font-size: 1.2em;
}

tt.descclassname {
background-color: transparent;
}

tt.xref, a tt {
background-color: transparent;
font-weight: bold;
}

.footnote:target { background-color: #ffa }

h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
background-color: transparent;
}

.optional {
font-size: 1.3em;
}

.versionmodified {
font-style: italic;
}

form.comment {
margin: 0;
padding: 10px 30px 10px 30px;
background-color: #eee;
}

form.comment h3 {
background-color: #326591;
color: white;
margin: -10px -30px 10px -30px;
padding: 5px;
font-size: 1.4em;
}

form.comment input,
form.comment textarea {
border: 1px solid #ccc;
padding: 2px;
font-family: sans-serif;
font-size: 100%;
}

form.comment input[type="text"] {
width: 240px;
}

form.comment textarea {
width: 100%;
height: 200px;
margin-bottom: 10px;
}

.system-message {
background-color: #fda;
padding: 5px;
border: 3px solid red;
}

/* :::: PRINT :::: */
@media print {
div.document,
div.documentwrapper,
div.bodywrapper {
margin: 0;
width : 100%;
}

div.sphinxsidebar,
div.related,
div.footer,
div#comments div.new-comment-box,
#top-link {
display: none;
}
}
@ -1,352 +0,0 @@
/// XXX: make it cross browser

/**
* make the code below compatible with browsers without
* an installed firebug like debugger
*/
if (!window.console || !console.firebug) {
var names = ["log", "debug", "info", "warn", "error", "assert", "dir", "dirxml",
"group", "groupEnd", "time", "timeEnd", "count", "trace", "profile", "profileEnd"];
window.console = {};
for (var i = 0; i < names.length; ++i)
window.console[names[i]] = function() {}
}

/**
* small helper function to urldecode strings
*/
jQuery.urldecode = function(x) {
return decodeURIComponent(x).replace(/\+/g, ' ');
}

/**
* small helper function to urlencode strings
*/
jQuery.urlencode = encodeURIComponent;

/**
* This function returns the parsed url parameters of the
* current request. Multiple values per key are supported,
* it will always return arrays of strings for the value parts.
*/
jQuery.getQueryParameters = function(s) {
if (typeof s == 'undefined')
s = document.location.search;
var parts = s.substr(s.indexOf('?') + 1).split('&');
var result = {};
for (var i = 0; i < parts.length; i++) {
var tmp = parts[i].split('=', 2);
var key = jQuery.urldecode(tmp[0]);
var value = jQuery.urldecode(tmp[1]);
if (key in result)
result[key].push(value);
else
result[key] = [value];
}
return result;
}

/**
* small function to check if an array contains
* a given item.
*/
jQuery.contains = function(arr, item) {
for (var i = 0; i < arr.length; i++) {
if (arr[i] == item)
return true;
}
return false;
}

/**
* highlight a given string on a jquery object by wrapping it in
* span elements with the given class name.
*/
jQuery.fn.highlightText = function(text, className) {
function highlight(node) {
if (node.nodeType == 3) {
var val = node.nodeValue;
var pos = val.toLowerCase().indexOf(text);
if (pos >= 0 && !jQuery.className.has(node.parentNode, className)) {
var span = document.createElement("span");
span.className = className;
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
node.parentNode.insertBefore(span, node.parentNode.insertBefore(
document.createTextNode(val.substr(pos + text.length)),
node.nextSibling));
node.nodeValue = val.substr(0, pos);
}
}
else if (!jQuery(node).is("button, select, textarea")) {
jQuery.each(node.childNodes, function() {
highlight(this)
});
}
}
return this.each(function() {
highlight(this);
});
}

/**
* Small JavaScript module for the documentation.
*/
var Documentation = {

init : function() {
/* this.addContextElements(); -- now done statically */
this.fixFirefoxAnchorBug();
this.highlightSearchWords();
this.initModIndex();
this.initComments();
},

/**
* add context elements like header anchor links
*/
addContextElements : function() {
for (var i = 1; i <= 6; i++) {
$('h' + i + '[@id]').each(function() {
$('<a class="headerlink">\u00B6</a>').
attr('href', '#' + this.id).
attr('title', 'Permalink to this headline').
appendTo(this);
});
}
$('dt[@id]').each(function() {
$('<a class="headerlink">\u00B6</a>').
attr('href', '#' + this.id).
attr('title', 'Permalink to this definition').
appendTo(this);
});
},

/**
* workaround a firefox stupidity
*/
fixFirefoxAnchorBug : function() {
if (document.location.hash && $.browser.mozilla)
window.setTimeout(function() {
document.location.href += '';
}, 10);
},

/**
* highlight the search words provided in the url in the text
*/
highlightSearchWords : function() {
var params = $.getQueryParameters();
var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
if (terms.length) {
var body = $('div.body');
window.setTimeout(function() {
$.each(terms, function() {
body.highlightText(this.toLowerCase(), 'highlight');
});
}, 10);
$('<li class="highlight-link"><a href="javascript:Documentation.' +
'hideSearchWords()">Hide Search Matches</a></li>')
.appendTo($('.sidebar .this-page-menu'));
}
},

/**
* init the modindex toggle buttons
*/
initModIndex : function() {
var togglers = $('img.toggler').click(function() {
var src = $(this).attr('src');
var idnum = $(this).attr('id').substr(7);
console.log($('tr.cg-' + idnum).toggle());
if (src.substr(-9) == 'minus.png')
$(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
else
$(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
}).css('display', '');
if (DOCUMENTATION_OPTIONS.COLLAPSE_MODINDEX) {
togglers.click();
}
},

/**
* init the inline comments
*/
initComments : function() {
$('.inlinecomments div.actions').each(function() {
this.innerHTML += ' | ';
$(this).append($('<a href="#">hide comments</a>').click(function() {
$(this).parent().parent().toggle();
return false;
}));
});
$('.inlinecomments .comments').hide();
$('.inlinecomments a.bubble').each(function() {
$(this).click($(this).is('.emptybubble') ? function() {
var params = $.getQueryParameters(this.href);
Documentation.newComment(params.target[0]);
return false;
} : function() {
$('.comments', $(this).parent().parent()[0]).toggle();
return false;
});
});
$('#comments div.actions a.newcomment').click(function() {
Documentation.newComment();
return false;
});
if (document.location.hash.match(/^#comment-/))
$('.inlinecomments .comments ' + document.location.hash)
.parent().toggle();
},

/**
* helper function to hide the search marks again
*/
hideSearchWords : function() {
$('.sidebar .this-page-menu li.highlight-link').fadeOut(300);
$('span.highlight').removeClass('highlight');
},

/**
* show the comment window for a certain id or the whole page.
*/
newComment : function(id) {
Documentation.CommentWindow.openFor(id || '');
},

/**
* write a new comment from within a comment view box
*/
newCommentFromBox : function(link) {
var params = $.getQueryParameters(link.href);
$(link).parent().parent().fadeOut('slow');
this.newComment(params.target);
},

/**
* make the url absolute
*/
makeURL : function(relativeURL) {
return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
},

/**
* get the current relative url
*/
getCurrentURL : function() {
var path = document.location.pathname;
var parts = path.split(/\//);
$.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
if (this == '..')
parts.pop();
});
var url = parts.join('/');
return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
},

/**
* class that represents the comment window
*/
CommentWindow : (function() {
var openWindows = {};

var Window = function(sectionID) {
this.url = Documentation.makeURL('@comments/' + Documentation.getCurrentURL()
+ '/?target=' + $.urlencode(sectionID) + '&mode=ajax');
this.sectionID = sectionID;

this.root = $('<div class="commentwindow"></div>');
this.root.appendTo($('body'));
this.title = $('<h3>New Comment</h3>').appendTo(this.root);
this.body = $('<div class="form">please wait...</div>').appendTo(this.root);
this.resizeHandle = $('<div class="resizehandle"></div>').appendTo(this.root);

this.root.Draggable({
handle: this.title[0]
});

this.root.css({
left: window.innerWidth / 2 - $(this.root).width() / 2,
top: window.scrollY + (window.innerHeight / 2 - 150)
});
this.root.fadeIn('slow');
this.updateView();
};

Window.prototype.updateView = function(data) {
var self = this;
function update(data) {
if (data.posted) {
document.location.hash = '#comment-' + data.commentID;
document.location.reload();
}
else {
self.body.html(data.body);
$('div.actions', self.body).append($('<input>')
.attr('type', 'button')
.attr('value', 'Close')
.click(function() { self.close(); })
);
$('div.actions input[@name="preview"]')
.attr('type', 'button')
.click(function() { self.submitForm($('form', self.body)[0], true); });
$('form', self.body).bind("submit", function() {
self.submitForm(this);
return false;
});

if (data.error) {
self.root.Highlight(1000, '#aadee1');
$('div.error', self.root).slideDown(500);
}
}
}

if (typeof data == 'undefined')
$.getJSON(this.url, function(json) { update(json); });
else
$.ajax({
url: this.url,
type: 'POST',
dataType: 'json',
data: data,
success: function(json) { update(json); }
});
}

Window.prototype.getFormValue = function(name) {
return $('*[@name="' + name + '"]', this.body)[0].value;
}

Window.prototype.submitForm = function(form, previewMode) {
this.updateView({
author: form.author.value,
author_mail: form.author_mail.value,
title: form.title.value,
comment_body: form.comment_body.value,
preview: previewMode ? 'yes' : ''
});
}

Window.prototype.close = function() {
var self = this;
delete openWindows[this.sectionID];
this.root.fadeOut('slow', function() {
self.root.remove();
});
}

Window.openFor = function(sectionID) {
if (sectionID in openWindows)
return openWindows[sectionID];
return new Window(sectionID);
}

return Window;
})()
};

$(document).ready(function() {
Documentation.init();
});
Binary file not shown.
Before Width: | Height: | Size: 392 B |
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
Before Width: | Height: | Size: 199 B |
Binary file not shown.
Before Width: | Height: | Size: 218 B |
Binary file not shown.
Before Width: | Height: | Size: 199 B |
@ -1,59 +0,0 @@
.c { color: #408090; font-style: italic } /* Comment */
.err { border: 1px solid #FF0000 } /* Error */
.k { color: #007020; font-weight: bold } /* Keyword */
.o { color: #666666 } /* Operator */
.cm { color: #408090; font-style: italic } /* Comment.Multiline */
.cp { color: #007020 } /* Comment.Preproc */
.c1 { color: #408090; font-style: italic } /* Comment.Single */
.cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */
.gd { color: #A00000 } /* Generic.Deleted */
.ge { font-style: italic } /* Generic.Emph */
.gr { color: #FF0000 } /* Generic.Error */
.gh { color: #000080; font-weight: bold } /* Generic.Heading */
.gi { color: #00A000 } /* Generic.Inserted */
.go { color: #303030 } /* Generic.Output */
.gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
.gs { font-weight: bold } /* Generic.Strong */
.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.gt { color: #0040D0 } /* Generic.Traceback */
.kc { color: #007020; font-weight: bold } /* Keyword.Constant */
.kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
.kp { color: #007020 } /* Keyword.Pseudo */
.kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
.kt { color: #902000 } /* Keyword.Type */
.m { color: #208050 } /* Literal.Number */
.s { color: #4070a0 } /* Literal.String */
.na { color: #4070a0 } /* Name.Attribute */
.nb { color: #007020 } /* Name.Builtin */
.nc { color: #0e84b5; font-weight: bold } /* Name.Class */
.no { color: #60add5 } /* Name.Constant */
.nd { color: #555555; font-weight: bold } /* Name.Decorator */
.ni { color: #d55537; font-weight: bold } /* Name.Entity */
.ne { color: #007020 } /* Name.Exception */
.nf { color: #06287e } /* Name.Function */
.nl { color: #002070; font-weight: bold } /* Name.Label */
.nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
.nt { color: #062873; font-weight: bold } /* Name.Tag */
.nv { color: #bb60d5 } /* Name.Variable */
.ow { color: #007020; font-weight: bold } /* Operator.Word */
.w { color: #bbbbbb } /* Text.Whitespace */
.mf { color: #208050 } /* Literal.Number.Float */
.mh { color: #208050 } /* Literal.Number.Hex */
.mi { color: #208050 } /* Literal.Number.Integer */
.mo { color: #208050 } /* Literal.Number.Oct */
.sb { color: #4070a0 } /* Literal.String.Backtick */
.sc { color: #4070a0 } /* Literal.String.Char */
.sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
.s2 { color: #4070a0 } /* Literal.String.Double */
.se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
.sh { color: #4070a0 } /* Literal.String.Heredoc */
.si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
.sx { color: #c65d09 } /* Literal.String.Other */
.sr { color: #235388 } /* Literal.String.Regex */
.s1 { color: #4070a0 } /* Literal.String.Single */
.ss { color: #517918 } /* Literal.String.Symbol */
.bp { color: #007020 } /* Name.Builtin.Pseudo */
.vc { color: #bb60d5 } /* Name.Variable.Class */
.vg { color: #bb60d5 } /* Name.Variable.Global */
.vi { color: #bb60d5 } /* Name.Variable.Instance */
.il { color: #208050 } /* Literal.Number.Integer.Long */
@ -1,16 +0,0 @@
/**
* Sphinx Doc Design -- Right Side Bar Overrides
*/

div.sphinxsidebar {
float: right;
}

div.bodywrapper {
margin: 0 230px 0 0;
}

div.inlinecomments {
right: 250px;
}
@ -1,404 +0,0 @@
/**
* helper function to return a node containing the
* search summary for a given text. keywords is a list
* of stemmed words, hlwords is the list of normal, unstemmed
* words. the first one is used to find the occurance, the
* latter for highlighting it.
*/
jQuery.makeSearchSummary = function(text, keywords, hlwords) {
var textLower = text.toLowerCase();
var start = 0;
$.each(keywords, function() {
var i = textLower.indexOf(this.toLowerCase());
if (i > -1) {
start = i;
}
});
start = Math.max(start - 120, 0);
var excerpt = ((start > 0) ? '...' : '') +
$.trim(text.substr(start, 240)) +
((start + 240 - text.length) ? '...' : '');
var rv = $('<div class="context"></div>').text(excerpt);
$.each(hlwords, function() {
rv = rv.highlightText(this, 'highlight');
});
return rv;
}

/**
* Porter Stemmer
*/
var PorterStemmer = function() {

var step2list = {
ational: 'ate',
tional: 'tion',
enci: 'ence',
anci: 'ance',
izer: 'ize',
bli: 'ble',
alli: 'al',
entli: 'ent',
eli: 'e',
ousli: 'ous',
ization: 'ize',
ation: 'ate',
ator: 'ate',
alism: 'al',
iveness: 'ive',
fulness: 'ful',
ousness: 'ous',
aliti: 'al',
iviti: 'ive',
biliti: 'ble',
logi: 'log'
};

var step3list = {
icate: 'ic',
ative: '',
alize: 'al',
iciti: 'ic',
ical: 'ic',
ful: '',
ness: ''
};

var c = "[^aeiou]"; // consonant
var v = "[aeiouy]"; // vowel
var C = c + "[^aeiouy]*"; // consonant sequence
var V = v + "[aeiou]*"; // vowel sequence

var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
var s_v = "^(" + C + ")?" + v; // vowel in stem

this.stemWord = function (w) {
var stem;
var suffix;
var firstch;
var origword = w;

if (w.length < 3) {
return w;
}

var re;
var re2;
var re3;
var re4;

firstch = w.substr(0,1);
if (firstch == "y") {
w = firstch.toUpperCase() + w.substr(1);
}

// Step 1a
re = /^(.+?)(ss|i)es$/;
re2 = /^(.+?)([^s])s$/;

if (re.test(w)) {
w = w.replace(re,"$1$2");
}
else if (re2.test(w)) {
w = w.replace(re2,"$1$2");
}

// Step 1b
re = /^(.+?)eed$/;
re2 = /^(.+?)(ed|ing)$/;
if (re.test(w)) {
var fp = re.exec(w);
re = new RegExp(mgr0);
if (re.test(fp[1])) {
re = /.$/;
w = w.replace(re,"");
}
}
else if (re2.test(w)) {
var fp = re2.exec(w);
stem = fp[1];
re2 = new RegExp(s_v);
if (re2.test(stem)) {
w = stem;
re2 = /(at|bl|iz)$/;
re3 = new RegExp("([^aeiouylsz])\\1$");
re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
if (re2.test(w)) {
w = w + "e";
}
else if (re3.test(w)) {
re = /.$/; w = w.replace(re,"");
}
else if (re4.test(w)) {
w = w + "e";
}
}
}

// Step 1c
re = /^(.+?)y$/;
if (re.test(w)) {
var fp = re.exec(w);
stem = fp[1];
re = new RegExp(s_v);
if (re.test(stem)) { w = stem + "i"; }
}

// Step 2
re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
if (re.test(w)) {
var fp = re.exec(w);
stem = fp[1];
suffix = fp[2];
re = new RegExp(mgr0);
if (re.test(stem)) {
w = stem + step2list[suffix];
}
}

// Step 3
re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
if (re.test(w)) {
var fp = re.exec(w);
stem = fp[1];
suffix = fp[2];
re = new RegExp(mgr0);
if (re.test(stem)) {
w = stem + step3list[suffix];
}
}

// Step 4
re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
re2 = /^(.+?)(s|t)(ion)$/;
if (re.test(w)) {
var fp = re.exec(w);
stem = fp[1];
re = new RegExp(mgr1);
if (re.test(stem)) {
w = stem;
}
}
else if (re2.test(w)) {
var fp = re2.exec(w);
stem = fp[1] + fp[2];
re2 = new RegExp(mgr1);
if (re2.test(stem)) {
w = stem;
}
}

// Step 5
re = /^(.+?)e$/;
if (re.test(w)) {
var fp = re.exec(w);
stem = fp[1];
re = new RegExp(mgr1);
re2 = new RegExp(meq1);
re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) {
w = stem;
}
}
re = /ll$/;
re2 = new RegExp(mgr1);
if (re.test(w) && re2.test(w)) {
re = /.$/;
w = w.replace(re,"");
}

// and turn initial Y back to y
if (firstch == "y") {
w = firstch.toLowerCase() + w.substr(1);
}
return w;
}
}

/**
* Search Module
*/
var Search = {

init : function() {
var params = $.getQueryParameters();
if (params.q) {
var query = params.q[0];
$('input[@name="q"]')[0].value = query;
this.performSearch(query);
}
},

/**
* perform a search for something
*/
performSearch : function(query) {
// create the required interface elements
var out = $('#search-results');
var title = $('<h2>Searching</h2>').appendTo(out);
var dots = $('<span></span>').appendTo(title);
var status = $('<p style="display: none"></p>').appendTo(out);
var output = $('<ul class="search"/>').appendTo(out);

// spawn a background runner for updating the dots
// until the search has finished
var pulseStatus = 0;
function pulse() {
pulseStatus = (pulseStatus + 1) % 4;
var dotString = '';
for (var i = 0; i < pulseStatus; i++) {
dotString += '.';
}
dots.text(dotString);
if (pulseStatus > -1) {
window.setTimeout(pulse, 500);
}
};
pulse();

// stem the searchwords and add them to the
// correct list
var stemmer = new PorterStemmer();
var searchwords = [];
var excluded = [];
var hlwords = [];
var tmp = query.split(/\s+/);
for (var i = 0; i < tmp.length; i++) {
// stem the word
var word = stemmer.stemWord(tmp[i]).toLowerCase();
// select the correct list
if (word[0] == '-') {
var toAppend = excluded;
word = word.substr(1);
}
else {
var toAppend = searchwords;
hlwords.push(tmp[i].toLowerCase());
}
// only add if not already in the list
if (!$.contains(toAppend, word)) {
toAppend.push(word);
}
};
var highlightstring = '?highlight=' + $.urlencode(hlwords.join(" "));

console.debug('SEARCH: searching for:');
console.info('required: ', searchwords);
console.info('excluded: ', excluded);

// fetch searchindex and perform search
$.getJSON('searchindex.json', function(data) {

// prepare search
var filenames = data[0];
var titles = data[1]
var words = data[2];
var fileMap = {};
var files = null;

// perform the search on the required words
for (var i = 0; i < searchwords.length; i++) {
var word = searchwords[i];
// no match but word was a required one
if ((files = words[word]) == null) {
break;
}
// create the mapping
for (var j = 0; j < files.length; j++) {
var file = files[j];
if (file in fileMap) {
fileMap[file].push(word);
}
else {
fileMap[file] = [word];
}
}
}

// now check if the files are in the correct
// areas and if the don't contain excluded words
var results = [];
for (var file in fileMap) {
var valid = true;

// check if all requirements are matched
if (fileMap[file].length != searchwords.length) {
continue;
}
// ensure that none of the excluded words is in the
// search result.
for (var i = 0; i < excluded.length; i++) {
if ($.contains(words[excluded[i]] || [], file)) {
valid = false;
break;
}
}

// if we have still a valid result we can add it
// to the result list
if (valid) {
results.push([filenames[file], titles[file]]);
}
}

// delete unused variables in order to not waste
// memory until list is retrieved completely
delete filenames, titles, words, data;

// now sort the results by title
results.sort(function(a, b) {
var left = a[1].toLowerCase();
var right = b[1].toLowerCase();
return (left > right) ? -1 : ((left < right) ? 1 : 0);
});

// print the results
var resultCount = results.length;
function displayNextItem() {
// results left, load the summary and display it
if (results.length) {
var item = results.pop();
var listItem = $('<li style="display:none"></li>');
listItem.append($('<a/>').attr(
'href',
item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
highlightstring).html(item[1]));
$.get('_sources/' + item[0] + '.txt', function(data) {
listItem.append($.makeSearchSummary(data, searchwords, hlwords));
output.append(listItem);
listItem.slideDown(10, function() {
displayNextItem();
});
});
}
// search finished, update title and status message
else {
pulseStatus = -1;
title.text('Search Results');
if (!resultCount) {
status.text('Your search did not match any documents. ' +
'Please make sure that all words are spelled ' +
'correctly and that you\'ve selected enough ' +
'categories.');
}
else {
status.text('Search finished, found ' + resultCount +
' page' + (resultCount != 1 ? 's' : '') +
' matching the search query.');
}
status.fadeIn(500);
}
}
displayNextItem();
});
}

}

$(document).ready(function() {
Search.init();
});
@ -1,504 +0,0 @@
/**
* Alternate Sphinx design
* Originally created by Armin Ronacher for Werkzeug, adapted by Georg Brandl.
*/

body {
font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif;
font-size: 14px;
letter-spacing: -0.01em;
line-height: 150%;
text-align: center;
/*background-color: #AFC1C4; */
background-color: #BFD1D4;
color: black;
padding: 0;
border: 1px solid #aaa;

margin: 0px 80px 0px 80px;
min-width: 740px;
}

a {
color: #CA7900;
text-decoration: none;
}

a:hover {
color: #2491CF;
}

pre {
font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
font-size: 0.95em;
letter-spacing: 0.015em;
padding: 0.5em;
border: 1px solid #ccc;
background-color: #f8f8f8;
}

td.linenos pre {
padding: 0.5em 0;
border: 0;
background-color: transparent;
color: #aaa;
}

table.highlighttable {
margin-left: 0.5em;
}

table.highlighttable td {
padding: 0 0.5em 0 0.5em;
}

cite, code, tt {
font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
font-size: 0.95em;
letter-spacing: 0.01em;
}

hr {
border: 1px solid #abc;
margin: 2em;
}

tt {
background-color: #f2f2f2;
border-bottom: 1px solid #ddd;
color: #333;
}

tt.descname {
background-color: transparent;
font-weight: bold;
font-size: 1.2em;
border: 0;
}

tt.descclassname {
background-color: transparent;
border: 0;
}

tt.xref {
background-color: transparent;
font-weight: bold;
border: 0;
}

a tt {
background-color: transparent;
font-weight: bold;
border: 0;
color: #CA7900;
}

a tt:hover {
color: #2491CF;
}

.field-list ul {
margin: 0;
padding-left: 1em;
}

.field-list p {
margin: 0;
}

dl {
margin-bottom: 15px;
}

dd p {
margin-top: 0px;
}

dd ul, dd table {
margin-bottom: 10px;
}

dd {
margin-top: 3px;
margin-bottom: 10px;
margin-left: 30px;
}

.refcount {
color: #060;
}

dt:target,
.highlight {
background-color: #fbe54e;
}

dl.glossary dt {
font-weight: bold;
font-size: 1.1em;
}

pre {
line-height: 120%;
}

pre a {
color: inherit;
text-decoration: underline;
}

.first {
margin-top: 0 !important;
}

div.document {
background-color: white;
text-align: left;
background-image: url(contents.png);
background-repeat: repeat-x;
}

/*
div.documentwrapper {
width: 100%;
}
*/

div.clearer {
clear: both;
}

div.related h3 {
display: none;
}

div.related ul {
background-image: url(navigation.png);
height: 2em;
list-style: none;
border-top: 1px solid #ddd;
border-bottom: 1px solid #ddd;
margin: 0;
padding-left: 10px;
}

div.related ul li {
margin: 0;
padding: 0;
height: 2em;
float: left;
}

div.related ul li.right {
float: right;
margin-right: 5px;
}

div.related ul li a {
margin: 0;
padding: 0 5px 0 5px;
line-height: 1.75em;
color: #EE9816;
}

div.related ul li a:hover {
color: #3CA8E7;
}

div.body {
margin: 0;
padding: 0.5em 20px 20px 20px;
}

div.bodywrapper {
margin: 0 240px 0 0;
border-right: 1px solid #ccc;
}

div.body a {
text-decoration: underline;
}

div.sphinxsidebar {
margin: 0;
padding: 0.5em 15px 15px 0;
width: 210px;
float: right;
text-align: left;
/* margin-left: -100%; */
}

div.sphinxsidebar h4, div.sphinxsidebar h3 {
margin: 1em 0 0.5em 0;
font-size: 0.9em;
padding: 0.1em 0 0.1em 0.5em;
color: white;
border: 1px solid #86989B;
background-color: #AFC1C4;
}

div.sphinxsidebar ul {
padding-left: 1.5em;
margin-top: 7px;
list-style: none;
padding: 0;
line-height: 130%;
}

div.sphinxsidebar ul ul {
list-style: square;
margin-left: 20px;
}

p {
margin: 0.8em 0 0.5em 0;
}

p.rubric {
font-weight: bold;
}

h1 {
margin: 0;
padding: 0.7em 0 0.3em 0;
font-size: 1.5em;
color: #11557C;
}

h2 {
margin: 1.3em 0 0.2em 0;
font-size: 1.35em;
padding: 0;
}

h3 {
margin: 1em 0 -0.3em 0;
font-size: 1.2em;
}

h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {
color: black!important;
}

h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor {
display: none;
margin: 0 0 0 0.3em;
padding: 0 0.2em 0 0.2em;
color: #aaa!important;
}

h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor,
h5:hover a.anchor, h6:hover a.anchor {
display: inline;
}

h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover,
h5 a.anchor:hover, h6 a.anchor:hover {
color: #777;
background-color: #eee;
}

table {
border-collapse: collapse;
margin: 0 -0.5em 0 -0.5em;
}

table td, table th {
padding: 0.2em 0.5em 0.2em 0.5em;
}

div.footer {
background-color: #E3EFF1;
color: #86989B;
padding: 3px 8px 3px 0;
clear: both;
font-size: 0.8em;
text-align: right;
}

div.footer a {
color: #86989B;
text-decoration: underline;
}

div.pagination {
margin-top: 2em;
padding-top: 0.5em;
border-top: 1px solid black;
text-align: center;
}

div.sphinxsidebar ul.toc {
margin: 1em 0 1em 0;
padding: 0 0 0 0.5em;
list-style: none;
}

div.sphinxsidebar ul.toc li {
margin: 0.5em 0 0.5em 0;
font-size: 0.9em;
line-height: 130%;
}

div.sphinxsidebar ul.toc li p {
margin: 0;
padding: 0;
}

div.sphinxsidebar ul.toc ul {
margin: 0.2em 0 0.2em 0;
padding: 0 0 0 1.8em;
}

div.sphinxsidebar ul.toc ul li {
padding: 0;
}

div.admonition, div.warning {
font-size: 0.9em;
margin: 1em 0 0 0;
border: 1px solid #86989B;
background-color: #f7f7f7;
}

div.admonition p, div.warning p {
margin: 0.5em 1em 0.5em 1em;
padding: 0;
}

div.admonition pre, div.warning pre {
margin: 0.4em 1em 0.4em 1em;
}

div.admonition p.admonition-title,
div.warning p.admonition-title {
margin: 0;
padding: 0.1em 0 0.1em 0.5em;
color: white;
border-bottom: 1px solid #86989B;
font-weight: bold;
background-color: #AFC1C4;
}

div.warning {
border: 1px solid #940000;
}

div.warning p.admonition-title {
background-color: #CF0000;
border-bottom-color: #940000;
}

div.admonition ul, div.admonition ol,
div.warning ul, div.warning ol {
margin: 0.1em 0.5em 0.5em 3em;
padding: 0;
}

div.versioninfo {
margin: 1em 0 0 0;
border: 1px solid #ccc;
background-color: #DDEAF0;
padding: 8px;
line-height: 1.3em;
font-size: 0.9em;
}

a.headerlink {
color: #c60f0f!important;
font-size: 1em;
margin-left: 6px;
padding: 0 4px 0 4px;
text-decoration: none!important;
visibility: hidden;
}

h1:hover > a.headerlink,
h2:hover > a.headerlink,
h3:hover > a.headerlink,
h4:hover > a.headerlink,
h5:hover > a.headerlink,
h6:hover > a.headerlink,
dt:hover > a.headerlink {
visibility: visible;
}

a.headerlink:hover {
background-color: #ccc;
color: white!important;
}

table.indextable td {
text-align: left;
vertical-align: top;
}

table.indextable dl, table.indextable dd {
margin-top: 0;
margin-bottom: 0;
}

table.indextable tr.pcap {
height: 10px;
}

table.indextable tr.cap {
margin-top: 10px;
background-color: #f2f2f2;
}

img.toggler {
margin-right: 3px;
margin-top: 3px;
cursor: pointer;
}

form.pfform {
margin: 10px 0 20px 0;
}

table.contentstable {
width: 90%;
}

table.contentstable p.biglink {
line-height: 150%;
}

a.biglink {
font-size: 1.3em;
}

span.linkdescr {
font-style: italic;
padding-top: 5px;
font-size: 90%;
}

ul.search {
margin: 10px 0 0 20px;
padding: 0;
}

ul.search li {
padding: 5px 0 5px 20px;
background-image: url(file.png);
background-repeat: no-repeat;
background-position: 0 7px;
}

ul.search li a {
font-weight: bold;
}

ul.search li div.context {
color: #888;
margin: 2px 0 0 30px;
text-align: left;
}

ul.keywordmatches li.goodmatch a {
font-weight: bold;
}
@ -1,19 +0,0 @@
/**
* Sphinx Doc Design -- Sticky sidebar Overrides
*/

div.sphinxsidebar {
top: 30px;
left: 0px;
position: fixed;
margin: 0;
float: none;
}

div.related {
position: fixed;
}

div.documentwrapper {
margin-top: 30px;
}
@@ -1,700 +0,0 @@
/**
 * Sphinx Doc Design -- traditional python.org style
 */

/* [700-line stylesheet removed in full. It covered the theme's LAYOUT,
   SIDEBAR, SEARCH, COMMON FORM STYLES, INLINE COMMENTS, COMMENTS,
   SUGGEST CHANGES, PREVIEW, INDEX PAGE, GENINDEX STYLES and GLOBAL STYLES
   sections, plus the "Footnotes", "Topics", Admonitions, docutils table,
   code/tt, comment-form and @media print rules.] */
@@ -1,136 +0,0 @@
<!-- Generated page "Index — simplejson v2.1.1 documentation" (genindex.html),
     removed in full. Its general index listed: decode() and raw_decode()
     (simplejson.JSONDecoder methods), default(), encode() and iterencode()
     (simplejson.JSONEncoder methods), dump(), dumps(), load(), loads(),
     the JSONDecoder, JSONEncoder and JSONEncoderForHTML classes, and the
     simplejson module itself. -->
@@ -1,564 +0,0 @@
<!-- Generated page "simplejson — JSON encoder and decoder — simplejson v2.1.1
     documentation" (index.html), removed in full. Navigation, sidebar and
     footer markup omitted; the recoverable page content follows. -->

simplejson — JSON encoder and decoder
=====================================

JSON (JavaScript Object Notation) <http://json.org> is a subset of JavaScript
syntax (ECMA-262 3rd edition) used as a lightweight data interchange format.

simplejson exposes an API familiar to users of the standard library marshal
and pickle modules. It is the externally maintained version of the json
library contained in Python 2.6, but maintains compatibility with Python 2.5
and (currently) has significant performance advantages, even without using
the optional C extension for speedups.

Encoding basic Python object hierarchies:

    >>> import simplejson as json
    >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
    '["foo", {"bar": ["baz", null, 1.0, 2]}]'
    >>> print json.dumps("\"foo\bar")
    "\"foo\bar"
    >>> print json.dumps(u'\u1234')
    "\u1234"
    >>> print json.dumps('\\')
    "\\"
    >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
    {"a": 0, "b": 0, "c": 0}
    >>> from StringIO import StringIO
    >>> io = StringIO()
    >>> json.dump(['streaming API'], io)
    >>> io.getvalue()
    '["streaming API"]'

Compact encoding:

    >>> import simplejson as json
    >>> json.dumps([1, 2, 3, {'4': 5, '6': 7}], separators=(',', ':'))
    '[1,2,3,{"4":5,"6":7}]'

Pretty printing:

    >>> import simplejson as json
    >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4 * ' ')
    >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
    {
        "4": 5,
        "6": 7
    }

Decoding JSON:

    >>> import simplejson as json
    >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
    >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
    True
    >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
    True
    >>> from StringIO import StringIO
    >>> io = StringIO('["streaming API"]')
    >>> json.load(io)[0] == 'streaming API'
    True

Using Decimal instead of float:

    >>> import simplejson as json
    >>> from decimal import Decimal
    >>> json.loads('1.1', use_decimal=True) == Decimal('1.1')
    True
    >>> json.dumps(Decimal('1.1'), use_decimal=True) == '1.1'
    True

Specializing JSON object decoding:

    >>> import simplejson as json
    >>> def as_complex(dct):
    ...     if '__complex__' in dct:
    ...         return complex(dct['real'], dct['imag'])
    ...     return dct
    ...
    >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
    ...     object_hook=as_complex)
    (1+2j)
    >>> import decimal
    >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
    True

Specializing JSON object encoding:

    >>> import simplejson as json
    >>> def encode_complex(obj):
    ...     if isinstance(obj, complex):
    ...         return [obj.real, obj.imag]
    ...     raise TypeError(repr(obj) + " is not JSON serializable")
    ...
    >>> json.dumps(2 + 1j, default=encode_complex)
    '[2.0, 1.0]'
    >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
    '[2.0, 1.0]'
    >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
    '[2.0, 1.0]'

Using simplejson.tool from the shell to validate and pretty-print:

    $ echo '{"json":"obj"}' | python -m simplejson.tool
    {
        "json": "obj"
    }
    $ echo '{ 1.2:3.4}' | python -m simplejson.tool
    Expecting property name: line 1 column 2 (char 2)

Note: The JSON produced by this module's default settings is a subset of
YAML, so it may be used as a serializer for that as well.
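The decoding and encoding hooks shown above are designed to compose: default=
lowers unsupported objects to tagged dicts on the way out, and object_hook=
rebuilds them on the way in. A minimal round-trip sketch reusing the
__complex__ tagging convention from the examples above; the helper names are
illustrative, and the same keyword arguments also exist in the standard
library json module:

    import simplejson as json

    def encode_complex(obj):
        # default= hook: called only for objects the encoder cannot handle.
        if isinstance(obj, complex):
            return {'__complex__': True, 'real': obj.real, 'imag': obj.imag}
        raise TypeError(repr(obj) + " is not JSON serializable")

    def as_complex(dct):
        # object_hook= hook: called for every decoded JSON object.
        if '__complex__' in dct:
            return complex(dct['real'], dct['imag'])
        return dct

    payload = json.dumps({'z': 2 + 1j}, default=encode_complex)
    restored = json.loads(payload, object_hook=as_complex)
    assert restored == {'z': 2 + 1j}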
Basic Usage
-----------

simplejson.dump(obj, fp[, skipkeys[, ensure_ascii[, check_circular[,
                allow_nan[, cls[, indent[, separators[, encoding[, default[,
                use_decimal[, **kw]]]]]]]]]]])

    Serialize obj as a JSON formatted stream to fp (a .write()-supporting
    file-like object).

    If skipkeys is true (default: False), then dict keys that are not of a
    basic type (str, unicode, int, long, float, bool, None) will be skipped
    instead of raising a TypeError.

    If ensure_ascii is false (default: True), then some chunks written to fp
    may be unicode instances, subject to normal Python str to unicode
    coercion rules. Unless fp.write() explicitly understands unicode (as in
    codecs.getwriter()) this is likely to cause an error. It's best to leave
    the default settings, because they are safe and highly optimized.

    If check_circular is false (default: True), then the circular reference
    check for container types will be skipped and a circular reference will
    result in an OverflowError (or worse).

    If allow_nan is false (default: True), then it will be a ValueError to
    serialize out-of-range float values (nan, inf, -inf), in strict
    compliance with the JSON specification. If allow_nan is true, their
    JavaScript equivalents will be used (NaN, Infinity, -Infinity).

    If indent is a string, then JSON array elements and object members will
    be pretty-printed with a newline followed by that string repeated for
    each level of nesting. None (the default) selects the most compact
    representation without any newlines. For backwards compatibility with
    versions of simplejson earlier than 2.1.0, an integer is also accepted
    and is converted to a string with that many spaces.

    Changed in version 2.1.0: changed indent from an integer number of
    spaces to a string.

    If specified, separators should be an (item_separator, dict_separator)
    tuple. By default, (', ', ': ') is used. To get the most compact JSON
    representation, specify (',', ':') to eliminate whitespace.

    encoding is the character encoding for str instances; the default is
    'utf-8'.

    default(obj) is a function that should return a serializable version of
    obj or raise TypeError. The default simply raises TypeError.

    To use a custom JSONEncoder subclass (e.g. one that overrides the
    default() method to serialize additional types), specify it with the cls
    kwarg.

    If use_decimal is true (default: False), then decimal.Decimal will be
    natively serialized to JSON with full precision.

    Changed in version 2.1.0: use_decimal is new in 2.1.0.

    Note: JSON is not a framed protocol, so unlike pickle or marshal it does
    not make sense to serialize more than one JSON document without some
    container protocol to delimit them.
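As a small illustration of the indent and separators behaviour described
above, a sketch against the 2.1.x behaviour where indent may be a string;
the sample output in the comments is indicative:

    import simplejson as json

    data = {'b': 1, 'a': [1, 2]}

    # Most compact form: no whitespace between tokens at all.
    compact = json.dumps(data, sort_keys=True, separators=(',', ':'))
    # -> '{"a":[1,2],"b":1}'

    # Pretty-printed form: each nesting level adds one copy of the indent
    # string, with array elements and object members one per line.
    pretty = json.dumps(data, sort_keys=True, indent='  ')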
|
|
||||||
|
|
||||||
<dl class="function">
|
|
||||||
<dt id="simplejson.dumps">
|
|
||||||
<!--[simplejson.dumps]--><tt class="descclassname">simplejson.</tt><tt class="descname">dumps</tt><big>(</big><em>obj</em><span class="optional">[</span>, <em>skipkeys</em><span class="optional">[</span>, <em>ensure_ascii</em><span class="optional">[</span>, <em>check_circular</em><span class="optional">[</span>, <em>allow_nan</em><span class="optional">[</span>, <em>cls</em><span class="optional">[</span>, <em>indent</em><span class="optional">[</span>, <em>separators</em><span class="optional">[</span>, <em>encoding</em><span class="optional">[</span>, <em>default</em><span class="optional">[</span>, <em>use_decimal</em><span class="optional">[</span>, <em>**kw</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#simplejson.dumps" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Serialize <em>obj</em> to a JSON formatted <tt class="xref docutils literal"><span class="pre">str</span></tt>.</p>
|
|
||||||
<p>If <em>ensure_ascii</em> is false, then the return value will be a
|
|
||||||
<tt class="xref docutils literal"><span class="pre">unicode</span></tt> instance. The other arguments have the same meaning as in
|
|
||||||
<a title="simplejson.dump" class="reference internal" href="#simplejson.dump"><tt class="xref docutils literal"><span class="pre">dump()</span></tt></a>. Note that the default <em>ensure_ascii</em> setting has much
|
|
||||||
better performance.</p>
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
<dl class="function">
|
|
||||||
<dt id="simplejson.load">
|
|
||||||
<!--[simplejson.load]--><tt class="descclassname">simplejson.</tt><tt class="descname">load</tt><big>(</big><em>fp</em><span class="optional">[</span>, <em>encoding</em><span class="optional">[</span>, <em>cls</em><span class="optional">[</span>, <em>object_hook</em><span class="optional">[</span>, <em>parse_float</em><span class="optional">[</span>, <em>parse_int</em><span class="optional">[</span>, <em>parse_constant</em><span class="optional">[</span>, <em>object_pairs_hook</em><span class="optional">[</span>, <em>use_decimal</em><span class="optional">[</span>, <em>**kw</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#simplejson.load" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Deserialize <em>fp</em> (a <tt class="docutils literal"><span class="pre">.read()</span></tt>-supporting file-like object containing a JSON
|
|
||||||
document) to a Python object.</p>
|
|
||||||
<p>If the contents of <em>fp</em> are encoded with an ASCII based encoding other than
|
|
||||||
UTF-8 (e.g. latin-1), then an appropriate <em>encoding</em> name must be specified.
|
|
||||||
Encodings that are not ASCII based (such as UCS-2) are not allowed, and
|
|
||||||
should be wrapped with <tt class="docutils literal"><span class="pre">codecs.getreader(fp)(encoding)</span></tt>, or simply decoded
|
|
||||||
to a <tt class="xref docutils literal"><span class="pre">unicode</span></tt> object and passed to <a title="simplejson.loads" class="reference internal" href="#simplejson.loads"><tt class="xref docutils literal"><span class="pre">loads()</span></tt></a>. The default
|
|
||||||
setting of <tt class="docutils literal"><span class="pre">'utf-8'</span></tt> is fastest and should be using whenever possible.</p>
|
|
||||||
<p>If <em>fp.read()</em> returns <tt class="xref docutils literal"><span class="pre">str</span></tt> then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as <tt class="xref docutils literal"><span class="pre">str</span></tt> for performance and
|
|
||||||
memory reasons. If your code expects only <tt class="xref docutils literal"><span class="pre">unicode</span></tt> the appropriate
|
|
||||||
solution is to wrap fp with a reader as demonstrated above.</p>
|
|
||||||
<p><em>object_hook</em> is an optional function that will be called with the result of
|
|
||||||
any object literal decode (a <tt class="xref docutils literal"><span class="pre">dict</span></tt>). The return value of
|
|
||||||
<em>object_hook</em> will be used instead of the <tt class="xref docutils literal"><span class="pre">dict</span></tt>. This feature can be used
|
|
||||||
to implement custom decoders (e.g. JSON-RPC class hinting).</p>
|
|
||||||
<p><em>object_pairs_hook</em> is an optional function that will be called with the
|
|
||||||
result of any object literal decode with an ordered list of pairs. The
|
|
||||||
return value of <em>object_pairs_hook</em> will be used instead of the
|
|
||||||
<tt class="xref docutils literal"><span class="pre">dict</span></tt>. This feature can be used to implement custom decoders that
|
|
||||||
rely on the order that the key and value pairs are decoded (for example,
|
|
||||||
<tt class="xref docutils literal"><span class="pre">collections.OrderedDict</span></tt> will remember the order of insertion). If
|
|
||||||
<em>object_hook</em> is also defined, the <em>object_pairs_hook</em> takes priority.</p>
|
|
||||||
<p>
|
|
||||||
<span class="versionmodified">Changed in version 2.1.0: </span>Added support for <em>object_pairs_hook</em>.</p>
|
|
||||||
<p><em>parse_float</em>, if specified, will be called with the string of every JSON
|
|
||||||
float to be decoded. By default, this is equivalent to <tt class="docutils literal"><span class="pre">float(num_str)</span></tt>.
|
|
||||||
This can be used to use another datatype or parser for JSON floats
|
|
||||||
(e.g. <tt class="xref docutils literal"><span class="pre">decimal.Decimal</span></tt>).</p>
|
|
||||||
<p><em>parse_int</em>, if specified, will be called with the string of every JSON int
|
|
||||||
to be decoded. By default, this is equivalent to <tt class="docutils literal"><span class="pre">int(num_str)</span></tt>. This can
|
|
||||||
be used to use another datatype or parser for JSON integers
|
|
||||||
(e.g. <tt class="xref docutils literal"><span class="pre">float</span></tt>).</p>
|
|
||||||
<p><em>parse_constant</em>, if specified, will be called with one of the following
|
|
||||||
strings: <tt class="docutils literal"><span class="pre">'-Infinity'</span></tt>, <tt class="docutils literal"><span class="pre">'Infinity'</span></tt>, <tt class="docutils literal"><span class="pre">'NaN'</span></tt>. This can be used to
|
|
||||||
raise an exception if invalid JSON numbers are encountered.</p>
|
|
||||||
<p>If <em>use_decimal</em> is true (default: <tt class="xref docutils literal"><span class="pre">False</span></tt>) then <em>parse_float</em> is set to
|
|
||||||
<tt class="xref docutils literal"><span class="pre">decimal.Decimal</span></tt>. This is a convenience for parity with the
|
|
||||||
<a title="simplejson.dump" class="reference internal" href="#simplejson.dump"><tt class="xref docutils literal"><span class="pre">dump()</span></tt></a> parameter.</p>
|
|
||||||
<p>
|
|
||||||
<span class="versionmodified">Changed in version 2.1.0: </span><em>use_decimal</em> is new in 2.1.0.</p>
|
|
||||||
<p>To use a custom <a title="simplejson.JSONDecoder" class="reference internal" href="#simplejson.JSONDecoder"><tt class="xref docutils literal"><span class="pre">JSONDecoder</span></tt></a> subclass, specify it with the <tt class="docutils literal"><span class="pre">cls</span></tt>
|
|
||||||
kwarg. Additional keyword arguments will be passed to the constructor of the
|
|
||||||
class.</p>
|
|
||||||
<blockquote>
|
|
||||||
<div class="admonition note">
|
|
||||||
<p class="first admonition-title">Note</p>
|
|
||||||
<p class="last"><a title="simplejson.load" class="reference internal" href="#simplejson.load"><tt class="xref docutils literal"><span class="pre">load()</span></tt></a> will read the rest of the file-like object as a string and
|
|
||||||
then call <a title="simplejson.loads" class="reference internal" href="#simplejson.loads"><tt class="xref docutils literal"><span class="pre">loads()</span></tt></a>. It does not stop at the end of the first valid
|
|
||||||
JSON document it finds and it will raise an error if there is anything
|
|
||||||
other than whitespace after the document. Except for files containing
|
|
||||||
only one JSON document, it is recommended to use <a title="simplejson.loads" class="reference internal" href="#simplejson.loads"><tt class="xref docutils literal"><span class="pre">loads()</span></tt></a>.</p>
|
|
||||||
</div>
|
|
||||||
</blockquote>
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
<dl class="function">
|
|
||||||
<dt id="simplejson.loads">
|
|
||||||
<!--[simplejson.loads]--><tt class="descclassname">simplejson.</tt><tt class="descname">loads</tt><big>(</big><em>s</em><span class="optional">[</span>, <em>encoding</em><span class="optional">[</span>, <em>cls</em><span class="optional">[</span>, <em>object_hook</em><span class="optional">[</span>, <em>parse_float</em><span class="optional">[</span>, <em>parse_int</em><span class="optional">[</span>, <em>parse_constant</em><span class="optional">[</span>, <em>object_pairs_hook</em><span class="optional">[</span>, <em>use_decimal</em><span class="optional">[</span>, <em>**kw</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#simplejson.loads" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Deserialize <em>s</em> (a <tt class="xref docutils literal"><span class="pre">str</span></tt> or <tt class="xref docutils literal"><span class="pre">unicode</span></tt> instance containing a JSON
|
|
||||||
document) to a Python object.</p>
|
|
||||||
<p>If <em>s</em> is a <tt class="xref docutils literal"><span class="pre">str</span></tt> instance and is encoded with an ASCII based encoding
|
|
||||||
other than UTF-8 (e.g. latin-1), then an appropriate <em>encoding</em> name must be
|
|
||||||
specified. Encodings that are not ASCII based (such as UCS-2) are not
|
|
||||||
allowed and should be decoded to <tt class="xref docutils literal"><span class="pre">unicode</span></tt> first.</p>
|
|
||||||
<p>If <em>s</em> is a <tt class="xref docutils literal"><span class="pre">str</span></tt> then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as <tt class="xref docutils literal"><span class="pre">str</span></tt> for performance and
|
|
||||||
memory reasons. If your code expects only <tt class="xref docutils literal"><span class="pre">unicode</span></tt> the appropriate
|
|
||||||
solution is to decode <em>s</em> to <tt class="xref docutils literal"><span class="pre">unicode</span></tt> prior to calling loads.</p>
|
|
||||||
<p>The other arguments have the same meaning as in <a title="simplejson.load" class="reference internal" href="#simplejson.load"><tt class="xref docutils literal"><span class="pre">load()</span></tt></a>.</p>
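<p>For illustration (this snippet is not part of the original reference text), decoding with <tt class="docutils literal"><span class="pre">use_decimal</span></tt> might look like:</p>
<div class="highlight"><pre>&gt;&gt;&gt; import simplejson as json
&gt;&gt;&gt; json.loads('1.10', use_decimal=True)
Decimal('1.10')
</pre></div>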
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
<div class="section" id="encoders-and-decoders">
|
|
||||||
<h2 id="encoders-and-decoders">Encoders and decoders<a class="headerlink" href="#encoders-and-decoders" title="Permalink to this headline">¶</a></h2>
|
|
||||||
<dl class="class">
|
|
||||||
<dt id="simplejson.JSONDecoder">
|
|
||||||
<!--[simplejson.JSONDecoder]-->class <tt class="descclassname">simplejson.</tt><tt class="descname">JSONDecoder</tt><big>(</big><span class="optional">[</span><em>encoding</em><span class="optional">[</span>, <em>object_hook</em><span class="optional">[</span>, <em>parse_float</em><span class="optional">[</span>, <em>parse_int</em><span class="optional">[</span>, <em>parse_constant</em><span class="optional">[</span>, <em>object_pairs_hook</em><span class="optional">[</span>, <em>strict</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#simplejson.JSONDecoder" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Simple JSON decoder.</p>
|
|
||||||
<p>Performs the following translations in decoding by default:</p>
|
|
||||||
<table border="1" class="docutils">
|
|
||||||
<colgroup>
|
|
||||||
<col width="44%" />
|
|
||||||
<col width="56%" />
|
|
||||||
</colgroup>
|
|
||||||
<thead valign="bottom">
|
|
||||||
<tr><th class="head">JSON</th>
|
|
||||||
<th class="head">Python</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody valign="top">
|
|
||||||
<tr><td>object</td>
|
|
||||||
<td>dict</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>array</td>
|
|
||||||
<td>list</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>string</td>
|
|
||||||
<td>unicode</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>number (int)</td>
|
|
||||||
<td>int, long</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>number (real)</td>
|
|
||||||
<td>float</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>true</td>
|
|
||||||
<td>True</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>false</td>
|
|
||||||
<td>False</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>null</td>
|
|
||||||
<td>None</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
<p>It also understands <tt class="docutils literal"><span class="pre">NaN</span></tt>, <tt class="docutils literal"><span class="pre">Infinity</span></tt>, and <tt class="docutils literal"><span class="pre">-Infinity</span></tt> as their
|
|
||||||
corresponding <tt class="docutils literal"><span class="pre">float</span></tt> values, which is outside the JSON spec.</p>
|
|
||||||
<p><em>encoding</em> determines the encoding used to interpret any <tt class="xref docutils literal"><span class="pre">str</span></tt> objects
|
|
||||||
decoded by this instance (<tt class="docutils literal"><span class="pre">'utf-8'</span></tt> by default). It has no effect when decoding
|
|
||||||
<tt class="xref docutils literal"><span class="pre">unicode</span></tt> objects.</p>
|
|
||||||
<p>Note that currently only encodings that are a superset of ASCII work, strings
|
|
||||||
of other encodings should be passed in as <tt class="xref docutils literal"><span class="pre">unicode</span></tt>.</p>
|
|
||||||
<p><em>object_hook</em> is an optional function that will be called with the result of
|
|
||||||
every JSON object decoded and its return value will be used in place of the
|
|
||||||
given <tt class="xref docutils literal"><span class="pre">dict</span></tt>. This can be used to provide custom deserializations
|
|
||||||
(e.g. to support JSON-RPC class hinting).</p>
|
|
||||||
<p><em>object_pairs_hook</em> is an optional function that will be called with the
|
|
||||||
result of any object literal decode with an ordered list of pairs. The
|
|
||||||
return value of <em>object_pairs_hook</em> will be used instead of the
|
|
||||||
<tt class="xref docutils literal"><span class="pre">dict</span></tt>. This feature can be used to implement custom decoders that
|
|
||||||
rely on the order that the key and value pairs are decoded (for example,
|
|
||||||
<tt class="xref docutils literal"><span class="pre">collections.OrderedDict</span></tt> will remember the order of insertion). If
|
|
||||||
<em>object_hook</em> is also defined, the <em>object_pairs_hook</em> takes priority.</p>
|
|
||||||
<p>
|
|
||||||
<span class="versionmodified">Changed in version 2.1.0: </span>Added support for <em>object_pairs_hook</em>.</p>
|
|
||||||
<p><em>parse_float</em>, if specified, will be called with the string of every JSON
|
|
||||||
float to be decoded. By default, this is equivalent to <tt class="docutils literal"><span class="pre">float(num_str)</span></tt>.
|
|
||||||
This can be used to use another datatype or parser for JSON floats
|
|
||||||
(e.g. <tt class="xref docutils literal"><span class="pre">decimal.Decimal</span></tt>).</p>
|
|
||||||
<p><em>parse_int</em>, if specified, will be called with the string of every JSON int
|
|
||||||
to be decoded. By default, this is equivalent to <tt class="docutils literal"><span class="pre">int(num_str)</span></tt>. This can
|
|
||||||
be used to use another datatype or parser for JSON integers
|
|
||||||
(e.g. <tt class="xref docutils literal"><span class="pre">float</span></tt>).</p>
|
|
||||||
<p><em>parse_constant</em>, if specified, will be called with one of the following
|
|
||||||
strings: <tt class="docutils literal"><span class="pre">'-Infinity'</span></tt>, <tt class="docutils literal"><span class="pre">'Infinity'</span></tt>, <tt class="docutils literal"><span class="pre">'NaN'</span></tt>. This can be used to
|
|
||||||
raise an exception if invalid JSON numbers are encountered.</p>
|
|
||||||
<p><em>strict</em> controls the parser’s behavior when it encounters an invalid
|
|
||||||
control character in a string. The default setting of <tt class="xref docutils literal"><span class="pre">True</span></tt> means that
|
|
||||||
unescaped control characters are parse errors; if <tt class="xref docutils literal"><span class="pre">False</span></tt>, then control
|
|
||||||
characters will be allowed in strings.</p>
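<p>An illustrative sketch (not from the original text): a decoder constructed with <tt class="docutils literal"><span class="pre">strict=False</span></tt> accepts a raw tab character inside a string, which the default strict decoder would reject:</p>
<div class="highlight"><pre>&gt;&gt;&gt; import simplejson as json
&gt;&gt;&gt; json.JSONDecoder(strict=False).decode('"tab\there"') == u'tab\there'
True
</pre></div>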
|
|
||||||
<dl class="method">
|
|
||||||
<dt id="simplejson.JSONDecoder.decode">
|
|
||||||
<!--[simplejson.JSONDecoder.decode]--><tt class="descname">decode</tt><big>(</big><em>s</em><big>)</big><a class="headerlink" href="#simplejson.JSONDecoder.decode" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Return the Python representation of <em>s</em> (a <tt class="xref docutils literal"><span class="pre">str</span></tt> or
|
|
||||||
<tt class="xref docutils literal"><span class="pre">unicode</span></tt> instance containing a JSON document)</p>
|
|
||||||
<p>If <em>s</em> is a <tt class="xref docutils literal"><span class="pre">str</span></tt> then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as <tt class="xref docutils literal"><span class="pre">str</span></tt> for performance and
|
|
||||||
memory reasons. If your code expects only <tt class="xref docutils literal"><span class="pre">unicode</span></tt> the
|
|
||||||
appropriate solution is to decode <em>s</em> to <tt class="xref docutils literal"><span class="pre">unicode</span></tt> prior to calling
|
|
||||||
decode.</p>
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
<dl class="method">
|
|
||||||
<dt id="simplejson.JSONDecoder.raw_decode">
|
|
||||||
<!--[simplejson.JSONDecoder.raw_decode]--><tt class="descname">raw_decode</tt><big>(</big><em>s</em><big>)</big><a class="headerlink" href="#simplejson.JSONDecoder.raw_decode" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Decode a JSON document from <em>s</em> (a <tt class="xref docutils literal"><span class="pre">str</span></tt> or <tt class="xref docutils literal"><span class="pre">unicode</span></tt>
|
|
||||||
beginning with a JSON document) and return a 2-tuple of the Python
|
|
||||||
representation and the index in <em>s</em> where the document ended.</p>
|
|
||||||
<p>This can be used to decode a JSON document from a string that may have
|
|
||||||
extraneous data at the end.</p>
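<p>For example (an illustrative sketch): the returned index points just past the decoded document, so it can be used to peel one document off the front of a larger buffer:</p>
<div class="highlight"><pre>&gt;&gt;&gt; import simplejson as json
&gt;&gt;&gt; obj, end = json.JSONDecoder().raw_decode('{"a": 1} trailing garbage')
&gt;&gt;&gt; obj == {'a': 1}
True
&gt;&gt;&gt; end
8
</pre></div>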
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
<dl class="class">
|
|
||||||
<dt id="simplejson.JSONEncoder">
|
|
||||||
<!--[simplejson.JSONEncoder]-->class <tt class="descclassname">simplejson.</tt><tt class="descname">JSONEncoder</tt><big>(</big><span class="optional">[</span><em>skipkeys</em><span class="optional">[</span>, <em>ensure_ascii</em><span class="optional">[</span>, <em>check_circular</em><span class="optional">[</span>, <em>allow_nan</em><span class="optional">[</span>, <em>sort_keys</em><span class="optional">[</span>, <em>indent</em><span class="optional">[</span>, <em>separators</em><span class="optional">[</span>, <em>encoding</em><span class="optional">[</span>, <em>default</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#simplejson.JSONEncoder" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Extensible JSON encoder for Python data structures.</p>
|
|
||||||
<p>Supports the following objects and types by default:</p>
|
|
||||||
<table border="1" class="docutils">
|
|
||||||
<colgroup>
|
|
||||||
<col width="56%" />
|
|
||||||
<col width="44%" />
|
|
||||||
</colgroup>
|
|
||||||
<thead valign="bottom">
|
|
||||||
<tr><th class="head">Python</th>
|
|
||||||
<th class="head">JSON</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody valign="top">
|
|
||||||
<tr><td>dict</td>
|
|
||||||
<td>object</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>list, tuple</td>
|
|
||||||
<td>array</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>str, unicode</td>
|
|
||||||
<td>string</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>int, long, float</td>
|
|
||||||
<td>number</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>True</td>
|
|
||||||
<td>true</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>False</td>
|
|
||||||
<td>false</td>
|
|
||||||
</tr>
|
|
||||||
<tr><td>None</td>
|
|
||||||
<td>null</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
<p>To extend this to recognize other objects, subclass and implement a
|
|
||||||
<a title="simplejson.JSONEncoder.default" class="reference internal" href="#simplejson.JSONEncoder.default"><tt class="xref docutils literal"><span class="pre">default()</span></tt></a> method with another method that returns a serializable object
|
|
||||||
for <tt class="docutils literal"><span class="pre">o</span></tt> if possible, otherwise it should call the superclass implementation
|
|
||||||
(to raise <tt class="xref docutils literal"><span class="pre">TypeError</span></tt>).</p>
|
|
||||||
<p>If <em>skipkeys</em> is false (the default), then it is a <tt class="xref docutils literal"><span class="pre">TypeError</span></tt> to
|
|
||||||
attempt encoding of keys that are not str, int, long, float or None. If
|
|
||||||
<em>skipkeys</em> is true, such items are simply skipped.</p>
|
|
||||||
<p>If <em>ensure_ascii</em> is true (the default), the output is guaranteed to be
|
|
||||||
<tt class="xref docutils literal"><span class="pre">str</span></tt> objects with all incoming unicode characters escaped. If
|
|
||||||
<em>ensure_ascii</em> is false, the output will be a unicode object.</p>
|
|
||||||
<p>If <em>check_circular</em> is true (the default), then lists, dicts, and custom
|
|
||||||
encoded objects will be checked for circular references during encoding to
|
|
||||||
prevent an infinite recursion (which would cause an <tt class="xref docutils literal"><span class="pre">OverflowError</span></tt>).
|
|
||||||
Otherwise, no such check takes place.</p>
|
|
||||||
<p>If <em>allow_nan</em> is true (the default), then <tt class="docutils literal"><span class="pre">NaN</span></tt>, <tt class="docutils literal"><span class="pre">Infinity</span></tt>, and
|
|
||||||
<tt class="docutils literal"><span class="pre">-Infinity</span></tt> will be encoded as such. This behavior is not JSON
|
|
||||||
specification compliant, but is consistent with most JavaScript based
|
|
||||||
encoders and decoders. Otherwise, it will be a <tt class="xref docutils literal"><span class="pre">ValueError</span></tt> to encode
|
|
||||||
such floats.</p>
|
|
||||||
<p>If <em>sort_keys</em> is true (not the default), then the output of dictionaries
|
|
||||||
will be sorted by key; this is useful for regression tests to ensure that
|
|
||||||
JSON serializations can be compared on a day-to-day basis.</p>
|
|
||||||
<p>If <em>indent</em> is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. <tt class="xref docutils literal"><span class="pre">None</span></tt> (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.</p>
|
|
||||||
<p>
|
|
||||||
<span class="versionmodified">Changed in version 2.1.0: </span>Changed <em>indent</em> from an integer number of spaces to a string.</p>
|
|
||||||
<p>If specified, <em>separators</em> should be an <tt class="docutils literal"><span class="pre">(item_separator,</span> <span class="pre">key_separator)</span></tt>
|
|
||||||
tuple. By default, <tt class="docutils literal"><span class="pre">(',</span> <span class="pre">',</span> <span class="pre">':</span> <span class="pre">')</span></tt> are used. To get the most compact JSON
|
|
||||||
representation, you should specify <tt class="docutils literal"><span class="pre">(',',</span> <span class="pre">':')</span></tt> to eliminate whitespace.</p>
|
|
||||||
<p>If specified, <em>default</em> should be a function that gets called for objects
|
|
||||||
that can’t otherwise be serialized. It should return a JSON encodable
|
|
||||||
version of the object or raise a <tt class="xref docutils literal"><span class="pre">TypeError</span></tt>.</p>
|
|
||||||
<p>If <em>encoding</em> is not <tt class="xref docutils literal"><span class="pre">None</span></tt>, then all input strings will be transformed
|
|
||||||
into unicode using that encoding prior to JSON-encoding. The default is
|
|
||||||
<tt class="docutils literal"><span class="pre">'utf-8'</span></tt>.</p>
|
|
||||||
<dl class="method">
|
|
||||||
<dt id="simplejson.JSONEncoder.default">
|
|
||||||
<!--[simplejson.JSONEncoder.default]--><tt class="descname">default</tt><big>(</big><em>o</em><big>)</big><a class="headerlink" href="#simplejson.JSONEncoder.default" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Implement this method in a subclass such that it returns a serializable
|
|
||||||
object for <em>o</em>, or calls the base implementation (to raise a
|
|
||||||
<tt class="xref docutils literal"><span class="pre">TypeError</span></tt>).</p>
|
|
||||||
<p>For example, to support arbitrary iterators, you could implement default
|
|
||||||
like this:</p>
|
|
||||||
<div class="highlight"><pre><span class="k">def</span> <span class="nf">default</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">o</span><span class="p">):</span>
|
|
||||||
<span class="k">try</span><span class="p">:</span>
|
|
||||||
<span class="n">iterable</span> <span class="o">=</span> <span class="nb">iter</span><span class="p">(</span><span class="n">o</span><span class="p">)</span>
|
|
||||||
<span class="k">except</span> <span class="ne">TypeError</span><span class="p">:</span>
|
|
||||||
<span class="k">pass</span>
|
|
||||||
<span class="k">else</span><span class="p">:</span>
|
|
||||||
<span class="k">return</span> <span class="nb">list</span><span class="p">(</span><span class="n">iterable</span><span class="p">)</span>
|
|
||||||
<span class="k">return</span> <span class="n">JSONEncoder</span><span class="o">.</span><span class="n">default</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">o</span><span class="p">)</span>
|
|
||||||
</pre></div>
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
<dl class="method">
|
|
||||||
<dt id="simplejson.JSONEncoder.encode">
|
|
||||||
<!--[simplejson.JSONEncoder.encode]--><tt class="descname">encode</tt><big>(</big><em>o</em><big>)</big><a class="headerlink" href="#simplejson.JSONEncoder.encode" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Return a JSON string representation of a Python data structure, <em>o</em>. For
|
|
||||||
example:</p>
|
|
||||||
<div class="highlight"><pre><span class="gp">>>> </span><span class="k">import</span> <span class="nn">simplejson</span> <span class="k">as</span> <span class="nn">json</span>
|
|
||||||
<span class="gp">>>> </span><span class="n">json</span><span class="o">.</span><span class="n">JSONEncoder</span><span class="p">()</span><span class="o">.</span><span class="n">encode</span><span class="p">({</span><span class="s">"foo"</span><span class="p">:</span> <span class="p">[</span><span class="s">"bar"</span><span class="p">,</span> <span class="s">"baz"</span><span class="p">]})</span>
|
|
||||||
<span class="go">'{"foo": ["bar", "baz"]}'</span>
|
|
||||||
</pre></div>
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
<dl class="method">
|
|
||||||
<dt id="simplejson.JSONEncoder.iterencode">
|
|
||||||
<!--[simplejson.JSONEncoder.iterencode]--><tt class="descname">iterencode</tt><big>(</big><em>o</em><big>)</big><a class="headerlink" href="#simplejson.JSONEncoder.iterencode" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Encode the given object, <em>o</em>, and yield each string representation as
|
|
||||||
available. For example:</p>
|
|
||||||
<div class="highlight"><pre><span class="k">for</span> <span class="n">chunk</span> <span class="ow">in</span> <span class="n">JSONEncoder</span><span class="p">()</span><span class="o">.</span><span class="n">iterencode</span><span class="p">(</span><span class="n">bigobject</span><span class="p">):</span>
|
|
||||||
<span class="n">mysocket</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">chunk</span><span class="p">)</span>
|
|
||||||
</pre></div>
|
|
||||||
<p>Note that <a title="simplejson.JSONEncoder.encode" class="reference internal" href="#simplejson.JSONEncoder.encode"><tt class="xref docutils literal"><span class="pre">encode()</span></tt></a> has much better performance than
|
|
||||||
<a title="simplejson.JSONEncoder.iterencode" class="reference internal" href="#simplejson.JSONEncoder.iterencode"><tt class="xref docutils literal"><span class="pre">iterencode()</span></tt></a>.</p>
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
<dl class="class">
|
|
||||||
<dt id="simplejson.JSONEncoderForHTML">
|
|
||||||
<!--[simplejson.JSONEncoderForHTML]-->class <tt class="descclassname">simplejson.</tt><tt class="descname">JSONEncoderForHTML</tt><big>(</big><span class="optional">[</span><em>skipkeys</em><span class="optional">[</span>, <em>ensure_ascii</em><span class="optional">[</span>, <em>check_circular</em><span class="optional">[</span>, <em>allow_nan</em><span class="optional">[</span>, <em>sort_keys</em><span class="optional">[</span>, <em>indent</em><span class="optional">[</span>, <em>separators</em><span class="optional">[</span>, <em>encoding</em><span class="optional">[</span>, <em>default</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#simplejson.JSONEncoderForHTML" title="Permalink to this definition">¶</a></dt>
|
|
||||||
<dd><p>Subclass of <a title="simplejson.JSONEncoder" class="reference internal" href="#simplejson.JSONEncoder"><tt class="xref docutils literal"><span class="pre">JSONEncoder</span></tt></a> that escapes &, <, and > for embedding in HTML.</p>
|
|
||||||
<p>
|
|
||||||
<span class="versionmodified">Changed in version 2.1.0: </span>New in 2.1.0</p>
|
|
||||||
</dd></dl>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="sphinxsidebar">
|
|
||||||
<div class="sphinxsidebarwrapper">
|
|
||||||
<h3>Table Of Contents</h3>
|
|
||||||
<ul>
|
|
||||||
<li><a class="reference external" href=""><tt class="docutils literal"><span class="pre">simplejson</span></tt> — JSON encoder and decoder</a><ul>
|
|
||||||
<li><a class="reference external" href="#basic-usage">Basic Usage</a></li>
|
|
||||||
<li><a class="reference external" href="#encoders-and-decoders">Encoders and decoders</a></li>
|
|
||||||
</ul>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
<h3>This Page</h3>
|
|
||||||
<ul class="this-page-menu">
|
|
||||||
<li><a href="_sources/index.txt">Show Source</a></li>
|
|
||||||
</ul>
|
|
||||||
<h3>Quick search</h3>
|
|
||||||
<form class="search" action="search.html" method="get">
|
|
||||||
<input type="text" name="q" size="18" /> <input type="submit" value="Go" />
|
|
||||||
<input type="hidden" name="check_keywords" value="yes" />
|
|
||||||
<input type="hidden" name="area" value="default" />
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="clearer"></div>
|
|
||||||
</div>
|
|
||||||
<div class="related">
|
|
||||||
<h3>Navigation</h3>
|
|
||||||
<ul>
|
|
||||||
<li class="right" style="margin-right: 10px">
|
|
||||||
<a href="genindex.html" title="General Index"
|
|
||||||
accesskey="I">index</a></li>
|
|
||||||
<li><a href="">simplejson v2.1.1 documentation</a> »</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div class="footer">
|
|
||||||
© Copyright 2008, Bob Ippolito.
|
|
||||||
Last updated on Mar 31, 2010.
|
|
||||||
Created using <a href="http://sphinx.pocoo.org/">Sphinx</a>.
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@ -1,16 +0,0 @@
|
||||||
# Sphinx inventory version 1
|
|
||||||
# Project: simplejson
|
|
||||||
# Version: 2.1
|
|
||||||
simplejson mod index.html
|
|
||||||
simplejson.JSONEncoderForHTML class index.html
|
|
||||||
simplejson.JSONDecoder.raw_decode method index.html
|
|
||||||
simplejson.dump function index.html
|
|
||||||
simplejson.loads function index.html
|
|
||||||
simplejson.JSONDecoder class index.html
|
|
||||||
simplejson.dumps function index.html
|
|
||||||
simplejson.JSONDecoder.decode method index.html
|
|
||||||
simplejson.JSONEncoder.default method index.html
|
|
||||||
simplejson.load function index.html
|
|
||||||
simplejson.JSONEncoder class index.html
|
|
||||||
simplejson.JSONEncoder.iterencode method index.html
|
|
||||||
simplejson.JSONEncoder.encode method index.html
|
|
|
@ -1,81 +0,0 @@
|
||||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
|
|
||||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"><html xmlns="http://www.w3.org/1999/xhtml">
|
|
||||||
<head>
|
|
||||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
|
||||||
<title>Search — simplejson v2.1.1 documentation</title>
|
|
||||||
<link rel="stylesheet" href="_static/default.css" type="text/css" />
|
|
||||||
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
|
|
||||||
<script type="text/javascript">
|
|
||||||
var DOCUMENTATION_OPTIONS = {
|
|
||||||
URL_ROOT: '',
|
|
||||||
VERSION: '2.1.1',
|
|
||||||
COLLAPSE_MODINDEX: false,
|
|
||||||
FILE_SUFFIX: '.html'
|
|
||||||
};
|
|
||||||
</script>
|
|
||||||
<script type="text/javascript" src="_static/jquery.js"></script>
|
|
||||||
<script type="text/javascript" src="_static/interface.js"></script>
|
|
||||||
<script type="text/javascript" src="_static/doctools.js"></script>
|
|
||||||
<link rel="contents" title="Global table of contents" href="contents.html" />
|
|
||||||
<link rel="index" title="Global index" href="genindex.html" />
|
|
||||||
<link rel="search" title="Search" href="" />
|
|
||||||
<link rel="top" title="simplejson v2.1.1 documentation" href="index.html" />
|
|
||||||
<script type="text/javascript" src="_static/searchtools.js"></script>
|
|
||||||
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<div class="related">
|
|
||||||
<h3>Navigation</h3>
|
|
||||||
<ul>
|
|
||||||
<li class="right" style="margin-right: 10px">
|
|
||||||
<a href="genindex.html" title="General Index"
|
|
||||||
accesskey="I">index</a></li>
|
|
||||||
<li><a href="index.html">simplejson v2.1.1 documentation</a> »</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div class="document">
|
|
||||||
<div class="documentwrapper">
|
|
||||||
<div class="bodywrapper">
|
|
||||||
<div class="body">
|
|
||||||
|
|
||||||
<h1 id="search-documentation">Search</h1>
|
|
||||||
<p>
|
|
||||||
From here you can search these documents. Enter your search
|
|
||||||
words into the box below and click "search". Note that the search
|
|
||||||
function will automatically search for all of the words. Pages
|
|
||||||
containing fewer words won't appear in the result list.
|
|
||||||
</p>
|
|
||||||
<form action="" method="get">
|
|
||||||
<input type="text" name="q" value="" />
|
|
||||||
<input type="submit" value="search" />
|
|
||||||
</form>
|
|
||||||
|
|
||||||
<div id="search-results">
|
|
||||||
|
|
||||||
</div>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="sphinxsidebar">
|
|
||||||
<div class="sphinxsidebarwrapper">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="clearer"></div>
|
|
||||||
</div>
|
|
||||||
<div class="related">
|
|
||||||
<h3>Navigation</h3>
|
|
||||||
<ul>
|
|
||||||
<li class="right" style="margin-right: 10px">
|
|
||||||
<a href="genindex.html" title="General Index"
|
|
||||||
accesskey="I">index</a></li>
|
|
||||||
<li><a href="index.html">simplejson v2.1.1 documentation</a> »</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div class="footer">
|
|
||||||
© Copyright 2008, Bob Ippolito.
|
|
||||||
Last updated on Mar 31, 2010.
|
|
||||||
Created using <a href="http://sphinx.pocoo.org/">Sphinx</a>.
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@ -1 +0,0 @@
|
||||||
Search.setIndex({desctypes:{"0":"class","1":"method","2":"function"},terms:{represent:0,all:0,code:0,skip:0,interchang:0,signific:0,follow:0,compact:0,typeerror:0,decim:0,rstrip:0,sens:0,spec:0,isinst:0,liter:0,everi:0,string:0,raw_decod:0,fals:0,jsonencod:0,level:0,list:0,iter:0,"try":0,item:0,prevent:0,pass:0,compat:0,index:0,compar:0,current:0,version:0,"new":0,method:0,item_separ:0,elimin:0,full:0,abov:0,valu:0,num_str:0,convert:0,precis:0,prior:0,chang:0,overrid:0,modul:0,api:0,coercion:0,select:0,highli:0,from:0,would:0,memori:0,call:0,recommend:0,type:0,more:0,sort:0,unescap:0,must:0,none:0,join:0,bigobject:0,work:0,skipkei:0,wors:0,can:0,def:0,control:0,stream:0,accept:0,object_pairs_hook:0,serial:0,alwai:[],end:0,newlin:0,anoth:0,write:0,instead:0,simpl:0,circular:0,recogn:0,earlier:0,mai:0,data:0,demonstr:0,attempt:0,correspond:0,marshal:0,caus:0,jsonencoderforhtml:0,maintain:0,allow:0,order:0,becaus:0,hierarchi:0,paramet:0,better:0,yaml:0,html:0,them:0,"return":0,thei:0,python:0,safe:0,dai:0,allow_nan:0,jsondecod:0,superset:0,name:0,anyth:0,edit:0,separ:0,getvalu:0,each:0,unicod:0,mean:0,subset:0,chunk:0,wrap:0,expect:0,special:0,out:0,"3rd":0,space:0,content:0,reader:0,print:0,after:0,infin:0,standard:0,reason:0,base:0,dictionari:0,element:0,org:0,basi:0,indent:0,could:0,place:0,outsid:0,regress:0,first:0,rang:0,arrai:0,number:0,echo:0,unlik:0,prioriti:0,given:0,reli:0,conveni:0,shell:0,option:0,tool:0,specifi:0,pars:0,than:0,keyword:0,whenev:0,provid:0,structur:0,charact:0,str:0,pre:0,encode_complex:0,ani:0,have:0,"null":0,equival:0,self:0,note:0,also:0,exampl:0,take:0,which:0,even:0,begin:0,unless:0,normal:0,object:0,most:0,getread:0,pair:0,"class":0,doe:0,determin:0,serializ:0,speedup:0,syntax:0,find:0,onli:0,explicitli:0,parse_float:0,pretti:0,solut:0,should:0,iterencod:0,dict:0,object_hook:0,get:0,familiar:0,stop:0,repr:0,nativ:0,fastest:0,key_separ:0,bar:0,baz:0,yield:0,contain:0,where:0,valid:0,set:0,dump:0,frame:0,datatyp:0,result:0,best:0,subject:0,infinit:0,kei:0,dict_separ:0,written:0,"import":0,latin:0,extend:0,parse_const:0,javascript:0,extens:0,embed:0,addit:0,delimit:0,instanc:0,mani:0,ecma:0,load:0,simpli:0,rpc:0,getwrit:0,"__complex__":0,json:0,much:0,interpret:0,basic:0,valueerror:0,imag:0,argument:0,understand:0,input:0,sort_kei:0,those:[],"case":[],x08ar:0,properti:0,defin:0,behavior:0,error:0,ordereddict:0,advantag:0,kwarg:0,lightweight:0,incom:0,ascii:0,u1234:0,perform:0,make:0,same:0,member:0,complex:0,decod:0,document:0,http:0,optim:0,nest:0,effect:0,rais:0,user:0,extern:0,implement:0,appropri:0,well:0,pickl:0,without:0,thi:0,protocol:0,mysocket:0,rest:0,as_complex:0,parse_int:0,expos:0,hint:0,except:0,codec:0,els:0,real:0,format:0,read:0,recurs:0,insert:0,like:0,specif:0,arbitrari:0,docutil:0,whitespac:0,integ:0,collect:0,output:0,encount:0,some:0,check_circular:0,pariti:0,superclass:0,guarante:0,librari:0,subclass:0,when:0,leav:0,foo:0,refer:0,usag:0,dct:0,obj:0,column:0,splitlin:0,constructor:0,produc:0,"float":0,encod:0,ensur:0,your:0,span:0,complianc:0,support:0,transform:0,"long":0,custom:0,avail:0,strict:0,compliant:0,overflowerror:0,"function":0,simplejson:0,tupl:0,use_decim:0,translat:0,line:0,"true":0,notat:0,utf:0,consist:0,possibl:0,"default":0,otherwis:0,ensure_ascii:0,featur:0,"int":0,dure:0,parser:0,"char":0,extran:0,file:0,inf:0,check:0,nan:0,invalid:0,other:0,bool:0,rememb:0,test:0,you:0,deseri:0,repeat:0,stringio:0,rule:0,escap:0,backward:0},titles:["<tt class=\"docutils literal\"><span class=\"pre\">simplejson</span></tt> — JSON 
encoder and decoder"],modules:{simplejson:0},descrefs:{"simplejson.JSONEncoder":{"default":[0,1],encode:[0,1],iterencode:[0,1]},"simplejson.JSONDecoder":{decode:[0,1],raw_decode:[0,1]},simplejson:{load:[0,2],JSONEncoder:[0,0],dump:[0,2],JSONDecoder:[0,0],dumps:[0,2],JSONEncoderForHTML:[0,0],loads:[0,2]}},filenames:["index"]})
|
|
File differences are hidden because one or more lines are too long
|
@ -1,284 +0,0 @@
|
||||||
#!python
|
|
||||||
"""Bootstrap setuptools installation
|
|
||||||
|
|
||||||
If you want to use setuptools in your package's setup.py, just include this
|
|
||||||
file in the same directory with it, and add this to the top of your setup.py::
|
|
||||||
|
|
||||||
from ez_setup import use_setuptools
|
|
||||||
use_setuptools()
|
|
||||||
|
|
||||||
If you want to require a specific version of setuptools, set a download
|
|
||||||
mirror, or use an alternate download directory, you can do so by supplying
|
|
||||||
the appropriate options to ``use_setuptools()``.
|
|
||||||
|
|
||||||
This file can also be run as a script to install or upgrade setuptools.
|
|
||||||
"""
|
|
||||||
import sys
|
|
||||||
DEFAULT_VERSION = "0.6c11"
|
|
||||||
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
|
|
||||||
|
|
||||||
md5_data = {
|
|
||||||
'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
|
|
||||||
'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
|
|
||||||
'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
|
|
||||||
'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
|
|
||||||
'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
|
|
||||||
'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
|
|
||||||
'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
|
|
||||||
'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
|
|
||||||
'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
|
|
||||||
'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
|
|
||||||
'setuptools-0.6c10-py2.3.egg': 'ce1e2ab5d3a0256456d9fc13800a7090',
|
|
||||||
'setuptools-0.6c10-py2.4.egg': '57d6d9d6e9b80772c59a53a8433a5dd4',
|
|
||||||
'setuptools-0.6c10-py2.5.egg': 'de46ac8b1c97c895572e5e8596aeb8c7',
|
|
||||||
'setuptools-0.6c10-py2.6.egg': '58ea40aef06da02ce641495523a0b7f5',
|
|
||||||
'setuptools-0.6c11-py2.3.egg': '2baeac6e13d414a9d28e7ba5b5a596de',
|
|
||||||
'setuptools-0.6c11-py2.4.egg': 'bd639f9b0eac4c42497034dec2ec0c2b',
|
|
||||||
'setuptools-0.6c11-py2.5.egg': '64c94f3bf7a72a13ec83e0b24f2749b2',
|
|
||||||
'setuptools-0.6c11-py2.6.egg': 'bfa92100bd772d5a213eedd356d64086',
|
|
||||||
'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
|
|
||||||
'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
|
|
||||||
'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
|
|
||||||
'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
|
|
||||||
'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
|
|
||||||
'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
|
|
||||||
'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
|
|
||||||
'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
|
|
||||||
'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
|
|
||||||
'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
|
|
||||||
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
|
|
||||||
'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
|
|
||||||
'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
|
|
||||||
'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
|
|
||||||
'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
|
|
||||||
'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
|
|
||||||
'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
|
|
||||||
'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
|
|
||||||
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
|
|
||||||
'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
|
|
||||||
'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
|
|
||||||
'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
|
|
||||||
'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
|
|
||||||
'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
|
|
||||||
}
|
|
||||||
|
|
||||||
import sys, os
|
|
||||||
try: from hashlib import md5
|
|
||||||
except ImportError: from md5 import md5
|
|
||||||
|
|
||||||
def _validate_md5(egg_name, data):
|
|
||||||
if egg_name in md5_data:
|
|
||||||
digest = md5(data).hexdigest()
|
|
||||||
if digest != md5_data[egg_name]:
|
|
||||||
print >>sys.stderr, (
|
|
||||||
"md5 validation of %s failed! (Possible download problem?)"
|
|
||||||
% egg_name
|
|
||||||
)
|
|
||||||
sys.exit(2)
|
|
||||||
return data
|
|
||||||
|
|
||||||
def use_setuptools(
|
|
||||||
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
|
|
||||||
download_delay=15
|
|
||||||
):
|
|
||||||
"""Automatically find/download setuptools and make it available on sys.path
|
|
||||||
|
|
||||||
`version` should be a valid setuptools version number that is available
|
|
||||||
as an egg for download under the `download_base` URL (which should end with
|
|
||||||
a '/'). `to_dir` is the directory where setuptools will be downloaded, if
|
|
||||||
it is not already available. If `download_delay` is specified, it should
|
|
||||||
be the number of seconds that will be paused before initiating a download,
|
|
||||||
should one be required. If an older version of setuptools is installed,
|
|
||||||
this routine will print a message to ``sys.stderr`` and raise SystemExit in
|
|
||||||
an attempt to abort the calling script.
|
|
||||||
"""
|
|
||||||
was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
|
|
||||||
def do_download():
|
|
||||||
egg = download_setuptools(version, download_base, to_dir, download_delay)
|
|
||||||
sys.path.insert(0, egg)
|
|
||||||
import setuptools; setuptools.bootstrap_install_from = egg
|
|
||||||
try:
|
|
||||||
import pkg_resources
|
|
||||||
except ImportError:
|
|
||||||
return do_download()
|
|
||||||
try:
|
|
||||||
pkg_resources.require("setuptools>="+version); return
|
|
||||||
except pkg_resources.VersionConflict, e:
|
|
||||||
if was_imported:
|
|
||||||
print >>sys.stderr, (
|
|
||||||
"The required version of setuptools (>=%s) is not available, and\n"
|
|
||||||
"can't be installed while this script is running. Please install\n"
|
|
||||||
" a more recent version first, using 'easy_install -U setuptools'."
|
|
||||||
"\n\n(Currently using %r)"
|
|
||||||
) % (version, e.args[0])
|
|
||||||
sys.exit(2)
|
|
||||||
else:
|
|
||||||
del pkg_resources, sys.modules['pkg_resources'] # reload ok
|
|
||||||
return do_download()
|
|
||||||
except pkg_resources.DistributionNotFound:
|
|
||||||
return do_download()
|
|
||||||
|
|
||||||
def download_setuptools(
|
|
||||||
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
|
|
||||||
delay = 15
|
|
||||||
):
|
|
||||||
"""Download setuptools from a specified location and return its filename
|
|
||||||
|
|
||||||
`version` should be a valid setuptools version number that is available
|
|
||||||
as an egg for download under the `download_base` URL (which should end
|
|
||||||
with a '/'). `to_dir` is the directory where the egg will be downloaded.
|
|
||||||
`delay` is the number of seconds to pause before an actual download attempt.
|
|
||||||
"""
|
|
||||||
import urllib2, shutil
|
|
||||||
egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
|
|
||||||
url = download_base + egg_name
|
|
||||||
saveto = os.path.join(to_dir, egg_name)
|
|
||||||
src = dst = None
|
|
||||||
if not os.path.exists(saveto): # Avoid repeated downloads
|
|
||||||
try:
|
|
||||||
from distutils import log
|
|
||||||
if delay:
|
|
||||||
log.warn("""
|
|
||||||
---------------------------------------------------------------------------
|
|
||||||
This script requires setuptools version %s to run (even to display
|
|
||||||
help). I will attempt to download it for you (from
|
|
||||||
%s), but
|
|
||||||
you may need to enable firewall access for this script first.
|
|
||||||
I will start the download in %d seconds.
|
|
||||||
|
|
||||||
(Note: if this machine does not have network access, please obtain the file
|
|
||||||
|
|
||||||
%s
|
|
||||||
|
|
||||||
and place it in this directory before rerunning this script.)
|
|
||||||
---------------------------------------------------------------------------""",
|
|
||||||
version, download_base, delay, url
|
|
||||||
); from time import sleep; sleep(delay)
|
|
||||||
log.warn("Downloading %s", url)
|
|
||||||
src = urllib2.urlopen(url)
|
|
||||||
# Read/write all in one block, so we don't create a corrupt file
|
|
||||||
# if the download is interrupted.
|
|
||||||
data = _validate_md5(egg_name, src.read())
|
|
||||||
dst = open(saveto,"wb"); dst.write(data)
|
|
||||||
finally:
|
|
||||||
if src: src.close()
|
|
||||||
if dst: dst.close()
|
|
||||||
return os.path.realpath(saveto)
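# Illustrative usage (mirroring the module docstring above; names are
# placeholders, not executed here): a project's setup.py would typically
# bootstrap setuptools like this before importing it:
#
#     from ez_setup import use_setuptools
#     use_setuptools()
#     from setuptools import setup
#     setup(name='example', version='0.1')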
def main(argv, version=DEFAULT_VERSION):
|
|
||||||
"""Install or upgrade setuptools and EasyInstall"""
|
|
||||||
try:
|
|
||||||
import setuptools
|
|
||||||
except ImportError:
|
|
||||||
egg = None
|
|
||||||
try:
|
|
||||||
egg = download_setuptools(version, delay=0)
|
|
||||||
sys.path.insert(0,egg)
|
|
||||||
from setuptools.command.easy_install import main
|
|
||||||
return main(list(argv)+[egg]) # we're done here
|
|
||||||
finally:
|
|
||||||
if egg and os.path.exists(egg):
|
|
||||||
os.unlink(egg)
|
|
||||||
else:
|
|
||||||
if setuptools.__version__ == '0.0.1':
|
|
||||||
print >>sys.stderr, (
|
|
||||||
"You have an obsolete version of setuptools installed. Please\n"
|
|
||||||
"remove it from your system entirely before rerunning this script."
|
|
||||||
)
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
req = "setuptools>="+version
|
|
||||||
import pkg_resources
|
|
||||||
try:
|
|
||||||
pkg_resources.require(req)
|
|
||||||
except pkg_resources.VersionConflict:
|
|
||||||
try:
|
|
||||||
from setuptools.command.easy_install import main
|
|
||||||
except ImportError:
|
|
||||||
from easy_install import main
|
|
||||||
main(list(argv)+[download_setuptools(delay=0)])
|
|
||||||
sys.exit(0) # try to force an exit
|
|
||||||
else:
|
|
||||||
if argv:
|
|
||||||
from setuptools.command.easy_install import main
|
|
||||||
main(argv)
|
|
||||||
else:
|
|
||||||
print "Setuptools version",version,"or greater has been installed."
|
|
||||||
print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
|
|
||||||
|
|
||||||
def update_md5(filenames):
|
|
||||||
"""Update our built-in md5 registry"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
for name in filenames:
|
|
||||||
base = os.path.basename(name)
|
|
||||||
f = open(name,'rb')
|
|
||||||
md5_data[base] = md5(f.read()).hexdigest()
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
data = [" %r: %r,\n" % it for it in md5_data.items()]
|
|
||||||
data.sort()
|
|
||||||
repl = "".join(data)
|
|
||||||
|
|
||||||
import inspect
|
|
||||||
srcfile = inspect.getsourcefile(sys.modules[__name__])
|
|
||||||
f = open(srcfile, 'rb'); src = f.read(); f.close()
|
|
||||||
|
|
||||||
match = re.search("\nmd5_data = {\n([^}]+)}", src)
|
|
||||||
if not match:
|
|
||||||
print >>sys.stderr, "Internal error!"
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
src = src[:match.start(1)] + repl + src[match.end(1):]
|
|
||||||
f = open(srcfile,'w')
|
|
||||||
f.write(src)
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__=='__main__':
|
|
||||||
if len(sys.argv)>2 and sys.argv[1]=='--md5update':
|
|
||||||
update_md5(sys.argv[2:])
|
|
||||||
else:
|
|
||||||
main(sys.argv[1:])
@ -1,503 +0,0 @@
|
||||||
:mod:`simplejson` --- JSON encoder and decoder
|
|
||||||
==============================================
|
|
||||||
|
|
||||||
.. module:: simplejson
|
|
||||||
:synopsis: Encode and decode the JSON format.
|
|
||||||
.. moduleauthor:: Bob Ippolito <bob@redivi.com>
|
|
||||||
.. sectionauthor:: Bob Ippolito <bob@redivi.com>
|
|
||||||
|
|
||||||
JSON (JavaScript Object Notation) <http://json.org> is a subset of JavaScript
|
|
||||||
syntax (ECMA-262 3rd edition) used as a lightweight data interchange format.
|
|
||||||
|
|
||||||
:mod:`simplejson` exposes an API familiar to users of the standard library
|
|
||||||
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
|
|
||||||
version of the :mod:`json` library contained in Python 2.6, but maintains
|
|
||||||
compatibility with Python 2.5 and (currently) has
|
|
||||||
significant performance advantages, even without using the optional C
|
|
||||||
extension for speedups.
|
|
||||||
|
|
||||||
Encoding basic Python object hierarchies::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
|
||||||
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
|
||||||
>>> print json.dumps("\"foo\bar")
|
|
||||||
"\"foo\bar"
|
|
||||||
>>> print json.dumps(u'\u1234')
|
|
||||||
"\u1234"
|
|
||||||
>>> print json.dumps('\\')
|
|
||||||
"\\"
|
|
||||||
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
|
|
||||||
{"a": 0, "b": 0, "c": 0}
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO()
|
|
||||||
>>> json.dump(['streaming API'], io)
|
|
||||||
>>> io.getvalue()
|
|
||||||
'["streaming API"]'
|
|
||||||
|
|
||||||
Compact encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
|
|
||||||
'[1,2,3,{"4":5,"6":7}]'
|
|
||||||
|
|
||||||
Pretty printing::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4 * ' ')
|
|
||||||
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
|
|
||||||
{
|
|
||||||
"4": 5,
|
|
||||||
"6": 7
|
|
||||||
}
|
|
||||||
|
|
||||||
Decoding JSON::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
|
|
||||||
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
|
||||||
True
|
|
||||||
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
|
|
||||||
True
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO('["streaming API"]')
|
|
||||||
>>> json.load(io)[0] == 'streaming API'
|
|
||||||
True
|
|
||||||
|
|
||||||
Using Decimal instead of float::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> from decimal import Decimal
|
|
||||||
>>> json.loads('1.1', use_decimal=True) == Decimal('1.1')
|
|
||||||
True
|
|
||||||
>>> json.dumps(Decimal('1.1'), use_decimal=True) == '1.1'
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object decoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def as_complex(dct):
|
|
||||||
... if '__complex__' in dct:
|
|
||||||
... return complex(dct['real'], dct['imag'])
|
|
||||||
... return dct
|
|
||||||
...
|
|
||||||
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
|
||||||
... object_hook=as_complex)
|
|
||||||
(1+2j)
|
|
||||||
>>> import decimal
|
|
||||||
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def encode_complex(obj):
|
|
||||||
... if isinstance(obj, complex):
|
|
||||||
... return [obj.real, obj.imag]
|
|
||||||
...     raise TypeError(repr(obj) + " is not JSON serializable")
|
|
||||||
...
|
|
||||||
>>> json.dumps(2 + 1j, default=encode_complex)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
|
|
||||||
|
|
||||||
.. highlight:: none
|
|
||||||
|
|
||||||
Using :mod:`simplejson.tool` from the shell to validate and pretty-print::
|
|
||||||
|
|
||||||
$ echo '{"json":"obj"}' | python -m simplejson.tool
|
|
||||||
{
|
|
||||||
"json": "obj"
|
|
||||||
}
|
|
||||||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
|
|
||||||
Expecting property name: line 1 column 2 (char 2)
|
|
||||||
|
|
||||||
.. highlight:: python
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
The JSON produced by this module's default settings is a subset of
|
|
||||||
YAML, so it may be used as a serializer for that as well.
|
|
||||||
|
|
||||||
|
|
||||||
Basic Usage
|
|
||||||
-----------
|
|
||||||
|
|
||||||
.. function:: dump(obj, fp[, skipkeys[, ensure_ascii[, check_circular[, allow_nan[, cls[, indent[, separators[, encoding[, default[, use_decimal[, **kw]]]]]]]]]]])
|
|
||||||
|
|
||||||
Serialize *obj* as a JSON formatted stream to *fp* (a ``.write()``-supporting
|
|
||||||
file-like object).
|
|
||||||
|
|
||||||
If *skipkeys* is true (default: ``False``), then dict keys that are not
|
|
||||||
of a basic type (:class:`str`, :class:`unicode`, :class:`int`, :class:`long`,
|
|
||||||
:class:`float`, :class:`bool`, ``None``) will be skipped instead of raising a
|
|
||||||
:exc:`TypeError`.
|
|
||||||
|
|
||||||
If *ensure_ascii* is false (default: ``True``), then some chunks written
|
|
||||||
to *fp* may be :class:`unicode` instances, subject to normal Python
|
|
||||||
:class:`str` to :class:`unicode` coercion rules. Unless ``fp.write()``
|
|
||||||
explicitly understands :class:`unicode` (as in :func:`codecs.getwriter`) this
|
|
||||||
is likely to cause an error. It's best to leave the default settings, because
|
|
||||||
they are safe and it is highly optimized.
|
|
||||||
|
|
||||||
If *check_circular* is false (default: ``True``), then the circular
|
|
||||||
reference check for container types will be skipped and a circular reference
|
|
||||||
will result in an :exc:`OverflowError` (or worse).
|
|
||||||
|
|
||||||
If *allow_nan* is false (default: ``True``), then it will be a
|
|
||||||
:exc:`ValueError` to serialize out of range :class:`float` values (``nan``,
|
|
||||||
``inf``, ``-inf``) in strict compliance of the JSON specification.
|
|
||||||
If *allow_nan* is true, their JavaScript equivalents will be used
|
|
||||||
(``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If *indent* is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. ``None`` (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Changed *indent* from an integer number of spaces to a string.
|
|
||||||
|
|
||||||
If specified, *separators* should be an ``(item_separator, dict_separator)``
|
|
||||||
tuple. By default, ``(', ', ': ')`` are used. To get the most compact JSON
|
|
||||||
representation, you should specify ``(',', ':')`` to eliminate whitespace.
|
|
||||||
|
|
||||||
*encoding* is the character encoding for str instances, default is
|
|
||||||
``'utf-8'``.
|
|
||||||
|
|
||||||
*default(obj)* is a function that should return a serializable version of
|
|
||||||
*obj* or raise :exc:`TypeError`. The default simply raises :exc:`TypeError`.
|
|
||||||
|
|
||||||
To use a custom :class:`JSONEncoder` subclass (e.g. one that overrides the
|
|
||||||
:meth:`default` method to serialize additional types), specify it with the
|
|
||||||
*cls* kwarg.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then :class:`decimal.Decimal`
|
|
||||||
will be natively serialized to JSON with full precision.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
*use_decimal* is new in 2.1.0.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
JSON is not a framed protocol so unlike :mod:`pickle` or :mod:`marshal` it
|
|
||||||
does not make sense to serialize more than one JSON document without some
|
|
||||||
container protocol to delimit them.
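   For example (an illustrative sketch, not part of the original documentation),
   one simple container convention is one JSON document per line::

      >>> import simplejson as json
      >>> records = [{'a': 1}, {'b': 2}]
      >>> print '\n'.join(json.dumps(r, sort_keys=True) for r in records)
      {"a": 1}
      {"b": 2}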
|
|
||||||
|
|
||||||
|
|
||||||
.. function:: dumps(obj[, skipkeys[, ensure_ascii[, check_circular[, allow_nan[, cls[, indent[, separators[, encoding[, default[, use_decimal[, **kw]]]]]]]]]]])
|
|
||||||
|
|
||||||
Serialize *obj* to a JSON formatted :class:`str`.
|
|
||||||
|
|
||||||
If *ensure_ascii* is false, then the return value will be a
|
|
||||||
:class:`unicode` instance. The other arguments have the same meaning as in
|
|
||||||
:func:`dump`. Note that the default *ensure_ascii* setting has much
|
|
||||||
better performance.
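   For illustration (the reprs below are the usual Python 2 forms)::

      >>> import simplejson as json
      >>> json.dumps(u'\u00e9')
      '"\\u00e9"'
      >>> json.dumps(u'\u00e9', ensure_ascii=False)
      u'"\xe9"'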
|
|
||||||
|
|
||||||
|
|
||||||
.. function:: load(fp[, encoding[, cls[, object_hook[, parse_float[, parse_int[, parse_constant[, object_pairs_hook[, use_decimal[, **kw]]]]]]]]])
|
|
||||||
|
|
||||||
Deserialize *fp* (a ``.read()``-supporting file-like object containing a JSON
|
|
||||||
document) to a Python object.
|
|
||||||
|
|
||||||
If the contents of *fp* are encoded with an ASCII based encoding other than
|
|
||||||
UTF-8 (e.g. latin-1), then an appropriate *encoding* name must be specified.
|
|
||||||
Encodings that are not ASCII based (such as UCS-2) are not allowed, and
|
|
||||||
should be wrapped with ``codecs.getreader(fp)(encoding)``, or simply decoded
|
|
||||||
to a :class:`unicode` object and passed to :func:`loads`. The default
|
|
||||||
setting of ``'utf-8'`` is fastest and should be used whenever possible.
|
|
||||||
|
|
||||||
If *fp.read()* returns :class:`str` then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as :class:`str` for performance and
|
|
||||||
memory reasons. If your code expects only :class:`unicode` the appropriate
|
|
||||||
solution is to wrap fp with a reader as demonstrated above.
|
|
||||||
|
|
||||||
*object_hook* is an optional function that will be called with the result of
|
|
||||||
any object literal decode (a :class:`dict`). The return value of
|
|
||||||
*object_hook* will be used instead of the :class:`dict`. This feature can be used
|
|
||||||
to implement custom decoders (e.g. JSON-RPC class hinting).
|
|
||||||
|
|
||||||
*object_pairs_hook* is an optional function that will be called with the
|
|
||||||
result of any object literal decode with an ordered list of pairs. The
|
|
||||||
return value of *object_pairs_hook* will be used instead of the
|
|
||||||
:class:`dict`. This feature can be used to implement custom decoders that
|
|
||||||
rely on the order that the key and value pairs are decoded (for example,
|
|
||||||
:class:`collections.OrderedDict` will remember the order of insertion). If
|
|
||||||
*object_hook* is also defined, the *object_pairs_hook* takes priority.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Added support for *object_pairs_hook*.
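   For instance (an illustrative snippet using :func:`loads`, which accepts the
   same hook; :class:`collections.OrderedDict` assumes Python 2.7)::

      >>> import simplejson as json
      >>> from collections import OrderedDict
      >>> d = json.loads('{"b": 1, "a": 2}', object_pairs_hook=OrderedDict)
      >>> list(d.keys()) == ['b', 'a']
      True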
|
|
||||||
|
|
||||||
*parse_float*, if specified, will be called with the string of every JSON
|
|
||||||
float to be decoded. By default, this is equivalent to ``float(num_str)``.
|
|
||||||
This can be used to use another datatype or parser for JSON floats
|
|
||||||
(e.g. :class:`decimal.Decimal`).
|
|
||||||
|
|
||||||
*parse_int*, if specified, will be called with the string of every JSON int
|
|
||||||
to be decoded. By default, this is equivalent to ``int(num_str)``. This can
|
|
||||||
be used to use another datatype or parser for JSON integers
|
|
||||||
(e.g. :class:`float`).
|
|
||||||
|
|
||||||
*parse_constant*, if specified, will be called with one of the following
|
|
||||||
strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be used to
|
|
||||||
raise an exception if invalid JSON numbers are encountered.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then *parse_float* is set to
|
|
||||||
:class:`decimal.Decimal`. This is a convenience for parity with the
|
|
||||||
:func:`dump` parameter.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
*use_decimal* is new in 2.1.0.
|
|
||||||
|
|
||||||
To use a custom :class:`JSONDecoder` subclass, specify it with the ``cls``
|
|
||||||
kwarg. Additional keyword arguments will be passed to the constructor of the
|
|
||||||
class.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
:func:`load` will read the rest of the file-like object as a string and
|
|
||||||
then call :func:`loads`. It does not stop at the end of the first valid
|
|
||||||
JSON document it finds and it will raise an error if there is anything
|
|
||||||
other than whitespace after the document. Except for files containing
|
|
||||||
only one JSON document, it is recommended to use :func:`loads`.
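A minimal usage sketch (illustrative only, using an in-memory file object)::

>>> import simplejson as json
>>> from decimal import Decimal
>>> from StringIO import StringIO
>>> json.load(StringIO('[1.1, 2]'), use_decimal=True) == [Decimal('1.1'), 2]
True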
|
|
||||||
|
|
||||||
|
|
||||||
.. function:: loads(s[, encoding[, cls[, object_hook[, parse_float[, parse_int[, parse_constant[, object_pairs_hook[, use_decimal[, **kw]]]]]]]]])
|
|
||||||
|
|
||||||
Deserialize *s* (a :class:`str` or :class:`unicode` instance containing a JSON
|
|
||||||
document) to a Python object.
|
|
||||||
|
|
||||||
If *s* is a :class:`str` instance and is encoded with an ASCII based encoding
|
|
||||||
other than UTF-8 (e.g. latin-1), then an appropriate *encoding* name must be
|
|
||||||
specified. Encodings that are not ASCII based (such as UCS-2) are not
|
|
||||||
allowed and should be decoded to :class:`unicode` first.
|
|
||||||
|
|
||||||
If *s* is a :class:`str` then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as :class:`str` for performance and
|
|
||||||
memory reasons. If your code expects only :class:`unicode` the appropriate
|
|
||||||
solution is to decode *s* to :class:`unicode` prior to calling :func:`loads`.
|
|
||||||
|
|
||||||
The other arguments have the same meaning as in :func:`load`.
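For example, key order can be preserved with the bundled :class:`OrderedDict`
(an illustrative sketch, not part of the original reference)::

>>> import simplejson as json
>>> d = json.loads('{"b": 1, "a": 2}', object_pairs_hook=json.OrderedDict)
>>> d.keys() == ['b', 'a']
True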
|
|
||||||
|
|
||||||
|
|
||||||
Encoders and decoders
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
.. class:: JSONDecoder([encoding[, object_hook[, parse_float[, parse_int[, parse_constant[, object_pairs_hook[, strict]]]]]]])
|
|
||||||
|
|
||||||
Simple JSON decoder.
|
|
||||||
|
|
||||||
Performs the following translations in decoding by default:
|
|
||||||
|
|
||||||
+---------------+-------------------+
|
|
||||||
| JSON | Python |
|
|
||||||
+===============+===================+
|
|
||||||
| object | dict |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| array | list |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| string | unicode |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (int) | int, long |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (real) | float |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| true | True |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| false | False |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| null | None |
|
|
||||||
+---------------+-------------------+
|
|
||||||
|
|
||||||
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as their
|
|
||||||
corresponding ``float`` values, which is outside the JSON spec.
|
|
||||||
|
|
||||||
*encoding* determines the encoding used to interpret any :class:`str` objects
|
|
||||||
decoded by this instance (``'utf-8'`` by default). It has no effect when decoding
|
|
||||||
:class:`unicode` objects.
|
|
||||||
|
|
||||||
Note that currently only encodings that are a superset of ASCII work, strings
|
|
||||||
of other encodings should be passed in as :class:`unicode`.
|
|
||||||
|
|
||||||
*object_hook* is an optional function that will be called with the result of
|
|
||||||
every JSON object decoded and its return value will be used in place of the
|
|
||||||
given :class:`dict`. This can be used to provide custom deserializations
|
|
||||||
(e.g. to support JSON-RPC class hinting).
|
|
||||||
|
|
||||||
*object_pairs_hook* is an optional function that will be called with the
|
|
||||||
result of any object literal decode with an ordered list of pairs. The
|
|
||||||
return value of *object_pairs_hook* will be used instead of the
|
|
||||||
:class:`dict`. This feature can be used to implement custom decoders that
|
|
||||||
rely on the order that the key and value pairs are decoded (for example,
|
|
||||||
:class:`collections.OrderedDict` will remember the order of insertion). If
|
|
||||||
*object_hook* is also defined, the *object_pairs_hook* takes priority.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Added support for *object_pairs_hook*.
|
|
||||||
|
|
||||||
*parse_float*, if specified, will be called with the string of every JSON
|
|
||||||
float to be decoded. By default, this is equivalent to ``float(num_str)``.
|
|
||||||
This can be used to use another datatype or parser for JSON floats
|
|
||||||
(e.g. :class:`decimal.Decimal`).
|
|
||||||
|
|
||||||
*parse_int*, if specified, will be called with the string of every JSON int
|
|
||||||
to be decoded. By default, this is equivalent to ``int(num_str)``. This can
|
|
||||||
be used to use another datatype or parser for JSON integers
|
|
||||||
(e.g. :class:`float`).
|
|
||||||
|
|
||||||
*parse_constant*, if specified, will be called with one of the following
|
|
||||||
strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be used to
|
|
||||||
raise an exception if invalid JSON numbers are encountered.
|
|
||||||
|
|
||||||
*strict* controls the parser's behavior when it encounters an invalid
|
|
||||||
control character in a string. The default setting of ``True`` means that
|
|
||||||
unescaped control characters are parse errors; if ``False``, control
|
|
||||||
characters will be allowed in strings.
|
|
||||||
|
|
||||||
.. method:: decode(s)
|
|
||||||
|
|
||||||
Return the Python representation of *s* (a :class:`str` or
|
|
||||||
:class:`unicode` instance containing a JSON document)
|
|
||||||
|
|
||||||
If *s* is a :class:`str` then decoded JSON strings that contain
|
|
||||||
only ASCII characters may be parsed as :class:`str` for performance and
|
|
||||||
memory reasons. If your code expects only :class:`unicode` the
|
|
||||||
appropriate solution is to decode *s* to :class:`unicode` prior to calling
|
|
||||||
decode.
|
|
||||||
|
|
||||||
.. method:: raw_decode(s)
|
|
||||||
|
|
||||||
Decode a JSON document from *s* (a :class:`str` or :class:`unicode`
|
|
||||||
beginning with a JSON document) and return a 2-tuple of the Python
|
|
||||||
representation and the index in *s* where the document ended.
|
|
||||||
|
|
||||||
This can be used to decode a JSON document from a string that may have
|
|
||||||
extraneous data at the end.
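An illustrative sketch of decoding a document with trailing data (the index
shown is the expected value, not taken from the original docs)::

>>> import simplejson as json
>>> obj, end = json.JSONDecoder().raw_decode('{"done": true} trailing text')
>>> obj == {'done': True}
True
>>> end
14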
|
|
||||||
|
|
||||||
|
|
||||||
.. class:: JSONEncoder([skipkeys[, ensure_ascii[, check_circular[, allow_nan[, sort_keys[, indent[, separators[, encoding[, default]]]]]]]]])
|
|
||||||
|
|
||||||
Extensible JSON encoder for Python data structures.
|
|
||||||
|
|
||||||
Supports the following objects and types by default:
|
|
||||||
|
|
||||||
+-------------------+---------------+
|
|
||||||
| Python | JSON |
|
|
||||||
+===================+===============+
|
|
||||||
| dict | object |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| list, tuple | array |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| str, unicode | string |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| int, long, float | number |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| True | true |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| False | false |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| None | null |
|
|
||||||
+-------------------+---------------+
|
|
||||||
|
|
||||||
To extend this to recognize other objects, subclass and implement a
|
|
||||||
:meth:`default` method that returns a serializable object
|
|
||||||
for ``o`` if possible, otherwise it should call the superclass implementation
|
|
||||||
(to raise :exc:`TypeError`).
|
|
||||||
|
|
||||||
If *skipkeys* is false (the default), then it is a :exc:`TypeError` to
|
|
||||||
attempt encoding of keys that are not str, int, long, float or None. If
|
|
||||||
*skipkeys* is true, such items are simply skipped.
|
|
||||||
|
|
||||||
If *ensure_ascii* is true (the default), the output is guaranteed to be
|
|
||||||
:class:`str` objects with all incoming unicode characters escaped. If
|
|
||||||
*ensure_ascii* is false, the output will be a unicode object.
|
|
||||||
|
|
||||||
If *check_circular* is true (the default), then lists, dicts, and custom
|
|
||||||
encoded objects will be checked for circular references during encoding to
|
|
||||||
prevent an infinite recursion (which would cause an :exc:`OverflowError`).
|
|
||||||
Otherwise, no such check takes place.
|
|
||||||
|
|
||||||
If *allow_nan* is true (the default), then ``NaN``, ``Infinity``, and
|
|
||||||
``-Infinity`` will be encoded as such. This behavior is not JSON
|
|
||||||
specification compliant, but is consistent with most JavaScript based
|
|
||||||
encoders and decoders. Otherwise, it will be a :exc:`ValueError` to encode
|
|
||||||
such floats.
|
|
||||||
|
|
||||||
If *sort_keys* is true (not the default), then the output of dictionaries
|
|
||||||
will be sorted by key; this is useful for regression tests to ensure that
|
|
||||||
JSON serializations can be compared on a day-to-day basis.
|
|
||||||
|
|
||||||
If *indent* is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. ``None`` (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
Changed *indent* from an integer number of spaces to a string.
|
|
||||||
|
|
||||||
If specified, *separators* should be an ``(item_separator, key_separator)``
|
|
||||||
tuple. By default, ``(', ', ': ')`` are used. To get the most compact JSON
|
|
||||||
representation, you should specify ``(',', ':')`` to eliminate whitespace.
|
|
||||||
|
|
||||||
If specified, *default* should be a function that gets called for objects
|
|
||||||
that can't otherwise be serialized. It should return a JSON encodable
|
|
||||||
version of the object or raise a :exc:`TypeError`.
|
|
||||||
|
|
||||||
If *encoding* is not ``None``, then all input strings will be transformed
|
|
||||||
into unicode using that encoding prior to JSON-encoding. The default is
|
|
||||||
``'utf-8'``.
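As an illustrative sketch of *indent*, *sort_keys* and the default separators
(trailing whitespace is stripped for display; the output is the expected
result, not taken from the original docs)::

>>> import simplejson as json
>>> s = json.JSONEncoder(indent='  ', sort_keys=True).encode({'a': 1, 'b': 2})
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
  "a": 1,
  "b": 2
}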
|
|
||||||
|
|
||||||
|
|
||||||
.. method:: default(o)
|
|
||||||
|
|
||||||
Implement this method in a subclass such that it returns a serializable
|
|
||||||
object for *o*, or calls the base implementation (to raise a
|
|
||||||
:exc:`TypeError`).
|
|
||||||
|
|
||||||
For example, to support arbitrary iterators, you could implement default
|
|
||||||
like this::
|
|
||||||
|
|
||||||
def default(self, o):
|
|
||||||
try:
|
|
||||||
iterable = iter(o)
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return list(iterable)
|
|
||||||
return JSONEncoder.default(self, o)
|
|
||||||
|
|
||||||
|
|
||||||
.. method:: encode(o)
|
|
||||||
|
|
||||||
Return a JSON string representation of a Python data structure, *o*. For
|
|
||||||
example::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.JSONEncoder().encode({"foo": ["bar", "baz"]})
|
|
||||||
'{"foo": ["bar", "baz"]}'
|
|
||||||
|
|
||||||
|
|
||||||
.. method:: iterencode(o)
|
|
||||||
|
|
||||||
Encode the given object, *o*, and yield each string representation as
|
|
||||||
available. For example::
|
|
||||||
|
|
||||||
for chunk in JSONEncoder().iterencode(bigobject):
|
|
||||||
mysocket.write(chunk)
|
|
||||||
|
|
||||||
Note that :meth:`encode` has much better performance than
|
|
||||||
:meth:`iterencode`.
|
|
||||||
|
|
||||||
.. class:: JSONEncoderForHTML([skipkeys[, ensure_ascii[, check_circular[, allow_nan[, sort_keys[, indent[, separators[, encoding[, default]]]]]]]]])
|
|
||||||
|
|
||||||
Subclass of :class:`JSONEncoder` that escapes &, <, and > for embedding in HTML.
|
|
||||||
|
|
||||||
.. versionchanged:: 2.1.0
|
|
||||||
New in 2.1.0
|
|
|
@ -1,17 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
SPHINX_BUILD = 'sphinx-build'
|
|
||||||
|
|
||||||
DOCTREES_DIR = 'build/doctrees'
|
|
||||||
HTML_DIR = 'docs'
|
|
||||||
for dirname in DOCTREES_DIR, HTML_DIR:
|
|
||||||
if not os.path.exists(dirname):
|
|
||||||
os.makedirs(dirname)
|
|
||||||
|
|
||||||
res = subprocess.call([
|
|
||||||
SPHINX_BUILD, '-d', DOCTREES_DIR, '-b', 'html', '.', 'docs',
|
|
||||||
])
|
|
||||||
raise SystemExit(res)
|
|
|
@ -1,5 +0,0 @@
|
||||||
[egg_info]
|
|
||||||
tag_build =
|
|
||||||
tag_date = 0
|
|
||||||
tag_svn_revision = 0
|
|
||||||
|
|
|
@ -1,117 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
import sys
|
|
||||||
try:
|
|
||||||
import setuptools
|
|
||||||
except ImportError:
|
|
||||||
from ez_setup import use_setuptools
|
|
||||||
use_setuptools()
|
|
||||||
|
|
||||||
from setuptools import setup, find_packages, Extension, Feature
|
|
||||||
from distutils.command.build_ext import build_ext
|
|
||||||
from distutils.errors import CCompilerError, DistutilsExecError, \
|
|
||||||
DistutilsPlatformError
|
|
||||||
|
|
||||||
VERSION = '2.1.1'
|
|
||||||
DESCRIPTION = "Simple, fast, extensible JSON encoder/decoder for Python"
|
|
||||||
LONG_DESCRIPTION = """
|
|
||||||
simplejson is a simple, fast, complete, correct and extensible
|
|
||||||
JSON <http://json.org> encoder and decoder for Python 2.5+. It is
|
|
||||||
pure Python code with no dependencies, but includes an optional C
|
|
||||||
extension for a serious speed boost.
|
|
||||||
|
|
||||||
simplejson is the externally maintained development version of the
|
|
||||||
json library included with Python 2.6 and Python 3.0, but maintains
|
|
||||||
backwards compatibility with Python 2.5.
|
|
||||||
|
|
||||||
The encoder may be subclassed to provide serialization in any kind of
|
|
||||||
situation, without any special support by the objects to be serialized
|
|
||||||
(somewhat like pickle).
|
|
||||||
|
|
||||||
The decoder can handle incoming JSON strings of any specified encoding
|
|
||||||
(UTF-8 by default).
|
|
||||||
"""
|
|
||||||
|
|
||||||
CLASSIFIERS = filter(None, map(str.strip,
|
|
||||||
"""
|
|
||||||
Intended Audience :: Developers
|
|
||||||
License :: OSI Approved :: MIT License
|
|
||||||
Programming Language :: Python
|
|
||||||
Topic :: Software Development :: Libraries :: Python Modules
|
|
||||||
""".splitlines()))
|
|
||||||
|
|
||||||
|
|
||||||
speedups = Feature(
|
|
||||||
"optional C speed-enhancement module",
|
|
||||||
standard=True,
|
|
||||||
ext_modules = [
|
|
||||||
Extension("simplejson._speedups", ["simplejson/_speedups.c"]),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
if sys.platform == 'win32' and sys.version_info > (2, 6):
|
|
||||||
# 2.6's distutils.msvc9compiler can raise an IOError when failing to
|
|
||||||
# find the compiler
|
|
||||||
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError,
|
|
||||||
IOError)
|
|
||||||
else:
|
|
||||||
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
|
|
||||||
|
|
||||||
class BuildFailed(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class ve_build_ext(build_ext):
|
|
||||||
# This class allows C extension building to fail.
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
try:
|
|
||||||
build_ext.run(self)
|
|
||||||
except DistutilsPlatformError, x:
|
|
||||||
raise BuildFailed()
|
|
||||||
|
|
||||||
def build_extension(self, ext):
|
|
||||||
try:
|
|
||||||
build_ext.build_extension(self, ext)
|
|
||||||
except ext_errors, x:
|
|
||||||
raise BuildFailed()
|
|
||||||
|
|
||||||
def run_setup(with_binary):
|
|
||||||
if with_binary:
|
|
||||||
features = {'speedups': speedups}
|
|
||||||
else:
|
|
||||||
features = {}
|
|
||||||
|
|
||||||
setup(
|
|
||||||
name="simplejson",
|
|
||||||
version=VERSION,
|
|
||||||
description=DESCRIPTION,
|
|
||||||
long_description=LONG_DESCRIPTION,
|
|
||||||
classifiers=CLASSIFIERS,
|
|
||||||
author="Bob Ippolito",
|
|
||||||
author_email="bob@redivi.com",
|
|
||||||
url="http://undefined.org/python/#simplejson",
|
|
||||||
license="MIT License",
|
|
||||||
packages=find_packages(exclude=['ez_setup']),
|
|
||||||
platforms=['any'],
|
|
||||||
test_suite="simplejson.tests.all_tests_suite",
|
|
||||||
zip_safe=True,
|
|
||||||
features=features,
|
|
||||||
cmdclass={'build_ext': ve_build_ext},
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
run_setup(False)
|
|
||||||
except BuildFailed:
|
|
||||||
BUILD_EXT_WARNING = "WARNING: The C extension could not be compiled, speedups are not enabled."
|
|
||||||
print '*' * 75
|
|
||||||
print BUILD_EXT_WARNING
|
|
||||||
print "Failure information, if any, is above."
|
|
||||||
print "I'm retrying the build without the C extension now."
|
|
||||||
print '*' * 75
|
|
||||||
|
|
||||||
run_setup(False)
|
|
||||||
|
|
||||||
print '*' * 75
|
|
||||||
print BUILD_EXT_WARNING
|
|
||||||
print "Plain-Python installation succeeded."
|
|
||||||
print '*' * 75
|
|
|
@ -1,437 +0,0 @@
|
||||||
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
|
|
||||||
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
|
||||||
interchange format.
|
|
||||||
|
|
||||||
:mod:`simplejson` exposes an API familiar to users of the standard library
|
|
||||||
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
|
|
||||||
version of the :mod:`json` library contained in Python 2.6, but maintains
|
|
||||||
compatibility with Python 2.4 and Python 2.5 and (currently) has
|
|
||||||
significant performance advantages, even without using the optional C
|
|
||||||
extension for speedups.
|
|
||||||
|
|
||||||
Encoding basic Python object hierarchies::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
|
||||||
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
|
||||||
>>> print json.dumps("\"foo\bar")
|
|
||||||
"\"foo\bar"
|
|
||||||
>>> print json.dumps(u'\u1234')
|
|
||||||
"\u1234"
|
|
||||||
>>> print json.dumps('\\')
|
|
||||||
"\\"
|
|
||||||
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
|
|
||||||
{"a": 0, "b": 0, "c": 0}
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO()
|
|
||||||
>>> json.dump(['streaming API'], io)
|
|
||||||
>>> io.getvalue()
|
|
||||||
'["streaming API"]'
|
|
||||||
|
|
||||||
Compact encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
|
|
||||||
'[1,2,3,{"4":5,"6":7}]'
|
|
||||||
|
|
||||||
Pretty printing::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
|
|
||||||
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
|
|
||||||
{
|
|
||||||
"4": 5,
|
|
||||||
"6": 7
|
|
||||||
}
|
|
||||||
|
|
||||||
Decoding JSON::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
|
|
||||||
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
|
||||||
True
|
|
||||||
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
|
|
||||||
True
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO('["streaming API"]')
|
|
||||||
>>> json.load(io)[0] == 'streaming API'
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object decoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def as_complex(dct):
|
|
||||||
... if '__complex__' in dct:
|
|
||||||
... return complex(dct['real'], dct['imag'])
|
|
||||||
... return dct
|
|
||||||
...
|
|
||||||
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
|
||||||
... object_hook=as_complex)
|
|
||||||
(1+2j)
|
|
||||||
>>> from decimal import Decimal
|
|
||||||
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def encode_complex(obj):
|
|
||||||
... if isinstance(obj, complex):
|
|
||||||
... return [obj.real, obj.imag]
|
|
||||||
... raise TypeError(repr(obj) + " is not JSON serializable")
|
|
||||||
...
|
|
||||||
>>> json.dumps(2 + 1j, default=encode_complex)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
|
|
||||||
|
|
||||||
Using simplejson.tool from the shell to validate and pretty-print::
|
|
||||||
|
|
||||||
$ echo '{"json":"obj"}' | python -m simplejson.tool
|
|
||||||
{
|
|
||||||
"json": "obj"
|
|
||||||
}
|
|
||||||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
|
|
||||||
Expecting property name: line 1 column 2 (char 2)
|
|
||||||
"""
|
|
||||||
__version__ = '2.1.1'
|
|
||||||
__all__ = [
|
|
||||||
'dump', 'dumps', 'load', 'loads',
|
|
||||||
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
|
|
||||||
'OrderedDict',
|
|
||||||
]
|
|
||||||
|
|
||||||
__author__ = 'Bob Ippolito <bob@redivi.com>'
|
|
||||||
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
from decoder import JSONDecoder, JSONDecodeError
|
|
||||||
from encoder import JSONEncoder
|
|
||||||
def _import_OrderedDict():
|
|
||||||
import collections
|
|
||||||
try:
|
|
||||||
return collections.OrderedDict
|
|
||||||
except AttributeError:
|
|
||||||
import ordered_dict
|
|
||||||
return ordered_dict.OrderedDict
|
|
||||||
OrderedDict = _import_OrderedDict()
|
|
||||||
|
|
||||||
def _import_c_make_encoder():
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import make_encoder
|
|
||||||
return make_encoder
|
|
||||||
except ImportError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
_default_encoder = JSONEncoder(
|
|
||||||
skipkeys=False,
|
|
||||||
ensure_ascii=True,
|
|
||||||
check_circular=True,
|
|
||||||
allow_nan=True,
|
|
||||||
indent=None,
|
|
||||||
separators=None,
|
|
||||||
encoding='utf-8',
|
|
||||||
default=None,
|
|
||||||
use_decimal=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
|
|
||||||
allow_nan=True, cls=None, indent=None, separators=None,
|
|
||||||
encoding='utf-8', default=None, use_decimal=False, **kw):
|
|
||||||
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
|
|
||||||
``.write()``-supporting file-like object).
|
|
||||||
|
|
||||||
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
|
||||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
|
||||||
will be skipped instead of raising a ``TypeError``.
|
|
||||||
|
|
||||||
If ``ensure_ascii`` is false, then some chunks written to ``fp``
|
|
||||||
may be ``unicode`` instances, subject to normal Python ``str`` to
|
|
||||||
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
|
|
||||||
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
|
|
||||||
to cause an error.
|
|
||||||
|
|
||||||
If ``check_circular`` is false, then the circular reference check
|
|
||||||
for container types will be skipped and a circular reference will
|
|
||||||
result in an ``OverflowError`` (or worse).
|
|
||||||
|
|
||||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
|
||||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
|
|
||||||
in strict compliance of the JSON specification, instead of using the
|
|
||||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If *indent* is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. ``None`` (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.
|
|
||||||
|
|
||||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
|
||||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
|
||||||
``(',', ':')`` is the most compact JSON representation.
|
|
||||||
|
|
||||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
|
||||||
|
|
||||||
``default(obj)`` is a function that should return a serializable version
|
|
||||||
of obj or raise TypeError. The default simply raises TypeError.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then decimal.Decimal
|
|
||||||
will be natively serialized to JSON with full precision.
|
|
||||||
|
|
||||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
|
||||||
``.default()`` method to serialize additional types), specify it with
|
|
||||||
the ``cls`` kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# cached encoder
|
|
||||||
if (not skipkeys and ensure_ascii and
|
|
||||||
check_circular and allow_nan and
|
|
||||||
cls is None and indent is None and separators is None and
|
|
||||||
encoding == 'utf-8' and default is None and not use_decimal and not kw):
|
|
||||||
iterable = _default_encoder.iterencode(obj)
|
|
||||||
else:
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONEncoder
|
|
||||||
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
|
||||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
|
||||||
separators=separators, encoding=encoding,
|
|
||||||
default=default, use_decimal=use_decimal, **kw).iterencode(obj)
|
|
||||||
# could accelerate with writelines in some versions of Python, at
|
|
||||||
# a debuggability cost
|
|
||||||
for chunk in iterable:
|
|
||||||
fp.write(chunk)
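# Illustrative note (not in the original source): dump({'a': 1}, fp) with all
# keyword defaults reuses the module-level _default_encoder above; passing any
# non-default option (e.g. indent='  ') builds a fresh JSONEncoder (or the
# given cls) for that call.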
|
|
||||||
|
|
||||||
|
|
||||||
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
|
|
||||||
allow_nan=True, cls=None, indent=None, separators=None,
|
|
||||||
encoding='utf-8', default=None, use_decimal=False, **kw):
|
|
||||||
"""Serialize ``obj`` to a JSON formatted ``str``.
|
|
||||||
|
|
||||||
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
|
||||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
|
||||||
will be skipped instead of raising a ``TypeError``.
|
|
||||||
|
|
||||||
If ``ensure_ascii`` is false, then the return value will be a
|
|
||||||
``unicode`` instance subject to normal Python ``str`` to ``unicode``
|
|
||||||
coercion rules instead of being escaped to an ASCII ``str``.
|
|
||||||
|
|
||||||
If ``check_circular`` is false, then the circular reference check
|
|
||||||
for container types will be skipped and a circular reference will
|
|
||||||
result in an ``OverflowError`` (or worse).
|
|
||||||
|
|
||||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
|
||||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
|
|
||||||
strict compliance of the JSON specification, instead of using the
|
|
||||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If ``indent`` is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. ``None`` (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.
|
|
||||||
|
|
||||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
|
||||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
|
||||||
``(',', ':')`` is the most compact JSON representation.
|
|
||||||
|
|
||||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
|
||||||
|
|
||||||
``default(obj)`` is a function that should return a serializable version
|
|
||||||
of obj or raise TypeError. The default simply raises TypeError.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then decimal.Decimal
|
|
||||||
will be natively serialized to JSON with full precision.
|
|
||||||
|
|
||||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
|
||||||
``.default()`` method to serialize additional types), specify it with
|
|
||||||
the ``cls`` kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# cached encoder
|
|
||||||
if (not skipkeys and ensure_ascii and
|
|
||||||
check_circular and allow_nan and
|
|
||||||
cls is None and indent is None and separators is None and
|
|
||||||
encoding == 'utf-8' and default is None and not use_decimal
|
|
||||||
and not kw):
|
|
||||||
return _default_encoder.encode(obj)
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONEncoder
|
|
||||||
return cls(
|
|
||||||
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
|
||||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
|
||||||
separators=separators, encoding=encoding, default=default,
|
|
||||||
use_decimal=use_decimal, **kw).encode(obj)
|
|
||||||
|
|
||||||
|
|
||||||
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
|
|
||||||
object_pairs_hook=None)
|
|
||||||
|
|
||||||
|
|
||||||
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, object_pairs_hook=None,
|
|
||||||
use_decimal=False, **kw):
|
|
||||||
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
|
|
||||||
a JSON document) to a Python object.
|
|
||||||
|
|
||||||
*encoding* determines the encoding used to interpret any
|
|
||||||
:class:`str` objects decoded by this instance (``'utf-8'`` by
|
|
||||||
default). It has no effect when decoding :class:`unicode` objects.
|
|
||||||
|
|
||||||
Note that currently only encodings that are a superset of ASCII work,
|
|
||||||
strings of other encodings should be passed in as :class:`unicode`.
|
|
||||||
|
|
||||||
*object_hook*, if specified, will be called with the result of every
|
|
||||||
JSON object decoded and its return value will be used in place of the
|
|
||||||
given :class:`dict`. This can be used to provide custom
|
|
||||||
deserializations (e.g. to support JSON-RPC class hinting).
|
|
||||||
|
|
||||||
*object_pairs_hook* is an optional function that will be called with
|
|
||||||
the result of any object literal decode with an ordered list of pairs.
|
|
||||||
The return value of *object_pairs_hook* will be used instead of the
|
|
||||||
:class:`dict`. This feature can be used to implement custom decoders
|
|
||||||
that rely on the order that the key and value pairs are decoded (for
|
|
||||||
example, :func:`collections.OrderedDict` will remember the order of
|
|
||||||
insertion). If *object_hook* is also defined, the *object_pairs_hook*
|
|
||||||
takes priority.
|
|
||||||
|
|
||||||
*parse_float*, if specified, will be called with the string of every
|
|
||||||
JSON float to be decoded. By default, this is equivalent to
|
|
||||||
``float(num_str)``. This can be used to use another datatype or parser
|
|
||||||
for JSON floats (e.g. :class:`decimal.Decimal`).
|
|
||||||
|
|
||||||
*parse_int*, if specified, will be called with the string of every
|
|
||||||
JSON int to be decoded. By default, this is equivalent to
|
|
||||||
``int(num_str)``. This can be used to use another datatype or parser
|
|
||||||
for JSON integers (e.g. :class:`float`).
|
|
||||||
|
|
||||||
*parse_constant*, if specified, will be called with one of the
|
|
||||||
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
|
|
||||||
can be used to raise an exception if invalid JSON numbers are
|
|
||||||
encountered.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then it implies
|
|
||||||
parse_float=decimal.Decimal for parity with ``dump``.
|
|
||||||
|
|
||||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
|
||||||
kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
return loads(fp.read(),
|
|
||||||
encoding=encoding, cls=cls, object_hook=object_hook,
|
|
||||||
parse_float=parse_float, parse_int=parse_int,
|
|
||||||
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
|
|
||||||
use_decimal=use_decimal, **kw)
|
|
||||||
|
|
||||||
|
|
||||||
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, object_pairs_hook=None,
|
|
||||||
use_decimal=False, **kw):
|
|
||||||
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
|
|
||||||
document) to a Python object.
|
|
||||||
|
|
||||||
*encoding* determines the encoding used to interpret any
|
|
||||||
:class:`str` objects decoded by this instance (``'utf-8'`` by
|
|
||||||
default). It has no effect when decoding :class:`unicode` objects.
|
|
||||||
|
|
||||||
Note that currently only encodings that are a superset of ASCII work,
|
|
||||||
strings of other encodings should be passed in as :class:`unicode`.
|
|
||||||
|
|
||||||
*object_hook*, if specified, will be called with the result of every
|
|
||||||
JSON object decoded and its return value will be used in place of the
|
|
||||||
given :class:`dict`. This can be used to provide custom
|
|
||||||
deserializations (e.g. to support JSON-RPC class hinting).
|
|
||||||
|
|
||||||
*object_pairs_hook* is an optional function that will be called with
|
|
||||||
the result of any object literal decode with an ordered list of pairs.
|
|
||||||
The return value of *object_pairs_hook* will be used instead of the
|
|
||||||
:class:`dict`. This feature can be used to implement custom decoders
|
|
||||||
that rely on the order that the key and value pairs are decoded (for
|
|
||||||
example, :func:`collections.OrderedDict` will remember the order of
|
|
||||||
insertion). If *object_hook* is also defined, the *object_pairs_hook*
|
|
||||||
takes priority.
|
|
||||||
|
|
||||||
*parse_float*, if specified, will be called with the string of every
|
|
||||||
JSON float to be decoded. By default, this is equivalent to
|
|
||||||
``float(num_str)``. This can be used to use another datatype or parser
|
|
||||||
for JSON floats (e.g. :class:`decimal.Decimal`).
|
|
||||||
|
|
||||||
*parse_int*, if specified, will be called with the string of every
|
|
||||||
JSON int to be decoded. By default, this is equivalent to
|
|
||||||
``int(num_str)``. This can be used to use another datatype or parser
|
|
||||||
for JSON integers (e.g. :class:`float`).
|
|
||||||
|
|
||||||
*parse_constant*, if specified, will be called with one of the
|
|
||||||
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
|
|
||||||
can be used to raise an exception if invalid JSON numbers are
|
|
||||||
encountered.
|
|
||||||
|
|
||||||
If *use_decimal* is true (default: ``False``) then it implies
|
|
||||||
parse_float=decimal.Decimal for parity with ``dump``.
|
|
||||||
|
|
||||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
|
||||||
kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if (cls is None and encoding is None and object_hook is None and
|
|
||||||
parse_int is None and parse_float is None and
|
|
||||||
parse_constant is None and object_pairs_hook is None
|
|
||||||
and not use_decimal and not kw):
|
|
||||||
return _default_decoder.decode(s)
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONDecoder
|
|
||||||
if object_hook is not None:
|
|
||||||
kw['object_hook'] = object_hook
|
|
||||||
if object_pairs_hook is not None:
|
|
||||||
kw['object_pairs_hook'] = object_pairs_hook
|
|
||||||
if parse_float is not None:
|
|
||||||
kw['parse_float'] = parse_float
|
|
||||||
if parse_int is not None:
|
|
||||||
kw['parse_int'] = parse_int
|
|
||||||
if parse_constant is not None:
|
|
||||||
kw['parse_constant'] = parse_constant
|
|
||||||
if use_decimal:
|
|
||||||
if parse_float is not None:
|
|
||||||
raise TypeError("use_decimal=True implies parse_float=Decimal")
|
|
||||||
kw['parse_float'] = Decimal
|
|
||||||
return cls(encoding=encoding, **kw).decode(s)
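# Illustrative note (not in the original source): loads('[1, 2]') with no
# hooks or parse_* overrides takes the cached _default_decoder fast path
# above; any override builds a new JSONDecoder (or the given cls) per call.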
|
|
||||||
|
|
||||||
|
|
||||||
def _toggle_speedups(enabled):
|
|
||||||
import simplejson.decoder as dec
|
|
||||||
import simplejson.encoder as enc
|
|
||||||
import simplejson.scanner as scan
|
|
||||||
c_make_encoder = _import_c_make_encoder()
|
|
||||||
if enabled:
|
|
||||||
dec.scanstring = dec.c_scanstring or dec.py_scanstring
|
|
||||||
enc.c_make_encoder = c_make_encoder
|
|
||||||
enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
|
|
||||||
enc.py_encode_basestring_ascii)
|
|
||||||
scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
|
|
||||||
else:
|
|
||||||
dec.scanstring = dec.py_scanstring
|
|
||||||
enc.c_make_encoder = None
|
|
||||||
enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
|
|
||||||
scan.make_scanner = scan.py_make_scanner
|
|
||||||
dec.make_scanner = scan.make_scanner
|
|
||||||
global _default_decoder
|
|
||||||
_default_decoder = JSONDecoder(
|
|
||||||
encoding=None,
|
|
||||||
object_hook=None,
|
|
||||||
object_pairs_hook=None,
|
|
||||||
)
|
|
||||||
global _default_encoder
|
|
||||||
_default_encoder = JSONEncoder(
|
|
||||||
skipkeys=False,
|
|
||||||
ensure_ascii=True,
|
|
||||||
check_circular=True,
|
|
||||||
allow_nan=True,
|
|
||||||
indent=None,
|
|
||||||
separators=None,
|
|
||||||
encoding='utf-8',
|
|
||||||
default=None,
|
|
||||||
)
|
|
The diff for this file is not shown because of its large size
Load diff
|
@ -1,421 +0,0 @@
|
||||||
"""Implementation of JSONDecoder
|
|
||||||
"""
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import struct
|
|
||||||
|
|
||||||
from simplejson.scanner import make_scanner
|
|
||||||
def _import_c_scanstring():
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import scanstring
|
|
||||||
return scanstring
|
|
||||||
except ImportError:
|
|
||||||
return None
|
|
||||||
c_scanstring = _import_c_scanstring()
|
|
||||||
|
|
||||||
__all__ = ['JSONDecoder']
|
|
||||||
|
|
||||||
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
|
||||||
|
|
||||||
def _floatconstants():
|
|
||||||
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
|
|
||||||
# The struct module in Python 2.4 would get frexp() out of range here
|
|
||||||
# when an endian is specified in the format string. Fixed in Python 2.5+
|
|
||||||
if sys.byteorder != 'big':
|
|
||||||
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
|
|
||||||
nan, inf = struct.unpack('dd', _BYTES)
|
|
||||||
return nan, inf, -inf
|
|
||||||
|
|
||||||
NaN, PosInf, NegInf = _floatconstants()
|
|
||||||
|
|
||||||
|
|
||||||
class JSONDecodeError(ValueError):
|
|
||||||
"""Subclass of ValueError with the following additional properties:
|
|
||||||
|
|
||||||
msg: The unformatted error message
|
|
||||||
doc: The JSON document being parsed
|
|
||||||
pos: The start index of doc where parsing failed
|
|
||||||
end: The end index of doc where parsing failed (may be None)
|
|
||||||
lineno: The line corresponding to pos
|
|
||||||
colno: The column corresponding to pos
|
|
||||||
endlineno: The line corresponding to end (may be None)
|
|
||||||
endcolno: The column corresponding to end (may be None)
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __init__(self, msg, doc, pos, end=None):
|
|
||||||
ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
|
|
||||||
self.msg = msg
|
|
||||||
self.doc = doc
|
|
||||||
self.pos = pos
|
|
||||||
self.end = end
|
|
||||||
self.lineno, self.colno = linecol(doc, pos)
|
|
||||||
if end is not None:
|
|
||||||
self.endlineno, self.endcolno = linecol(doc, end)
|
|
||||||
else:
|
|
||||||
self.endlineno, self.endcolno = None, None
|
|
||||||
|
|
||||||
|
|
||||||
def linecol(doc, pos):
|
|
||||||
lineno = doc.count('\n', 0, pos) + 1
|
|
||||||
if lineno == 1:
|
|
||||||
colno = pos
|
|
||||||
else:
|
|
||||||
colno = pos - doc.rindex('\n', 0, pos)
|
|
||||||
return lineno, colno
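# Illustrative example (not in the original source): for doc = '{\n "a": x',
# the offending 'x' sits at index 8 and linecol(doc, 8) == (2, 7), i.e.
# line 2, column 7.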
|
|
||||||
|
|
||||||
|
|
||||||
def errmsg(msg, doc, pos, end=None):
|
|
||||||
# Note that this function is called from _speedups
|
|
||||||
lineno, colno = linecol(doc, pos)
|
|
||||||
if end is None:
|
|
||||||
#fmt = '{0}: line {1} column {2} (char {3})'
|
|
||||||
#return fmt.format(msg, lineno, colno, pos)
|
|
||||||
fmt = '%s: line %d column %d (char %d)'
|
|
||||||
return fmt % (msg, lineno, colno, pos)
|
|
||||||
endlineno, endcolno = linecol(doc, end)
|
|
||||||
#fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
|
|
||||||
#return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
|
|
||||||
fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
|
|
||||||
return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
|
|
||||||
|
|
||||||
|
|
||||||
_CONSTANTS = {
|
|
||||||
'-Infinity': NegInf,
|
|
||||||
'Infinity': PosInf,
|
|
||||||
'NaN': NaN,
|
|
||||||
}
|
|
||||||
|
|
||||||
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
|
|
||||||
BACKSLASH = {
|
|
||||||
'"': u'"', '\\': u'\\', '/': u'/',
|
|
||||||
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
|
|
||||||
}
|
|
||||||
|
|
||||||
DEFAULT_ENCODING = "utf-8"
|
|
||||||
|
|
||||||
def py_scanstring(s, end, encoding=None, strict=True,
|
|
||||||
_b=BACKSLASH, _m=STRINGCHUNK.match):
|
|
||||||
"""Scan the string s for a JSON string. End is the index of the
|
|
||||||
character in s after the quote that started the JSON string.
|
|
||||||
Unescapes all valid JSON string escape sequences and raises ValueError
|
|
||||||
on attempt to decode an invalid string. If strict is False then literal
|
|
||||||
control characters are allowed in the string.
|
|
||||||
|
|
||||||
Returns a tuple of the decoded string and the index of the character in s
|
|
||||||
after the end quote."""
|
|
||||||
if encoding is None:
|
|
||||||
encoding = DEFAULT_ENCODING
|
|
||||||
chunks = []
|
|
||||||
_append = chunks.append
|
|
||||||
begin = end - 1
|
|
||||||
while 1:
|
|
||||||
chunk = _m(s, end)
|
|
||||||
if chunk is None:
|
|
||||||
raise JSONDecodeError(
|
|
||||||
"Unterminated string starting at", s, begin)
|
|
||||||
end = chunk.end()
|
|
||||||
content, terminator = chunk.groups()
|
|
||||||
# Content contains zero or more unescaped string characters
|
|
||||||
if content:
|
|
||||||
if not isinstance(content, unicode):
|
|
||||||
content = unicode(content, encoding)
|
|
||||||
_append(content)
|
|
||||||
# Terminator is the end of string, a literal control character,
|
|
||||||
# or a backslash denoting that an escape sequence follows
|
|
||||||
if terminator == '"':
|
|
||||||
break
|
|
||||||
elif terminator != '\\':
|
|
||||||
if strict:
|
|
||||||
msg = "Invalid control character %r at" % (terminator,)
|
|
||||||
#msg = "Invalid control character {0!r} at".format(terminator)
|
|
||||||
raise JSONDecodeError(msg, s, end)
|
|
||||||
else:
|
|
||||||
_append(terminator)
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
esc = s[end]
|
|
||||||
except IndexError:
|
|
||||||
raise JSONDecodeError(
|
|
||||||
"Unterminated string starting at", s, begin)
|
|
||||||
# If not a unicode escape sequence, must be in the lookup table
|
|
||||||
if esc != 'u':
|
|
||||||
try:
|
|
||||||
char = _b[esc]
|
|
||||||
except KeyError:
|
|
||||||
msg = "Invalid \\escape: " + repr(esc)
|
|
||||||
raise JSONDecodeError(msg, s, end)
|
|
||||||
end += 1
|
|
||||||
else:
|
|
||||||
# Unicode escape sequence
|
|
||||||
esc = s[end + 1:end + 5]
|
|
||||||
next_end = end + 5
|
|
||||||
if len(esc) != 4:
|
|
||||||
msg = "Invalid \\uXXXX escape"
|
|
||||||
raise JSONDecodeError(msg, s, end)
|
|
||||||
uni = int(esc, 16)
|
|
||||||
# Check for surrogate pair on UCS-4 systems
|
|
||||||
if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
|
|
||||||
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
|
|
||||||
if not s[end + 5:end + 7] == '\\u':
|
|
||||||
raise JSONDecodeError(msg, s, end)
|
|
||||||
esc2 = s[end + 7:end + 11]
|
|
||||||
if len(esc2) != 4:
|
|
||||||
raise JSONDecodeError(msg, s, end)
|
|
||||||
uni2 = int(esc2, 16)
|
|
||||||
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
|
|
||||||
next_end += 6
|
|
||||||
char = unichr(uni)
|
|
||||||
end = next_end
|
|
||||||
# Append the unescaped character
|
|
||||||
_append(char)
|
|
||||||
return u''.join(chunks), end
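# Illustrative example (not in the original source):
#   py_scanstring('"foo" more', 1) == (u'foo', 5)
# where 1 is the index just after the opening quote and 5 is the index just
# past the closing quote.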
|
|
||||||
|
|
||||||
|
|
||||||
# Use speedup if available
|
|
||||||
scanstring = c_scanstring or py_scanstring
|
|
||||||
|
|
||||||
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
|
|
||||||
WHITESPACE_STR = ' \t\n\r'
|
|
||||||
|
|
||||||
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
|
|
||||||
object_pairs_hook, memo=None,
|
|
||||||
_w=WHITESPACE.match, _ws=WHITESPACE_STR):
|
|
||||||
# Backwards compatibility
|
|
||||||
if memo is None:
|
|
||||||
memo = {}
|
|
||||||
memo_get = memo.setdefault
|
|
||||||
pairs = []
|
|
||||||
# Use a slice to prevent IndexError from being raised, the following
|
|
||||||
# check will raise a more specific ValueError if the string is empty
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
# Normally we expect nextchar == '"'
|
|
||||||
if nextchar != '"':
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end).end()
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
# Trivial empty object
|
|
||||||
if nextchar == '}':
|
|
||||||
if object_pairs_hook is not None:
|
|
||||||
result = object_pairs_hook(pairs)
|
|
||||||
return result, end
|
|
||||||
pairs = {}
|
|
||||||
if object_hook is not None:
|
|
||||||
pairs = object_hook(pairs)
|
|
||||||
return pairs, end + 1
|
|
||||||
elif nextchar != '"':
|
|
||||||
raise JSONDecodeError("Expecting property name", s, end)
|
|
||||||
end += 1
|
|
||||||
while True:
|
|
||||||
key, end = scanstring(s, end, encoding, strict)
|
|
||||||
key = memo_get(key, key)
|
|
||||||
|
|
||||||
# To skip some function call overhead we optimize the fast paths where
|
|
||||||
# the JSON key separator is ": " or just ":".
|
|
||||||
if s[end:end + 1] != ':':
|
|
||||||
end = _w(s, end).end()
|
|
||||||
if s[end:end + 1] != ':':
|
|
||||||
raise JSONDecodeError("Expecting : delimiter", s, end)
|
|
||||||
|
|
||||||
end += 1
|
|
||||||
|
|
||||||
try:
|
|
||||||
if s[end] in _ws:
|
|
||||||
end += 1
|
|
||||||
if s[end] in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
except IndexError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
value, end = scan_once(s, end)
|
|
||||||
except StopIteration:
|
|
||||||
raise JSONDecodeError("Expecting object", s, end)
|
|
||||||
pairs.append((key, value))
|
|
||||||
|
|
||||||
try:
|
|
||||||
nextchar = s[end]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end]
|
|
||||||
except IndexError:
|
|
||||||
nextchar = ''
|
|
||||||
end += 1
|
|
||||||
|
|
||||||
if nextchar == '}':
|
|
||||||
break
|
|
||||||
elif nextchar != ',':
|
|
||||||
raise JSONDecodeError("Expecting , delimiter", s, end - 1)
|
|
||||||
|
|
||||||
try:
|
|
||||||
nextchar = s[end]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end += 1
|
|
||||||
nextchar = s[end]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end]
|
|
||||||
except IndexError:
|
|
||||||
nextchar = ''
|
|
||||||
|
|
||||||
end += 1
|
|
||||||
if nextchar != '"':
|
|
||||||
raise JSONDecodeError("Expecting property name", s, end - 1)
|
|
||||||
|
|
||||||
if object_pairs_hook is not None:
|
|
||||||
result = object_pairs_hook(pairs)
|
|
||||||
return result, end
|
|
||||||
pairs = dict(pairs)
|
|
||||||
if object_hook is not None:
|
|
||||||
pairs = object_hook(pairs)
|
|
||||||
return pairs, end
|
|
||||||
|
|
||||||
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
|
|
||||||
values = []
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
# Look-ahead for trivial empty array
|
|
||||||
if nextchar == ']':
|
|
||||||
return values, end + 1
|
|
||||||
_append = values.append
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
value, end = scan_once(s, end)
|
|
||||||
except StopIteration:
|
|
||||||
raise JSONDecodeError("Expecting object", s, end)
|
|
||||||
_append(value)
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
end += 1
|
|
||||||
if nextchar == ']':
|
|
||||||
break
|
|
||||||
elif nextchar != ',':
|
|
||||||
raise JSONDecodeError("Expecting , delimiter", s, end)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if s[end] in _ws:
|
|
||||||
end += 1
|
|
||||||
if s[end] in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
except IndexError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return values, end
|
|
||||||
|
|
||||||
class JSONDecoder(object):
|
|
||||||
"""Simple JSON <http://json.org> decoder
|
|
||||||
|
|
||||||
Performs the following translations in decoding by default:
|
|
||||||
|
|
||||||
+---------------+-------------------+
|
|
||||||
| JSON | Python |
|
|
||||||
+===============+===================+
|
|
||||||
| object | dict |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| array | list |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| string | unicode |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (int) | int, long |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (real) | float |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| true | True |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| false | False |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| null | None |
|
|
||||||
+---------------+-------------------+
|
|
||||||
|
|
||||||
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
|
|
||||||
their corresponding ``float`` values, which is outside the JSON spec.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, encoding=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, strict=True,
|
|
||||||
object_pairs_hook=None):
|
|
||||||
"""
|
|
||||||
*encoding* determines the encoding used to interpret any
|
|
||||||
:class:`str` objects decoded by this instance (``'utf-8'`` by
|
|
||||||
default). It has no effect when decoding :class:`unicode` objects.
|
|
||||||
|
|
||||||
Note that currently only encodings that are a superset of ASCII work,
|
|
||||||
strings of other encodings should be passed in as :class:`unicode`.
|
|
||||||
|
|
||||||
*object_hook*, if specified, will be called with the result of every
|
|
||||||
JSON object decoded and its return value will be used in place of the
|
|
||||||
given :class:`dict`. This can be used to provide custom
|
|
||||||
deserializations (e.g. to support JSON-RPC class hinting).
|
|
||||||
|
|
||||||
*object_pairs_hook* is an optional function that will be called with
|
|
||||||
the result of any object literal decode with an ordered list of pairs.
|
|
||||||
The return value of *object_pairs_hook* will be used instead of the
|
|
||||||
:class:`dict`. This feature can be used to implement custom decoders
|
|
||||||
that rely on the order that the key and value pairs are decoded (for
|
|
||||||
example, :func:`collections.OrderedDict` will remember the order of
|
|
||||||
insertion). If *object_hook* is also defined, the *object_pairs_hook*
|
|
||||||
takes priority.
|
|
||||||
|
|
||||||
*parse_float*, if specified, will be called with the string of every
|
|
||||||
JSON float to be decoded. By default, this is equivalent to
|
|
||||||
``float(num_str)``. This can be used to use another datatype or parser
|
|
||||||
for JSON floats (e.g. :class:`decimal.Decimal`).
|
|
||||||
|
|
||||||
*parse_int*, if specified, will be called with the string of every
|
|
||||||
JSON int to be decoded. By default, this is equivalent to
|
|
||||||
``int(num_str)``. This can be used to use another datatype or parser
|
|
||||||
for JSON integers (e.g. :class:`float`).
|
|
||||||
|
|
||||||
*parse_constant*, if specified, will be called with one of the
|
|
||||||
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
|
|
||||||
can be used to raise an exception if invalid JSON numbers are
|
|
||||||
encountered.
|
|
||||||
|
|
||||||
*strict* controls the parser's behavior when it encounters an
|
|
||||||
invalid control character in a string. The default setting of
|
|
||||||
``True`` means that unescaped control characters are parse errors; if
|
|
||||||
``False`` then control characters will be allowed in strings.
|
|
||||||
|
|
||||||
"""
|
|
||||||
self.encoding = encoding
|
|
||||||
self.object_hook = object_hook
|
|
||||||
self.object_pairs_hook = object_pairs_hook
|
|
||||||
self.parse_float = parse_float or float
|
|
||||||
self.parse_int = parse_int or int
|
|
||||||
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
|
|
||||||
self.strict = strict
|
|
||||||
self.parse_object = JSONObject
|
|
||||||
self.parse_array = JSONArray
|
|
||||||
self.parse_string = scanstring
|
|
||||||
self.memo = {}
|
|
||||||
self.scan_once = make_scanner(self)
|
|
||||||
|
|
||||||
def decode(self, s, _w=WHITESPACE.match):
|
|
||||||
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
|
|
||||||
instance containing a JSON document)
|
|
||||||
|
|
||||||
"""
|
|
||||||
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
|
|
||||||
end = _w(s, end).end()
|
|
||||||
if end != len(s):
|
|
||||||
raise JSONDecodeError("Extra data", s, end, len(s))
|
|
||||||
return obj
|
|
||||||
|
|
||||||
def raw_decode(self, s, idx=0):
|
|
||||||
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode``
|
|
||||||
beginning with a JSON document) and return a 2-tuple of the Python
|
|
||||||
representation and the index in ``s`` where the document ended.
|
|
||||||
|
|
||||||
This can be used to decode a JSON document from a string that may
|
|
||||||
have extraneous data at the end.
|
|
||||||
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
obj, end = self.scan_once(s, idx)
|
|
||||||
except StopIteration:
|
|
||||||
raise JSONDecodeError("No JSON object could be decoded", s, idx)
|
|
||||||
return obj, end
|
|
|
@ -1,501 +0,0 @@
"""Implementation of JSONEncoder
"""
import re
from decimal import Decimal

def _import_speedups():
    try:
        from simplejson import _speedups
        return _speedups.encode_basestring_ascii, _speedups.make_encoder
    except ImportError:
        return None, None
c_encode_basestring_ascii, c_make_encoder = _import_speedups()

from simplejson.decoder import PosInf

ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

FLOAT_REPR = repr

def encode_basestring(s):
    """Return a JSON representation of a Python string

    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        return ESCAPE_DCT[match.group(0)]
    return u'"' + ESCAPE.sub(replace, s) + u'"'


def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string

    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'


encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
|
|
||||||
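A worked example of the surrogate-pair arithmetic in py_encode_basestring_ascii above; the code point U+1D120 is chosen only for illustration:

# How a non-BMP code point becomes two \uXXXX escapes (illustrative sketch).
n = 0x1D120 - 0x10000                # 0xD120
s1 = 0xd800 | ((n >> 10) & 0x3ff)    # 0xD834 (high surrogate)
s2 = 0xdc00 | (n & 0x3ff)            # 0xDD20 (low surrogate)
assert '\\u%04x\\u%04x' % (s1, s2) == '\\ud834\\udd20'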
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).

    """
item_separator = ', '
|
|
||||||
key_separator = ': '
|
|
||||||
def __init__(self, skipkeys=False, ensure_ascii=True,
|
|
||||||
check_circular=True, allow_nan=True, sort_keys=False,
|
|
||||||
indent=None, separators=None, encoding='utf-8', default=None,
|
|
||||||
use_decimal=False):
|
|
||||||
"""Constructor for JSONEncoder, with sensible defaults.
|
|
||||||
|
|
||||||
If skipkeys is false, then it is a TypeError to attempt
|
|
||||||
encoding of keys that are not str, int, long, float or None. If
|
|
||||||
skipkeys is True, such items are simply skipped.
|
|
||||||
|
|
||||||
If ensure_ascii is true, the output is guaranteed to be str
|
|
||||||
objects with all incoming unicode characters escaped. If
|
|
||||||
ensure_ascii is false, the output will be unicode object.
|
|
||||||
|
|
||||||
If check_circular is true, then lists, dicts, and custom encoded
|
|
||||||
objects will be checked for circular references during encoding to
|
|
||||||
prevent an infinite recursion (which would cause an OverflowError).
|
|
||||||
Otherwise, no such check takes place.
|
|
||||||
|
|
||||||
If allow_nan is true, then NaN, Infinity, and -Infinity will be
|
|
||||||
encoded as such. This behavior is not JSON specification compliant,
|
|
||||||
but is consistent with most JavaScript based encoders and decoders.
|
|
||||||
Otherwise, it will be a ValueError to encode such floats.
|
|
||||||
|
|
||||||
If sort_keys is true, then the output of dictionaries will be
|
|
||||||
sorted by key; this is useful for regression tests to ensure
|
|
||||||
that JSON serializations can be compared on a day-to-day basis.
|
|
||||||
|
|
||||||
If indent is a string, then JSON array elements and object members
|
|
||||||
will be pretty-printed with a newline followed by that string repeated
|
|
||||||
for each level of nesting. ``None`` (the default) selects the most compact
|
|
||||||
representation without any newlines. For backwards compatibility with
|
|
||||||
versions of simplejson earlier than 2.1.0, an integer is also accepted
|
|
||||||
and is converted to a string with that many spaces.
|
|
||||||
|
|
||||||
If specified, separators should be a (item_separator, key_separator)
|
|
||||||
tuple. The default is (', ', ': '). To get the most compact JSON
|
|
||||||
representation you should specify (',', ':') to eliminate whitespace.
|
|
||||||
|
|
||||||
If specified, default is a function that gets called for objects
|
|
||||||
that can't otherwise be serialized. It should return a JSON encodable
|
|
||||||
version of the object or raise a ``TypeError``.
|
|
||||||
|
|
||||||
If encoding is not None, then all input strings will be
|
|
||||||
transformed into unicode using that encoding prior to JSON-encoding.
|
|
||||||
The default is UTF-8.
|
|
||||||
|
|
||||||
If use_decimal is true (not the default), ``decimal.Decimal`` will
|
|
||||||
be supported directly by the encoder. For the inverse, decode JSON
|
|
||||||
with ``parse_float=decimal.Decimal``.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.skipkeys = skipkeys
|
|
||||||
self.ensure_ascii = ensure_ascii
|
|
||||||
self.check_circular = check_circular
|
|
||||||
self.allow_nan = allow_nan
|
|
||||||
self.sort_keys = sort_keys
|
|
||||||
self.use_decimal = use_decimal
|
|
||||||
if isinstance(indent, (int, long)):
|
|
||||||
indent = ' ' * indent
|
|
||||||
self.indent = indent
|
|
||||||
if separators is not None:
|
|
||||||
self.item_separator, self.key_separator = separators
|
|
||||||
if default is not None:
|
|
||||||
self.default = default
|
|
||||||
self.encoding = encoding
|
|
||||||
|
|
||||||
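A brief illustration of the indent, separators and sort_keys options described in the constructor docstring above, shown through the module-level dumps() helper; the sample data is made up, and the stdlib json module is assumed to behave the same way for these options:

import json

data = {'b': 1, 'a': [1, 2]}
# Most compact form: no whitespace after the separators.
print(json.dumps(data, separators=(',', ':'), sort_keys=True))   # {"a":[1,2],"b":1}
# Pretty-printed: one array element / object member per indented line.
print(json.dumps(data, indent=2, sort_keys=True))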
def default(self, o):
|
|
||||||
"""Implement this method in a subclass such that it returns
|
|
||||||
a serializable object for ``o``, or calls the base implementation
|
|
||||||
(to raise a ``TypeError``).
|
|
||||||
|
|
||||||
For example, to support arbitrary iterators, you could
|
|
||||||
implement default like this::
|
|
||||||
|
|
||||||
def default(self, o):
|
|
||||||
try:
|
|
||||||
iterable = iter(o)
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return list(iterable)
|
|
||||||
return JSONEncoder.default(self, o)
|
|
||||||
|
|
||||||
"""
|
|
||||||
raise TypeError(repr(o) + " is not JSON serializable")
|
|
||||||
|
|
||||||
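The default() hook above is the documented extension point for types the encoder does not handle; a hedged sketch of a subclass that serializes complex numbers (ComplexEncoder is a made-up name, and the stdlib JSONEncoder is used so the snippet stands alone):

import json

class ComplexEncoder(json.JSONEncoder):
    def default(self, o):
        # Return a serializable stand-in, or defer to the base class to raise TypeError.
        if isinstance(o, complex):
            return [o.real, o.imag]
        return json.JSONEncoder.default(self, o)

print(ComplexEncoder().encode(2 + 1j))   # -> [2.0, 1.0]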
def encode(self, o):
|
|
||||||
"""Return a JSON string representation of a Python data structure.
|
|
||||||
|
|
||||||
>>> from simplejson import JSONEncoder
|
|
||||||
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
|
|
||||||
'{"foo": ["bar", "baz"]}'
|
|
||||||
|
|
||||||
"""
|
|
||||||
# This is for extremely simple cases and benchmarks.
|
|
||||||
if isinstance(o, basestring):
|
|
||||||
if isinstance(o, str):
|
|
||||||
_encoding = self.encoding
|
|
||||||
if (_encoding is not None
|
|
||||||
and not (_encoding == 'utf-8')):
|
|
||||||
o = o.decode(_encoding)
|
|
||||||
if self.ensure_ascii:
|
|
||||||
return encode_basestring_ascii(o)
|
|
||||||
else:
|
|
||||||
return encode_basestring(o)
|
|
||||||
# This doesn't pass the iterator directly to ''.join() because the
|
|
||||||
# exceptions aren't as detailed. The list call should be roughly
|
|
||||||
# equivalent to the PySequence_Fast that ''.join() would do.
|
|
||||||
chunks = self.iterencode(o, _one_shot=True)
|
|
||||||
if not isinstance(chunks, (list, tuple)):
|
|
||||||
chunks = list(chunks)
|
|
||||||
if self.ensure_ascii:
|
|
||||||
return ''.join(chunks)
|
|
||||||
else:
|
|
||||||
return u''.join(chunks)
|
|
||||||
|
|
||||||
def iterencode(self, o, _one_shot=False):
|
|
||||||
"""Encode the given object and yield each string
|
|
||||||
representation as available.
|
|
||||||
|
|
||||||
For example::
|
|
||||||
|
|
||||||
for chunk in JSONEncoder().iterencode(bigobject):
|
|
||||||
mysocket.write(chunk)
|
|
||||||
|
|
||||||
"""
|
|
||||||
if self.check_circular:
|
|
||||||
markers = {}
|
|
||||||
else:
|
|
||||||
markers = None
|
|
||||||
if self.ensure_ascii:
|
|
||||||
_encoder = encode_basestring_ascii
|
|
||||||
else:
|
|
||||||
_encoder = encode_basestring
|
|
||||||
if self.encoding != 'utf-8':
|
|
||||||
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
|
|
||||||
if isinstance(o, str):
|
|
||||||
o = o.decode(_encoding)
|
|
||||||
return _orig_encoder(o)
|
|
||||||
|
|
||||||
def floatstr(o, allow_nan=self.allow_nan,
|
|
||||||
_repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
|
|
||||||
# Check for specials. Note that this type of test is processor
|
|
||||||
# and/or platform-specific, so do tests which don't depend on
|
|
||||||
# the internals.
|
|
||||||
|
|
||||||
if o != o:
|
|
||||||
text = 'NaN'
|
|
||||||
elif o == _inf:
|
|
||||||
text = 'Infinity'
|
|
||||||
elif o == _neginf:
|
|
||||||
text = '-Infinity'
|
|
||||||
else:
|
|
||||||
return _repr(o)
|
|
||||||
|
|
||||||
if not allow_nan:
|
|
||||||
raise ValueError(
|
|
||||||
"Out of range float values are not JSON compliant: " +
|
|
||||||
repr(o))
|
|
||||||
|
|
||||||
return text
|
|
||||||
|
|
||||||
|
|
||||||
key_memo = {}
|
|
||||||
if (_one_shot and c_make_encoder is not None
|
|
||||||
and not self.indent and not self.sort_keys):
|
|
||||||
_iterencode = c_make_encoder(
|
|
||||||
markers, self.default, _encoder, self.indent,
|
|
||||||
self.key_separator, self.item_separator, self.sort_keys,
|
|
||||||
self.skipkeys, self.allow_nan, key_memo, self.use_decimal)
|
|
||||||
else:
|
|
||||||
_iterencode = _make_iterencode(
|
|
||||||
markers, self.default, _encoder, self.indent, floatstr,
|
|
||||||
self.key_separator, self.item_separator, self.sort_keys,
|
|
||||||
self.skipkeys, _one_shot, self.use_decimal)
|
|
||||||
try:
|
|
||||||
return _iterencode(o, 0)
|
|
||||||
finally:
|
|
||||||
key_memo.clear()
|
|
||||||
|
|
||||||
|
|
||||||
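The floatstr closure above is what emits the NaN / Infinity / -Infinity literals, or raises ValueError when allow_nan is false; a short illustration, assuming the stdlib json module (which implements the same rule):

import json

print(json.dumps(float('inf')))           # -> Infinity (allow_nan=True is the default)
try:
    json.dumps(float('inf'), allow_nan=False)
except ValueError as e:
    print(e)                              # out-of-range floats are rejected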
class JSONEncoderForHTML(JSONEncoder):
|
|
||||||
"""An encoder that produces JSON safe to embed in HTML.
|
|
||||||
|
|
||||||
To embed JSON content in, say, a script tag on a web page, the
|
|
||||||
characters &, < and > should be escaped. They cannot be escaped
|
|
||||||
with the usual entities (e.g. &) because they are not expanded
|
|
||||||
within <script> tags.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def encode(self, o):
|
|
||||||
# Override JSONEncoder.encode because it has hacks for
|
|
||||||
# performance that make things more complicated.
|
|
||||||
chunks = self.iterencode(o, True)
|
|
||||||
if self.ensure_ascii:
|
|
||||||
return ''.join(chunks)
|
|
||||||
else:
|
|
||||||
return u''.join(chunks)
|
|
||||||
|
|
||||||
def iterencode(self, o, _one_shot=False):
|
|
||||||
chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
|
|
||||||
for chunk in chunks:
|
|
||||||
chunk = chunk.replace('&', '\\u0026')
|
|
||||||
chunk = chunk.replace('<', '\\u003c')
|
|
||||||
chunk = chunk.replace('>', '\\u003e')
|
|
||||||
yield chunk
|
|
||||||
|
|
||||||
|
|
||||||
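JSONEncoderForHTML above escapes &, < and > as \u0026, \u003c and \u003e so the output can be embedded in a <script> tag; the same idea can be sketched with the stdlib module (dumps_for_html is a hypothetical helper, since JSONEncoderForHTML itself ships only with simplejson):

import json

def dumps_for_html(obj):
    # Escape the three characters that could break out of a <script> block.
    return (json.dumps(obj)
            .replace('&', '\\u0026')
            .replace('<', '\\u003c')
            .replace('>', '\\u003e'))

print(dumps_for_html('</script><script>alert("gotcha")</script>'))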
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
|
|
||||||
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
|
|
||||||
_use_decimal,
|
|
||||||
## HACK: hand-optimized bytecode; turn globals into locals
|
|
||||||
False=False,
|
|
||||||
True=True,
|
|
||||||
ValueError=ValueError,
|
|
||||||
basestring=basestring,
|
|
||||||
Decimal=Decimal,
|
|
||||||
dict=dict,
|
|
||||||
float=float,
|
|
||||||
id=id,
|
|
||||||
int=int,
|
|
||||||
isinstance=isinstance,
|
|
||||||
list=list,
|
|
||||||
long=long,
|
|
||||||
str=str,
|
|
||||||
tuple=tuple,
|
|
||||||
):
|
|
||||||
|
|
||||||
def _iterencode_list(lst, _current_indent_level):
|
|
||||||
if not lst:
|
|
||||||
yield '[]'
|
|
||||||
return
|
|
||||||
if markers is not None:
|
|
||||||
markerid = id(lst)
|
|
||||||
if markerid in markers:
|
|
||||||
raise ValueError("Circular reference detected")
|
|
||||||
markers[markerid] = lst
|
|
||||||
buf = '['
|
|
||||||
if _indent is not None:
|
|
||||||
_current_indent_level += 1
|
|
||||||
newline_indent = '\n' + (_indent * _current_indent_level)
|
|
||||||
separator = _item_separator + newline_indent
|
|
||||||
buf += newline_indent
|
|
||||||
else:
|
|
||||||
newline_indent = None
|
|
||||||
separator = _item_separator
|
|
||||||
first = True
|
|
||||||
for value in lst:
|
|
||||||
if first:
|
|
||||||
first = False
|
|
||||||
else:
|
|
||||||
buf = separator
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
yield buf + _encoder(value)
|
|
||||||
elif value is None:
|
|
||||||
yield buf + 'null'
|
|
||||||
elif value is True:
|
|
||||||
yield buf + 'true'
|
|
||||||
elif value is False:
|
|
||||||
yield buf + 'false'
|
|
||||||
elif isinstance(value, (int, long)):
|
|
||||||
yield buf + str(value)
|
|
||||||
elif isinstance(value, float):
|
|
||||||
yield buf + _floatstr(value)
|
|
||||||
elif _use_decimal and isinstance(value, Decimal):
|
|
||||||
yield buf + str(value)
|
|
||||||
else:
|
|
||||||
yield buf
|
|
||||||
if isinstance(value, (list, tuple)):
|
|
||||||
chunks = _iterencode_list(value, _current_indent_level)
|
|
||||||
elif isinstance(value, dict):
|
|
||||||
chunks = _iterencode_dict(value, _current_indent_level)
|
|
||||||
else:
|
|
||||||
chunks = _iterencode(value, _current_indent_level)
|
|
||||||
for chunk in chunks:
|
|
||||||
yield chunk
|
|
||||||
if newline_indent is not None:
|
|
||||||
_current_indent_level -= 1
|
|
||||||
yield '\n' + (_indent * _current_indent_level)
|
|
||||||
yield ']'
|
|
||||||
if markers is not None:
|
|
||||||
del markers[markerid]
|
|
||||||
|
|
||||||
def _iterencode_dict(dct, _current_indent_level):
|
|
||||||
if not dct:
|
|
||||||
yield '{}'
|
|
||||||
return
|
|
||||||
if markers is not None:
|
|
||||||
markerid = id(dct)
|
|
||||||
if markerid in markers:
|
|
||||||
raise ValueError("Circular reference detected")
|
|
||||||
markers[markerid] = dct
|
|
||||||
yield '{'
|
|
||||||
if _indent is not None:
|
|
||||||
_current_indent_level += 1
|
|
||||||
newline_indent = '\n' + (_indent * _current_indent_level)
|
|
||||||
item_separator = _item_separator + newline_indent
|
|
||||||
yield newline_indent
|
|
||||||
else:
|
|
||||||
newline_indent = None
|
|
||||||
item_separator = _item_separator
|
|
||||||
first = True
|
|
||||||
if _sort_keys:
|
|
||||||
items = dct.items()
|
|
||||||
items.sort(key=lambda kv: kv[0])
|
|
||||||
else:
|
|
||||||
items = dct.iteritems()
|
|
||||||
for key, value in items:
|
|
||||||
if isinstance(key, basestring):
|
|
||||||
pass
|
|
||||||
# JavaScript is weakly typed for these, so it makes sense to
|
|
||||||
# also allow them. Many encoders seem to do something like this.
|
|
||||||
elif isinstance(key, float):
|
|
||||||
key = _floatstr(key)
|
|
||||||
elif key is True:
|
|
||||||
key = 'true'
|
|
||||||
elif key is False:
|
|
||||||
key = 'false'
|
|
||||||
elif key is None:
|
|
||||||
key = 'null'
|
|
||||||
elif isinstance(key, (int, long)):
|
|
||||||
key = str(key)
|
|
||||||
elif _skipkeys:
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
raise TypeError("key " + repr(key) + " is not a string")
|
|
||||||
if first:
|
|
||||||
first = False
|
|
||||||
else:
|
|
||||||
yield item_separator
|
|
||||||
yield _encoder(key)
|
|
||||||
yield _key_separator
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
yield _encoder(value)
|
|
||||||
elif value is None:
|
|
||||||
yield 'null'
|
|
||||||
elif value is True:
|
|
||||||
yield 'true'
|
|
||||||
elif value is False:
|
|
||||||
yield 'false'
|
|
||||||
elif isinstance(value, (int, long)):
|
|
||||||
yield str(value)
|
|
||||||
elif isinstance(value, float):
|
|
||||||
yield _floatstr(value)
|
|
||||||
elif _use_decimal and isinstance(value, Decimal):
|
|
||||||
yield str(value)
|
|
||||||
else:
|
|
||||||
if isinstance(value, (list, tuple)):
|
|
||||||
chunks = _iterencode_list(value, _current_indent_level)
|
|
||||||
elif isinstance(value, dict):
|
|
||||||
chunks = _iterencode_dict(value, _current_indent_level)
|
|
||||||
else:
|
|
||||||
chunks = _iterencode(value, _current_indent_level)
|
|
||||||
for chunk in chunks:
|
|
||||||
yield chunk
|
|
||||||
if newline_indent is not None:
|
|
||||||
_current_indent_level -= 1
|
|
||||||
yield '\n' + (_indent * _current_indent_level)
|
|
||||||
yield '}'
|
|
||||||
if markers is not None:
|
|
||||||
del markers[markerid]
|
|
||||||
|
|
||||||
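_iterencode_dict above coerces float, int, bool and None keys to their JSON string form and, when skipkeys is set, silently drops keys it cannot encode; for example (the stdlib json module behaves the same way, and the sample dicts are illustrative):

import json

print(json.dumps({True: 'yes'}))                   # -> {"true": "yes"}
print(json.dumps({4.0: 'four'}))                   # -> {"4.0": "four"}
print(json.dumps({(1, 2): 'x'}, skipkeys=True))    # -> {} (unencodable key dropped)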
def _iterencode(o, _current_indent_level):
|
|
||||||
if isinstance(o, basestring):
|
|
||||||
yield _encoder(o)
|
|
||||||
elif o is None:
|
|
||||||
yield 'null'
|
|
||||||
elif o is True:
|
|
||||||
yield 'true'
|
|
||||||
elif o is False:
|
|
||||||
yield 'false'
|
|
||||||
elif isinstance(o, (int, long)):
|
|
||||||
yield str(o)
|
|
||||||
elif isinstance(o, float):
|
|
||||||
yield _floatstr(o)
|
|
||||||
elif isinstance(o, (list, tuple)):
|
|
||||||
for chunk in _iterencode_list(o, _current_indent_level):
|
|
||||||
yield chunk
|
|
||||||
elif isinstance(o, dict):
|
|
||||||
for chunk in _iterencode_dict(o, _current_indent_level):
|
|
||||||
yield chunk
|
|
||||||
elif _use_decimal and isinstance(o, Decimal):
|
|
||||||
yield str(o)
|
|
||||||
else:
|
|
||||||
if markers is not None:
|
|
||||||
markerid = id(o)
|
|
||||||
if markerid in markers:
|
|
||||||
raise ValueError("Circular reference detected")
|
|
||||||
markers[markerid] = o
|
|
||||||
o = _default(o)
|
|
||||||
for chunk in _iterencode(o, _current_indent_level):
|
|
||||||
yield chunk
|
|
||||||
if markers is not None:
|
|
||||||
del markers[markerid]
|
|
||||||
|
|
||||||
return _iterencode
@ -1,119 +0,0 @@
|
||||||
"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger
|
|
||||||
|
|
||||||
http://code.activestate.com/recipes/576693/
|
|
||||||
|
|
||||||
"""
|
|
||||||
from UserDict import DictMixin
|
|
||||||
|
|
||||||
# Modified from original to support Python 2.4, see
|
|
||||||
# http://code.google.com/p/simplejson/issues/detail?id=53
|
|
||||||
try:
|
|
||||||
all
|
|
||||||
except NameError:
|
|
||||||
def all(seq):
|
|
||||||
for elem in seq:
|
|
||||||
if not elem:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
class OrderedDict(dict, DictMixin):
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwds):
|
|
||||||
if len(args) > 1:
|
|
||||||
raise TypeError('expected at most 1 arguments, got %d' % len(args))
|
|
||||||
try:
|
|
||||||
self.__end
|
|
||||||
except AttributeError:
|
|
||||||
self.clear()
|
|
||||||
self.update(*args, **kwds)
|
|
||||||
|
|
||||||
def clear(self):
|
|
||||||
self.__end = end = []
|
|
||||||
end += [None, end, end] # sentinel node for doubly linked list
|
|
||||||
self.__map = {} # key --> [key, prev, next]
|
|
||||||
dict.clear(self)
|
|
||||||
|
|
||||||
def __setitem__(self, key, value):
|
|
||||||
if key not in self:
|
|
||||||
end = self.__end
|
|
||||||
curr = end[1]
|
|
||||||
curr[2] = end[1] = self.__map[key] = [key, curr, end]
|
|
||||||
dict.__setitem__(self, key, value)
|
|
||||||
|
|
||||||
def __delitem__(self, key):
|
|
||||||
dict.__delitem__(self, key)
|
|
||||||
key, prev, next = self.__map.pop(key)
|
|
||||||
prev[2] = next
|
|
||||||
next[1] = prev
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
end = self.__end
|
|
||||||
curr = end[2]
|
|
||||||
while curr is not end:
|
|
||||||
yield curr[0]
|
|
||||||
curr = curr[2]
|
|
||||||
|
|
||||||
def __reversed__(self):
|
|
||||||
end = self.__end
|
|
||||||
curr = end[1]
|
|
||||||
while curr is not end:
|
|
||||||
yield curr[0]
|
|
||||||
curr = curr[1]
|
|
||||||
|
|
||||||
def popitem(self, last=True):
|
|
||||||
if not self:
|
|
||||||
raise KeyError('dictionary is empty')
|
|
||||||
# Modified from original to support Python 2.4, see
|
|
||||||
# http://code.google.com/p/simplejson/issues/detail?id=53
|
|
||||||
if last:
|
|
||||||
key = reversed(self).next()
|
|
||||||
else:
|
|
||||||
key = iter(self).next()
|
|
||||||
value = self.pop(key)
|
|
||||||
return key, value
|
|
||||||
|
|
||||||
def __reduce__(self):
|
|
||||||
items = [[k, self[k]] for k in self]
|
|
||||||
tmp = self.__map, self.__end
|
|
||||||
del self.__map, self.__end
|
|
||||||
inst_dict = vars(self).copy()
|
|
||||||
self.__map, self.__end = tmp
|
|
||||||
if inst_dict:
|
|
||||||
return (self.__class__, (items,), inst_dict)
|
|
||||||
return self.__class__, (items,)
|
|
||||||
|
|
||||||
def keys(self):
|
|
||||||
return list(self)
|
|
||||||
|
|
||||||
setdefault = DictMixin.setdefault
|
|
||||||
update = DictMixin.update
|
|
||||||
pop = DictMixin.pop
|
|
||||||
values = DictMixin.values
|
|
||||||
items = DictMixin.items
|
|
||||||
iterkeys = DictMixin.iterkeys
|
|
||||||
itervalues = DictMixin.itervalues
|
|
||||||
iteritems = DictMixin.iteritems
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
if not self:
|
|
||||||
return '%s()' % (self.__class__.__name__,)
|
|
||||||
return '%s(%r)' % (self.__class__.__name__, self.items())
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
return self.__class__(self)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def fromkeys(cls, iterable, value=None):
|
|
||||||
d = cls()
|
|
||||||
for key in iterable:
|
|
||||||
d[key] = value
|
|
||||||
return d
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
if isinstance(other, OrderedDict):
|
|
||||||
return len(self)==len(other) and \
|
|
||||||
all(p==q for p, q in zip(self.items(), other.items()))
|
|
||||||
return dict.__eq__(self, other)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return not self == other
|
|
|
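The recipe above exists mainly so that object_pairs_hook=OrderedDict can preserve the key order of a decoded JSON object; an illustration using the stdlib collections.OrderedDict (available since Python 2.7) in place of this bundled copy:

import json
from collections import OrderedDict

od = json.loads('{"b": 1, "a": 2, "c": 3}', object_pairs_hook=OrderedDict)
print(list(od.keys()))   # -> ['b', 'a', 'c'], i.e. document order, not sorted order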
@ -1,77 +0,0 @@
"""JSON token scanner
"""
import re
def _import_c_make_scanner():
    try:
        from simplejson._speedups import make_scanner
        return make_scanner
    except ImportError:
        return None
c_make_scanner = _import_c_make_scanner()

__all__ = ['make_scanner']

NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))

def py_make_scanner(context):
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook
    memo = context.memo

    def _scan_once(string, idx):
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration

        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict,
                _scan_once, object_hook, object_pairs_hook, memo)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration

    def scan_once(string, idx):
        try:
            return _scan_once(string, idx)
        finally:
            memo.clear()

    return scan_once

make_scanner = c_make_scanner or py_make_scanner
|
|
|
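_scan_once above dispatches on the first character of the token and routes numbers through the parse_int / parse_float hooks; those hooks are reachable from the public loads() API, as in this sketch (the stdlib json module accepts the same keyword arguments):

import json
from decimal import Decimal

print(json.loads('[1, 2.5]', parse_int=float, parse_float=Decimal))
# -> [1.0, Decimal('2.5')]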
@ -1,63 +0,0 @@
|
||||||
import unittest
|
|
||||||
import doctest
|
|
||||||
|
|
||||||
|
|
||||||
class OptionalExtensionTestSuite(unittest.TestSuite):
|
|
||||||
def run(self, result):
|
|
||||||
import simplejson
|
|
||||||
run = unittest.TestSuite.run
|
|
||||||
run(self, result)
|
|
||||||
simplejson._toggle_speedups(False)
|
|
||||||
run(self, result)
|
|
||||||
simplejson._toggle_speedups(True)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def additional_tests(suite=None):
|
|
||||||
import simplejson
|
|
||||||
import simplejson.encoder
|
|
||||||
import simplejson.decoder
|
|
||||||
if suite is None:
|
|
||||||
suite = unittest.TestSuite()
|
|
||||||
for mod in (simplejson, simplejson.encoder, simplejson.decoder):
|
|
||||||
suite.addTest(doctest.DocTestSuite(mod))
|
|
||||||
suite.addTest(doctest.DocFileSuite('../../index.rst'))
|
|
||||||
return suite
|
|
||||||
|
|
||||||
|
|
||||||
def all_tests_suite():
|
|
||||||
suite = unittest.TestLoader().loadTestsFromNames([
|
|
||||||
'simplejson.tests.test_check_circular',
|
|
||||||
'simplejson.tests.test_decode',
|
|
||||||
'simplejson.tests.test_default',
|
|
||||||
'simplejson.tests.test_dump',
|
|
||||||
'simplejson.tests.test_encode_basestring_ascii',
|
|
||||||
'simplejson.tests.test_encode_for_html',
|
|
||||||
'simplejson.tests.test_fail',
|
|
||||||
'simplejson.tests.test_float',
|
|
||||||
'simplejson.tests.test_indent',
|
|
||||||
'simplejson.tests.test_pass1',
|
|
||||||
'simplejson.tests.test_pass2',
|
|
||||||
'simplejson.tests.test_pass3',
|
|
||||||
'simplejson.tests.test_recursion',
|
|
||||||
'simplejson.tests.test_scanstring',
|
|
||||||
'simplejson.tests.test_separators',
|
|
||||||
'simplejson.tests.test_speedups',
|
|
||||||
'simplejson.tests.test_unicode',
|
|
||||||
'simplejson.tests.test_decimal',
|
|
||||||
])
|
|
||||||
suite = additional_tests(suite)
|
|
||||||
return OptionalExtensionTestSuite([suite])
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
runner = unittest.TextTestRunner()
|
|
||||||
suite = all_tests_suite()
|
|
||||||
runner.run(suite)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
|
|
||||||
main()
@ -1,30 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
def default_iterable(obj):
|
|
||||||
return list(obj)
|
|
||||||
|
|
||||||
class TestCheckCircular(TestCase):
|
|
||||||
def test_circular_dict(self):
|
|
||||||
dct = {}
|
|
||||||
dct['a'] = dct
|
|
||||||
self.assertRaises(ValueError, json.dumps, dct)
|
|
||||||
|
|
||||||
def test_circular_list(self):
|
|
||||||
lst = []
|
|
||||||
lst.append(lst)
|
|
||||||
self.assertRaises(ValueError, json.dumps, lst)
|
|
||||||
|
|
||||||
def test_circular_composite(self):
|
|
||||||
dct2 = {}
|
|
||||||
dct2['a'] = []
|
|
||||||
dct2['a'].append(dct2)
|
|
||||||
self.assertRaises(ValueError, json.dumps, dct2)
|
|
||||||
|
|
||||||
def test_circular_default(self):
|
|
||||||
json.dumps([set()], default=default_iterable)
|
|
||||||
self.assertRaises(TypeError, json.dumps, [set()])
|
|
||||||
|
|
||||||
def test_circular_off_default(self):
|
|
||||||
json.dumps([set()], default=default_iterable, check_circular=False)
|
|
||||||
self.assertRaises(TypeError, json.dumps, [set()], check_circular=False)
@ -1,33 +0,0 @@
|
||||||
from decimal import Decimal
|
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
class TestDecimal(TestCase):
|
|
||||||
NUMS = "1.0", "10.00", "1.1", "1234567890.1234567890", "500"
|
|
||||||
def test_decimal_encode(self):
|
|
||||||
for d in map(Decimal, self.NUMS):
|
|
||||||
self.assertEquals(json.dumps(d, use_decimal=True), str(d))
|
|
||||||
|
|
||||||
def test_decimal_decode(self):
|
|
||||||
for s in self.NUMS:
|
|
||||||
self.assertEquals(json.loads(s, parse_float=Decimal), Decimal(s))
|
|
||||||
|
|
||||||
def test_decimal_roundtrip(self):
|
|
||||||
for d in map(Decimal, self.NUMS):
|
|
||||||
# The type might not be the same (int and Decimal) but they
|
|
||||||
# should still compare equal.
|
|
||||||
self.assertEquals(
|
|
||||||
json.loads(
|
|
||||||
json.dumps(d, use_decimal=True), parse_float=Decimal),
|
|
||||||
d)
|
|
||||||
self.assertEquals(
|
|
||||||
json.loads(
|
|
||||||
json.dumps([d], use_decimal=True), parse_float=Decimal),
|
|
||||||
[d])
|
|
||||||
|
|
||||||
def test_decimal_defaults(self):
|
|
||||||
d = Decimal(1)
|
|
||||||
# use_decimal=False is the default
|
|
||||||
self.assertRaises(TypeError, json.dumps, d, use_decimal=False)
|
|
||||||
self.assertRaises(TypeError, json.dumps, d)
|
|
|
@ -1,73 +0,0 @@
|
||||||
import decimal
|
|
||||||
from unittest import TestCase
|
|
||||||
from StringIO import StringIO
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
from simplejson import OrderedDict
|
|
||||||
|
|
||||||
class TestDecode(TestCase):
|
|
||||||
if not hasattr(TestCase, 'assertIs'):
|
|
||||||
def assertIs(self, a, b):
|
|
||||||
self.assertTrue(a is b, '%r is %r' % (a, b))
|
|
||||||
|
|
||||||
def test_decimal(self):
|
|
||||||
rval = json.loads('1.1', parse_float=decimal.Decimal)
|
|
||||||
self.assertTrue(isinstance(rval, decimal.Decimal))
|
|
||||||
self.assertEquals(rval, decimal.Decimal('1.1'))
|
|
||||||
|
|
||||||
def test_float(self):
|
|
||||||
rval = json.loads('1', parse_int=float)
|
|
||||||
self.assertTrue(isinstance(rval, float))
|
|
||||||
self.assertEquals(rval, 1.0)
|
|
||||||
|
|
||||||
def test_decoder_optimizations(self):
|
|
||||||
# Several optimizations were made that skip over calls to
|
|
||||||
# the whitespace regex, so this test is designed to try and
|
|
||||||
# exercise the uncommon cases. The array cases are already covered.
|
|
||||||
rval = json.loads('{ "key" : "value" , "k":"v" }')
|
|
||||||
self.assertEquals(rval, {"key":"value", "k":"v"})
|
|
||||||
|
|
||||||
def test_empty_objects(self):
|
|
||||||
s = '{}'
|
|
||||||
self.assertEqual(json.loads(s), eval(s))
|
|
||||||
s = '[]'
|
|
||||||
self.assertEqual(json.loads(s), eval(s))
|
|
||||||
s = '""'
|
|
||||||
self.assertEqual(json.loads(s), eval(s))
|
|
||||||
|
|
||||||
def test_object_pairs_hook(self):
|
|
||||||
s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
|
|
||||||
p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4),
|
|
||||||
("qrt", 5), ("pad", 6), ("hoy", 7)]
|
|
||||||
self.assertEqual(json.loads(s), eval(s))
|
|
||||||
self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
|
|
||||||
self.assertEqual(json.load(StringIO(s),
|
|
||||||
object_pairs_hook=lambda x: x), p)
|
|
||||||
od = json.loads(s, object_pairs_hook=OrderedDict)
|
|
||||||
self.assertEqual(od, OrderedDict(p))
|
|
||||||
self.assertEqual(type(od), OrderedDict)
|
|
||||||
# the object_pairs_hook takes priority over the object_hook
|
|
||||||
self.assertEqual(json.loads(s,
|
|
||||||
object_pairs_hook=OrderedDict,
|
|
||||||
object_hook=lambda x: None),
|
|
||||||
OrderedDict(p))
|
|
||||||
|
|
||||||
def check_keys_reuse(self, source, loads):
|
|
||||||
rval = loads(source)
|
|
||||||
(a, b), (c, d) = sorted(rval[0]), sorted(rval[1])
|
|
||||||
self.assertIs(a, c)
|
|
||||||
self.assertIs(b, d)
|
|
||||||
|
|
||||||
def test_keys_reuse_str(self):
|
|
||||||
s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'.encode('utf8')
|
|
||||||
self.check_keys_reuse(s, json.loads)
|
|
||||||
|
|
||||||
def test_keys_reuse_unicode(self):
|
|
||||||
s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'
|
|
||||||
self.check_keys_reuse(s, json.loads)
|
|
||||||
|
|
||||||
def test_empty_strings(self):
|
|
||||||
self.assertEqual(json.loads('""'), "")
|
|
||||||
self.assertEqual(json.loads(u'""'), u"")
|
|
||||||
self.assertEqual(json.loads('[""]'), [""])
|
|
||||||
self.assertEqual(json.loads(u'[""]'), [u""])
|
|
|
@ -1,9 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
class TestDefault(TestCase):
|
|
||||||
def test_default(self):
|
|
||||||
self.assertEquals(
|
|
||||||
json.dumps(type, default=repr),
|
|
||||||
json.dumps(repr(type)))
|
|
|
@ -1,27 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
from cStringIO import StringIO
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
class TestDump(TestCase):
|
|
||||||
def test_dump(self):
|
|
||||||
sio = StringIO()
|
|
||||||
json.dump({}, sio)
|
|
||||||
self.assertEquals(sio.getvalue(), '{}')
|
|
||||||
|
|
||||||
def test_dumps(self):
|
|
||||||
self.assertEquals(json.dumps({}), '{}')
|
|
||||||
|
|
||||||
def test_encode_truefalse(self):
|
|
||||||
self.assertEquals(json.dumps(
|
|
||||||
{True: False, False: True}, sort_keys=True),
|
|
||||||
'{"false": true, "true": false}')
|
|
||||||
self.assertEquals(json.dumps(
|
|
||||||
{2: 3.0, 4.0: 5L, False: 1, 6L: True, "7": 0}, sort_keys=True),
|
|
||||||
'{"false": 1, "2": 3.0, "4.0": 5, "6": true, "7": 0}')
|
|
||||||
|
|
||||||
def test_ordered_dict(self):
|
|
||||||
# http://bugs.python.org/issue6105
|
|
||||||
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
|
|
||||||
s = json.dumps(json.OrderedDict(items))
|
|
||||||
self.assertEqual(s, '{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
|
|
|
@ -1,41 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson.encoder
|
|
||||||
|
|
||||||
CASES = [
|
|
||||||
(u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
|
|
||||||
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
|
|
||||||
(u'controls', '"controls"'),
|
|
||||||
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
|
|
||||||
(u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
|
|
||||||
(u' s p a c e d ', '" s p a c e d "'),
|
|
||||||
(u'\U0001d120', '"\\ud834\\udd20"'),
|
|
||||||
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
|
||||||
('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
|
|
||||||
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
|
||||||
('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
|
|
||||||
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
|
||||||
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
|
||||||
(u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
|
|
||||||
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
|
|
||||||
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
|
|
||||||
]
|
|
||||||
|
|
||||||
class TestEncodeBaseStringAscii(TestCase):
|
|
||||||
def test_py_encode_basestring_ascii(self):
|
|
||||||
self._test_encode_basestring_ascii(simplejson.encoder.py_encode_basestring_ascii)
|
|
||||||
|
|
||||||
def test_c_encode_basestring_ascii(self):
|
|
||||||
if not simplejson.encoder.c_encode_basestring_ascii:
|
|
||||||
return
|
|
||||||
self._test_encode_basestring_ascii(simplejson.encoder.c_encode_basestring_ascii)
|
|
||||||
|
|
||||||
def _test_encode_basestring_ascii(self, encode_basestring_ascii):
|
|
||||||
fname = encode_basestring_ascii.__name__
|
|
||||||
for input_string, expect in CASES:
|
|
||||||
result = encode_basestring_ascii(input_string)
|
|
||||||
#self.assertEquals(result, expect,
|
|
||||||
# '{0!r} != {1!r} for {2}({3!r})'.format(
|
|
||||||
# result, expect, fname, input_string))
|
|
||||||
self.assertEquals(result, expect,
|
|
||||||
'%r != %r for %s(%r)' % (result, expect, fname, input_string))
|
|
|
@ -1,32 +0,0 @@
|
||||||
import unittest
|
|
||||||
|
|
||||||
import simplejson.decoder
|
|
||||||
import simplejson.encoder
|
|
||||||
|
|
||||||
|
|
||||||
class TestEncodeForHTML(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.decoder = simplejson.decoder.JSONDecoder()
|
|
||||||
self.encoder = simplejson.encoder.JSONEncoderForHTML()
|
|
||||||
|
|
||||||
def test_basic_encode(self):
|
|
||||||
self.assertEqual(r'"\u0026"', self.encoder.encode('&'))
|
|
||||||
self.assertEqual(r'"\u003c"', self.encoder.encode('<'))
|
|
||||||
self.assertEqual(r'"\u003e"', self.encoder.encode('>'))
|
|
||||||
|
|
||||||
def test_basic_roundtrip(self):
|
|
||||||
for char in '&<>':
|
|
||||||
self.assertEqual(
|
|
||||||
char, self.decoder.decode(
|
|
||||||
self.encoder.encode(char)))
|
|
||||||
|
|
||||||
def test_prevent_script_breakout(self):
|
|
||||||
bad_string = '</script><script>alert("gotcha")</script>'
|
|
||||||
self.assertEqual(
|
|
||||||
r'"\u003c/script\u003e\u003cscript\u003e'
|
|
||||||
r'alert(\"gotcha\")\u003c/script\u003e"',
|
|
||||||
self.encoder.encode(bad_string))
|
|
||||||
self.assertEqual(
|
|
||||||
bad_string, self.decoder.decode(
|
|
||||||
self.encoder.encode(bad_string)))
|
|
|
@ -1,91 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
# Fri Dec 30 18:57:26 2005
|
|
||||||
JSONDOCS = [
|
|
||||||
# http://json.org/JSON_checker/test/fail1.json
|
|
||||||
'"A JSON payload should be an object or array, not a string."',
|
|
||||||
# http://json.org/JSON_checker/test/fail2.json
|
|
||||||
'["Unclosed array"',
|
|
||||||
# http://json.org/JSON_checker/test/fail3.json
|
|
||||||
'{unquoted_key: "keys must be quoted}',
|
|
||||||
# http://json.org/JSON_checker/test/fail4.json
|
|
||||||
'["extra comma",]',
|
|
||||||
# http://json.org/JSON_checker/test/fail5.json
|
|
||||||
'["double extra comma",,]',
|
|
||||||
# http://json.org/JSON_checker/test/fail6.json
|
|
||||||
'[ , "<-- missing value"]',
|
|
||||||
# http://json.org/JSON_checker/test/fail7.json
|
|
||||||
'["Comma after the close"],',
|
|
||||||
# http://json.org/JSON_checker/test/fail8.json
|
|
||||||
'["Extra close"]]',
|
|
||||||
# http://json.org/JSON_checker/test/fail9.json
|
|
||||||
'{"Extra comma": true,}',
|
|
||||||
# http://json.org/JSON_checker/test/fail10.json
|
|
||||||
'{"Extra value after close": true} "misplaced quoted value"',
|
|
||||||
# http://json.org/JSON_checker/test/fail11.json
|
|
||||||
'{"Illegal expression": 1 + 2}',
|
|
||||||
# http://json.org/JSON_checker/test/fail12.json
|
|
||||||
'{"Illegal invocation": alert()}',
|
|
||||||
# http://json.org/JSON_checker/test/fail13.json
|
|
||||||
'{"Numbers cannot have leading zeroes": 013}',
|
|
||||||
# http://json.org/JSON_checker/test/fail14.json
|
|
||||||
'{"Numbers cannot be hex": 0x14}',
|
|
||||||
# http://json.org/JSON_checker/test/fail15.json
|
|
||||||
'["Illegal backslash escape: \\x15"]',
|
|
||||||
# http://json.org/JSON_checker/test/fail16.json
|
|
||||||
'["Illegal backslash escape: \\\'"]',
|
|
||||||
# http://json.org/JSON_checker/test/fail17.json
|
|
||||||
'["Illegal backslash escape: \\017"]',
|
|
||||||
# http://json.org/JSON_checker/test/fail18.json
|
|
||||||
'[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
|
|
||||||
# http://json.org/JSON_checker/test/fail19.json
|
|
||||||
'{"Missing colon" null}',
|
|
||||||
# http://json.org/JSON_checker/test/fail20.json
|
|
||||||
'{"Double colon":: null}',
|
|
||||||
# http://json.org/JSON_checker/test/fail21.json
|
|
||||||
'{"Comma instead of colon", null}',
|
|
||||||
# http://json.org/JSON_checker/test/fail22.json
|
|
||||||
'["Colon instead of comma": false]',
|
|
||||||
# http://json.org/JSON_checker/test/fail23.json
|
|
||||||
'["Bad value", truth]',
|
|
||||||
# http://json.org/JSON_checker/test/fail24.json
|
|
||||||
"['single quote']",
|
|
||||||
# http://code.google.com/p/simplejson/issues/detail?id=3
|
|
||||||
u'["A\u001FZ control characters in string"]',
|
|
||||||
]
|
|
||||||
|
|
||||||
SKIPS = {
|
|
||||||
1: "why not have a string payload?",
|
|
||||||
18: "spec doesn't specify any nesting limitations",
|
|
||||||
}
|
|
||||||
|
|
||||||
class TestFail(TestCase):
|
|
||||||
def test_failures(self):
|
|
||||||
for idx, doc in enumerate(JSONDOCS):
|
|
||||||
idx = idx + 1
|
|
||||||
if idx in SKIPS:
|
|
||||||
json.loads(doc)
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
json.loads(doc)
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
#self.fail("Expected failure for fail{0}.json: {1!r}".format(idx, doc))
|
|
||||||
self.fail("Expected failure for fail%d.json: %r" % (idx, doc))
|
|
||||||
|
|
||||||
def test_array_decoder_issue46(self):
|
|
||||||
# http://code.google.com/p/simplejson/issues/detail?id=46
|
|
||||||
for doc in [u'[,]', '[,]']:
|
|
||||||
try:
|
|
||||||
json.loads(doc)
|
|
||||||
except json.JSONDecodeError, e:
|
|
||||||
self.assertEquals(e.pos, 1)
|
|
||||||
self.assertEquals(e.lineno, 1)
|
|
||||||
self.assertEquals(e.colno, 1)
|
|
||||||
except Exception, e:
|
|
||||||
self.fail("Unexpected exception raised %r %s" % (e, e))
|
|
||||||
else:
|
|
||||||
self.fail("Unexpected success parsing '[,]'")
|
|
|
@ -1,19 +0,0 @@
|
||||||
import math
|
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
class TestFloat(TestCase):
|
|
||||||
def test_floats(self):
|
|
||||||
for num in [1617161771.7650001, math.pi, math.pi**100,
|
|
||||||
math.pi**-100, 3.1]:
|
|
||||||
self.assertEquals(float(json.dumps(num)), num)
|
|
||||||
self.assertEquals(json.loads(json.dumps(num)), num)
|
|
||||||
self.assertEquals(json.loads(unicode(json.dumps(num))), num)
|
|
||||||
|
|
||||||
def test_ints(self):
|
|
||||||
for num in [1, 1L, 1<<32, 1<<64]:
|
|
||||||
self.assertEquals(json.dumps(num), str(num))
|
|
||||||
self.assertEquals(int(json.dumps(num)), num)
|
|
||||||
self.assertEquals(json.loads(json.dumps(num)), num)
|
|
||||||
self.assertEquals(json.loads(unicode(json.dumps(num))), num)
|
|
|
@ -1,53 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
import textwrap
|
|
||||||
|
|
||||||
class TestIndent(TestCase):
|
|
||||||
def test_indent(self):
|
|
||||||
h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh',
|
|
||||||
'i-vhbjkhnth',
|
|
||||||
{'nifty': 87}, {'field': 'yes', 'morefield': False} ]
|
|
||||||
|
|
||||||
expect = textwrap.dedent("""\
|
|
||||||
[
|
|
||||||
\t[
|
|
||||||
\t\t"blorpie"
|
|
||||||
\t],
|
|
||||||
\t[
|
|
||||||
\t\t"whoops"
|
|
||||||
\t],
|
|
||||||
\t[],
|
|
||||||
\t"d-shtaeou",
|
|
||||||
\t"d-nthiouh",
|
|
||||||
\t"i-vhbjkhnth",
|
|
||||||
\t{
|
|
||||||
\t\t"nifty": 87
|
|
||||||
\t},
|
|
||||||
\t{
|
|
||||||
\t\t"field": "yes",
|
|
||||||
\t\t"morefield": false
|
|
||||||
\t}
|
|
||||||
]""")
|
|
||||||
|
|
||||||
|
|
||||||
d1 = json.dumps(h)
|
|
||||||
d2 = json.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
|
|
||||||
d3 = json.dumps(h, indent=' ', sort_keys=True, separators=(',', ': '))
|
|
||||||
d4 = json.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
|
|
||||||
|
|
||||||
h1 = json.loads(d1)
|
|
||||||
h2 = json.loads(d2)
|
|
||||||
h3 = json.loads(d3)
|
|
||||||
h4 = json.loads(d4)
|
|
||||||
|
|
||||||
self.assertEquals(h1, h)
|
|
||||||
self.assertEquals(h2, h)
|
|
||||||
self.assertEquals(h3, h)
|
|
||||||
self.assertEquals(h4, h)
|
|
||||||
self.assertEquals(d3, expect.replace('\t', ' '))
|
|
||||||
self.assertEquals(d4, expect.replace('\t', ' '))
|
|
||||||
# NOTE: Python 2.4 textwrap.dedent converts tabs to spaces,
|
|
||||||
# so the following is expected to fail. Python 2.4 is not a
|
|
||||||
# supported platform in simplejson 2.1.0+.
|
|
||||||
self.assertEquals(d2, expect)
|
|
|
@ -1,76 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
# from http://json.org/JSON_checker/test/pass1.json
|
|
||||||
JSON = r'''
|
|
||||||
[
|
|
||||||
"JSON Test Pattern pass1",
|
|
||||||
{"object with 1 member":["array with 1 element"]},
|
|
||||||
{},
|
|
||||||
[],
|
|
||||||
-42,
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
null,
|
|
||||||
{
|
|
||||||
"integer": 1234567890,
|
|
||||||
"real": -9876.543210,
|
|
||||||
"e": 0.123456789e-12,
|
|
||||||
"E": 1.234567890E+34,
|
|
||||||
"": 23456789012E666,
|
|
||||||
"zero": 0,
|
|
||||||
"one": 1,
|
|
||||||
"space": " ",
|
|
||||||
"quote": "\"",
|
|
||||||
"backslash": "\\",
|
|
||||||
"controls": "\b\f\n\r\t",
|
|
||||||
"slash": "/ & \/",
|
|
||||||
"alpha": "abcdefghijklmnopqrstuvwyz",
|
|
||||||
"ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ",
|
|
||||||
"digit": "0123456789",
|
|
||||||
"special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?",
|
|
||||||
"hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A",
|
|
||||||
"true": true,
|
|
||||||
"false": false,
|
|
||||||
"null": null,
|
|
||||||
"array":[ ],
|
|
||||||
"object":{ },
|
|
||||||
"address": "50 St. James Street",
|
|
||||||
"url": "http://www.JSON.org/",
|
|
||||||
"comment": "// /* <!-- --",
|
|
||||||
"# -- --> */": " ",
|
|
||||||
" s p a c e d " :[1,2 , 3
|
|
||||||
|
|
||||||
,
|
|
||||||
|
|
||||||
4 , 5 , 6 ,7 ],
|
|
||||||
"compact": [1,2,3,4,5,6,7],
|
|
||||||
"jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}",
|
|
||||||
"quotes": "" \u0022 %22 0x22 034 "",
|
|
||||||
"\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"
|
|
||||||
: "A key can be any string"
|
|
||||||
},
|
|
||||||
0.5 ,98.6
|
|
||||||
,
|
|
||||||
99.44
|
|
||||||
,
|
|
||||||
|
|
||||||
1066
|
|
||||||
|
|
||||||
|
|
||||||
,"rosebud"]
|
|
||||||
'''
|
|
||||||
|
|
||||||
class TestPass1(TestCase):
|
|
||||||
def test_parse(self):
|
|
||||||
# test in/out equivalence and parsing
|
|
||||||
res = json.loads(JSON)
|
|
||||||
out = json.dumps(res)
|
|
||||||
self.assertEquals(res, json.loads(out))
|
|
||||||
try:
|
|
||||||
json.dumps(res, allow_nan=False)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
self.fail("23456789012E666 should be out of range")
|
|
|
@ -1,14 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
# from http://json.org/JSON_checker/test/pass2.json
|
|
||||||
JSON = r'''
|
|
||||||
[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]
|
|
||||||
'''
|
|
||||||
|
|
||||||
class TestPass2(TestCase):
|
|
||||||
def test_parse(self):
|
|
||||||
# test in/out equivalence and parsing
|
|
||||||
res = json.loads(JSON)
|
|
||||||
out = json.dumps(res)
|
|
||||||
self.assertEquals(res, json.loads(out))
|
|
|
@ -1,20 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
# from http://json.org/JSON_checker/test/pass3.json
|
|
||||||
JSON = r'''
|
|
||||||
{
|
|
||||||
"JSON Test Pattern pass3": {
|
|
||||||
"The outermost value": "must be an object or array.",
|
|
||||||
"In this test": "It is an object."
|
|
||||||
}
|
|
||||||
}
|
|
||||||
'''
|
|
||||||
|
|
||||||
class TestPass3(TestCase):
|
|
||||||
def test_parse(self):
|
|
||||||
# test in/out equivalence and parsing
|
|
||||||
res = json.loads(JSON)
|
|
||||||
out = json.dumps(res)
|
|
||||||
self.assertEquals(res, json.loads(out))
|
|
|
@ -1,67 +0,0 @@
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
class JSONTestObject:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class RecursiveJSONEncoder(json.JSONEncoder):
|
|
||||||
recurse = False
|
|
||||||
def default(self, o):
|
|
||||||
if o is JSONTestObject:
|
|
||||||
if self.recurse:
|
|
||||||
return [JSONTestObject]
|
|
||||||
else:
|
|
||||||
return 'JSONTestObject'
|
|
||||||
return json.JSONEncoder.default(o)
|
|
||||||
|
|
||||||
|
|
||||||
class TestRecursion(TestCase):
|
|
||||||
def test_listrecursion(self):
|
|
||||||
x = []
|
|
||||||
x.append(x)
|
|
||||||
try:
|
|
||||||
json.dumps(x)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
self.fail("didn't raise ValueError on list recursion")
|
|
||||||
x = []
|
|
||||||
y = [x]
|
|
||||||
x.append(y)
|
|
||||||
try:
|
|
||||||
json.dumps(x)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
self.fail("didn't raise ValueError on alternating list recursion")
|
|
||||||
y = []
|
|
||||||
x = [y, y]
|
|
||||||
# ensure that the marker is cleared
|
|
||||||
json.dumps(x)
|
|
||||||
|
|
||||||
def test_dictrecursion(self):
|
|
||||||
x = {}
|
|
||||||
x["test"] = x
|
|
||||||
try:
|
|
||||||
json.dumps(x)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
self.fail("didn't raise ValueError on dict recursion")
|
|
||||||
x = {}
|
|
||||||
y = {"a": x, "b": x}
|
|
||||||
# ensure that the marker is cleared
|
|
||||||
json.dumps(x)
|
|
||||||
|
|
||||||
def test_defaultrecursion(self):
|
|
||||||
enc = RecursiveJSONEncoder()
|
|
||||||
self.assertEquals(enc.encode(JSONTestObject), '"JSONTestObject"')
|
|
||||||
enc.recurse = True
|
|
||||||
try:
|
|
||||||
enc.encode(JSONTestObject)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
self.fail("didn't raise ValueError on default recursion")
|
|
|
@ -1,117 +0,0 @@
|
||||||
import sys
|
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
import simplejson.decoder
|
|
||||||
|
|
||||||
class TestScanString(TestCase):
|
|
||||||
def test_py_scanstring(self):
|
|
||||||
self._test_scanstring(simplejson.decoder.py_scanstring)
|
|
||||||
|
|
||||||
def test_c_scanstring(self):
|
|
||||||
if not simplejson.decoder.c_scanstring:
|
|
||||||
return
|
|
||||||
self._test_scanstring(simplejson.decoder.c_scanstring)
|
|
||||||
|
|
||||||
def _test_scanstring(self, scanstring):
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('"z\\ud834\\udd20x"', 1, None, True),
|
|
||||||
(u'z\U0001d120x', 16))
|
|
||||||
|
|
||||||
if sys.maxunicode == 65535:
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring(u'"z\U0001d120x"', 1, None, True),
|
|
||||||
(u'z\U0001d120x', 6))
|
|
||||||
else:
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring(u'"z\U0001d120x"', 1, None, True),
|
|
||||||
(u'z\U0001d120x', 5))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('"\\u007b"', 1, None, True),
|
|
||||||
(u'{', 8))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True),
|
|
||||||
(u'A JSON payload should be an object or array, not a string.', 60))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('["Unclosed array"', 2, None, True),
|
|
||||||
(u'Unclosed array', 17))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('["extra comma",]', 2, None, True),
|
|
||||||
(u'extra comma', 14))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('["double extra comma",,]', 2, None, True),
|
|
||||||
(u'double extra comma', 21))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('["Comma after the close"],', 2, None, True),
|
|
||||||
(u'Comma after the close', 24))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('["Extra close"]]', 2, None, True),
|
|
||||||
(u'Extra close', 14))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Extra comma": true,}', 2, None, True),
|
|
||||||
(u'Extra comma', 14))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True),
|
|
||||||
(u'Extra value after close', 26))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Illegal expression": 1 + 2}', 2, None, True),
|
|
||||||
(u'Illegal expression', 21))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Illegal invocation": alert()}', 2, None, True),
|
|
||||||
(u'Illegal invocation', 21))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True),
|
|
||||||
(u'Numbers cannot have leading zeroes', 37))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True),
|
|
||||||
(u'Numbers cannot be hex', 24))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True),
|
|
||||||
(u'Too deep', 30))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Missing colon" null}', 2, None, True),
|
|
||||||
(u'Missing colon', 16))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Double colon":: null}', 2, None, True),
|
|
||||||
(u'Double colon', 15))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('{"Comma instead of colon", null}', 2, None, True),
|
|
||||||
(u'Comma instead of colon', 25))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('["Colon instead of comma": false]', 2, None, True),
|
|
||||||
(u'Colon instead of comma', 25))
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
scanstring('["Bad value", truth]', 2, None, True),
|
|
||||||
(u'Bad value', 12))
|
|
||||||
|
|
||||||
def test_issue3623(self):
|
|
||||||
self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1,
|
|
||||||
"xxx")
|
|
||||||
self.assertRaises(UnicodeDecodeError,
|
|
||||||
json.encoder.encode_basestring_ascii, "xx\xff")
|
|
||||||
|
|
||||||
def test_overflow(self):
|
|
||||||
# Python 2.5 does not have maxsize
|
|
||||||
maxsize = getattr(sys, 'maxsize', sys.maxint)
|
|
||||||
self.assertRaises(OverflowError, json.decoder.scanstring, "xxx",
|
|
||||||
maxsize + 1)
|
|
||||||
|
|
|
@ -1,42 +0,0 @@
|
||||||
import textwrap
|
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
|
|
||||||
class TestSeparators(TestCase):
|
|
||||||
def test_separators(self):
|
|
||||||
h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
|
|
||||||
{'nifty': 87}, {'field': 'yes', 'morefield': False} ]
|
|
||||||
|
|
||||||
expect = textwrap.dedent("""\
|
|
||||||
[
|
|
||||||
[
|
|
||||||
"blorpie"
|
|
||||||
] ,
|
|
||||||
[
|
|
||||||
"whoops"
|
|
||||||
] ,
|
|
||||||
[] ,
|
|
||||||
"d-shtaeou" ,
|
|
||||||
"d-nthiouh" ,
|
|
||||||
"i-vhbjkhnth" ,
|
|
||||||
{
|
|
||||||
"nifty" : 87
|
|
||||||
} ,
|
|
||||||
{
|
|
||||||
"field" : "yes" ,
|
|
||||||
"morefield" : false
|
|
||||||
}
|
|
||||||
]""")
|
|
||||||
|
|
||||||
|
|
||||||
d1 = json.dumps(h)
|
|
||||||
d2 = json.dumps(h, indent=' ', sort_keys=True, separators=(' ,', ' : '))
|
|
||||||
|
|
||||||
h1 = json.loads(d1)
|
|
||||||
h2 = json.loads(d2)
|
|
||||||
|
|
||||||
self.assertEquals(h1, h)
|
|
||||||
self.assertEquals(h2, h)
|
|
||||||
self.assertEquals(d2, expect)
|
|
|
@ -1,21 +0,0 @@
|
||||||
import decimal
|
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
from simplejson import decoder, encoder, scanner
|
|
||||||
|
|
||||||
def has_speedups():
|
|
||||||
return encoder.c_make_encoder is not None
|
|
||||||
|
|
||||||
class TestDecode(TestCase):
|
|
||||||
def test_make_scanner(self):
|
|
||||||
if not has_speedups():
|
|
||||||
return
|
|
||||||
self.assertRaises(AttributeError, scanner.c_make_scanner, 1)
|
|
||||||
|
|
||||||
def test_make_encoder(self):
|
|
||||||
if not has_speedups():
|
|
||||||
return
|
|
||||||
self.assertRaises(TypeError, encoder.c_make_encoder,
|
|
||||||
None,
|
|
||||||
"\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75",
|
|
||||||
None)
|
|
@@ -1,99 +0,0 @@
-from unittest import TestCase
-
-import simplejson as json
-
-class TestUnicode(TestCase):
-    def test_encoding1(self):
-        encoder = json.JSONEncoder(encoding='utf-8')
-        u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        s = u.encode('utf-8')
-        ju = encoder.encode(u)
-        js = encoder.encode(s)
-        self.assertEquals(ju, js)
-
-    def test_encoding2(self):
-        u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        s = u.encode('utf-8')
-        ju = json.dumps(u, encoding='utf-8')
-        js = json.dumps(s, encoding='utf-8')
-        self.assertEquals(ju, js)
-
-    def test_encoding3(self):
-        u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps(u)
-        self.assertEquals(j, '"\\u03b1\\u03a9"')
-
-    def test_encoding4(self):
-        u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps([u])
-        self.assertEquals(j, '["\\u03b1\\u03a9"]')
-
-    def test_encoding5(self):
-        u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps(u, ensure_ascii=False)
-        self.assertEquals(j, u'"' + u + u'"')
-
-    def test_encoding6(self):
-        u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps([u], ensure_ascii=False)
-        self.assertEquals(j, u'["' + u + u'"]')
-
-    def test_big_unicode_encode(self):
-        u = u'\U0001d120'
-        self.assertEquals(json.dumps(u), '"\\ud834\\udd20"')
-        self.assertEquals(json.dumps(u, ensure_ascii=False), u'"\U0001d120"')
-
-    def test_big_unicode_decode(self):
-        u = u'z\U0001d120x'
-        self.assertEquals(json.loads('"' + u + '"'), u)
-        self.assertEquals(json.loads('"z\\ud834\\udd20x"'), u)
-
-    def test_unicode_decode(self):
-        for i in range(0, 0xd7ff):
-            u = unichr(i)
-            #s = '"\\u{0:04x}"'.format(i)
-            s = '"\\u%04x"' % (i,)
-            self.assertEquals(json.loads(s), u)
-
-    def test_object_pairs_hook_with_unicode(self):
-        s = u'{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
-        p = [(u"xkd", 1), (u"kcw", 2), (u"art", 3), (u"hxm", 4),
-             (u"qrt", 5), (u"pad", 6), (u"hoy", 7)]
-        self.assertEqual(json.loads(s), eval(s))
-        self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
-        od = json.loads(s, object_pairs_hook=json.OrderedDict)
-        self.assertEqual(od, json.OrderedDict(p))
-        self.assertEqual(type(od), json.OrderedDict)
-        # the object_pairs_hook takes priority over the object_hook
-        self.assertEqual(json.loads(s,
-                                    object_pairs_hook=json.OrderedDict,
-                                    object_hook=lambda x: None),
-                         json.OrderedDict(p))
-
-
-    def test_default_encoding(self):
-        self.assertEquals(json.loads(u'{"a": "\xe9"}'.encode('utf-8')),
-                          {'a': u'\xe9'})
-
-    def test_unicode_preservation(self):
-        self.assertEquals(type(json.loads(u'""')), unicode)
-        self.assertEquals(type(json.loads(u'"a"')), unicode)
-        self.assertEquals(type(json.loads(u'["a"]')[0]), unicode)
-
-    def test_ensure_ascii_false_returns_unicode(self):
-        # http://code.google.com/p/simplejson/issues/detail?id=48
-        self.assertEquals(type(json.dumps([], ensure_ascii=False)), unicode)
-        self.assertEquals(type(json.dumps(0, ensure_ascii=False)), unicode)
-        self.assertEquals(type(json.dumps({}, ensure_ascii=False)), unicode)
-        self.assertEquals(type(json.dumps("", ensure_ascii=False)), unicode)
-
-    def test_ensure_ascii_false_bytestring_encoding(self):
-        # http://code.google.com/p/simplejson/issues/detail?id=48
-        doc1 = {u'quux': 'Arr\xc3\xaat sur images'}
-        doc2 = {u'quux': u'Arr\xeat sur images'}
-        doc_ascii = '{"quux": "Arr\\u00eat sur images"}'
-        doc_unicode = u'{"quux": "Arr\xeat sur images"}'
-        self.assertEquals(json.dumps(doc1), doc_ascii)
-        self.assertEquals(json.dumps(doc2), doc_ascii)
-        self.assertEquals(json.dumps(doc1, ensure_ascii=False), doc_unicode)
-        self.assertEquals(json.dumps(doc2, ensure_ascii=False), doc_unicode)
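For reference, a quick spot-check (not part of this patch, Python 2.7 assumed) that the escaping behaviour the deleted unicode tests exercised is also provided by the stdlib json module:

    import json

    u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
    assert json.dumps(u) == '"\\u03b1\\u03a9"'                   # ASCII-escaped by default
    assert json.dumps(u, ensure_ascii=False) == u'"' + u + u'"'  # unicode passthrough
    assert json.loads('"z\\ud834\\udd20x"') == u'z\U0001d120x'   # surrogate pairs decode
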
@@ -1,39 +0,0 @@
-r"""Command-line tool to validate and pretty-print JSON
-
-Usage::
-
-    $ echo '{"json":"obj"}' | python -m simplejson.tool
-    {
-        "json": "obj"
-    }
-    $ echo '{ 1.2:3.4}' | python -m simplejson.tool
-    Expecting property name: line 1 column 2 (char 2)
-
-"""
-import sys
-import simplejson as json
-
-def main():
-    if len(sys.argv) == 1:
-        infile = sys.stdin
-        outfile = sys.stdout
-    elif len(sys.argv) == 2:
-        infile = open(sys.argv[1], 'rb')
-        outfile = sys.stdout
-    elif len(sys.argv) == 3:
-        infile = open(sys.argv[1], 'rb')
-        outfile = open(sys.argv[2], 'wb')
-    else:
-        raise SystemExit(sys.argv[0] + " [infile [outfile]]")
-    try:
-        obj = json.load(infile,
-                        object_pairs_hook=json.OrderedDict,
-                        use_decimal=True)
-    except ValueError, e:
-        raise SystemExit(e)
-    json.dump(obj, outfile, sort_keys=True, indent='    ', use_decimal=True)
-    outfile.write('\n')
-
-
-if __name__ == '__main__':
-    main()
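The deleted pretty-printer has a stdlib counterpart, python -m json.tool. A rough stand-in for callers that want ordered keys, sketched against Python 2.7 only (the stdlib encoder has no use_decimal option, so that behaviour is dropped here):

    import json
    import sys
    from collections import OrderedDict

    def main():
        # Read from a file argument if given, otherwise from stdin.
        infile = open(sys.argv[1], 'rb') if len(sys.argv) > 1 else sys.stdin
        try:
            obj = json.load(infile, object_pairs_hook=OrderedDict)
        except ValueError, e:
            raise SystemExit(e)
        json.dump(obj, sys.stdout, sort_keys=True, indent=4)
        sys.stdout.write('\n')

    if __name__ == '__main__':
        main()
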
@@ -2,6 +2,7 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.

+import json
 import os
 import posixpath
 import shutil
@@ -10,11 +11,6 @@ import tempfile
 import threading
 import traceback

-try:
-    import json
-except ImportError:
-    import simplejson as json
-
 here = os.path.abspath(os.path.dirname(__file__))
 sys.path.insert(0, here)

@@ -18,11 +18,7 @@ import glob
 import shutil
 import os
 import sys
-try:
-    import json
-except ImportError:
-    import simplejson as json
-
+import json
 results = Results()
 here = os.path.dirname(os.path.realpath(__file__))

@@ -10,10 +10,7 @@
 # Target: Python 2.5
 #
 from optparse import OptionParser
-try:
-    import json
-except:
-    import simplejson as json
+import json
 import re
 import urllib2
 import urlparse

@@ -11,11 +11,7 @@ import re
 import histogram_tools
 import json

-# For compatibility with python 2.6
-try:
-    from collections import OrderedDict
-except ImportError:
-    from simplejson import OrderedDict
+from collections import OrderedDict

 # Keep this in sync with TelemetryPing.
 startup_histogram_re = re.compile("SQLITE|HTTP|SPDY|CACHE|DNS")

@@ -2,16 +2,10 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

+import json
 import math

-# For compatibility with Python 2.6
-try:
-    from collections import OrderedDict
-except ImportError:
-    from simplejson import OrderedDict
-    import simplejson as json
-else:
-    import json
+from collections import OrderedDict

 def table_dispatch(kind, table, body):
     """Call body with table[kind] if it exists. Raise an error otherwise."""
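The hunks above converge on the same combination: stdlib json plus collections.OrderedDict, both available on Python 2.7, which covers what the object_pairs_hook callers need. A minimal sketch, not part of this patch:

    import json
    from collections import OrderedDict

    data = json.loads('{"b": 1, "a": 2}', object_pairs_hook=OrderedDict)
    assert list(data.keys()) == ['b', 'a']  # insertion order preserved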