Mirror of https://github.com/mozilla/server-core.git
removed non-core elements
--HG--
rename : syncserver/__init__.py => synccore/__init__.py
rename : syncserver/auth/__init__.py => synccore/auth/__init__.py
rename : syncserver/auth/dummy.py => synccore/auth/dummy.py
rename : syncserver/auth/ldapsql.py => synccore/auth/ldapsql.py
rename : syncserver/auth/sql.py => synccore/auth/sql.py
rename : syncserver/storage/sqlmappers.py => synccore/auth/sqlmappers.py
rename : syncserver/cef.py => synccore/cef.py
rename : syncserver/plugin.py => synccore/plugin.py
rename : syncserver/respcodes.py => synccore/respcodes.py
rename : syncserver/tests/__init__.py => synccore/tests/__init__.py
rename : syncserver/tests/support.py => synccore/tests/support.py
rename : syncserver/tests/test_cef.py => synccore/tests/test_cef.py
rename : syncserver/tests/test_dummyauth.py => synccore/tests/test_dummyauth.py
rename : syncserver/tests/test_plugin.py => synccore/tests/test_plugin.py
rename : syncserver/tests/test_sqlauth.py => synccore/tests/test_sqlauth.py
rename : syncserver/tests/test_util.py => synccore/tests/test_util.py
rename : syncserver/tests/test_wbo.py => synccore/tests/test_wbo.py
rename : syncserver/util.py => synccore/util.py
rename : syncserver/wbo.py => synccore/wbo.py
rename : syncserver/websetup.py => synccore/websetup.py
rename : syncserver/wsgiapp.py => synccore/wsgiapp.py
Parent: 2e581fe35d
Commit: 59eb99c1ce
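The practical effect of the rename is that downstream code imports from synccore instead of syncserver, as every hunk below shows. A minimal sketch of what a caller looks like after this commit (a hypothetical module, not part of the commit, mirroring the calls used in synccore/tests/support.py and the keys from the bundled .ini file):

    # Hypothetical downstream module -- illustrates the package rename only.
    # Before this commit the same names lived under the syncserver package:
    #   from syncserver.util import convert_config
    #   from syncserver.auth import WeaveAuth
    from synccore.util import convert_config
    from synccore.auth import WeaveAuth

    # Build a config mapping (keys as in the .ini file below) and resolve the
    # auth backend through the plugin registry, as tests/support.py does.
    config = convert_config({'auth': 'synccore.auth.sql.SQLAuth',
                             'auth.sqluri': 'sqlite:////tmp/test.db'})
    auth = WeaveAuth.get_from_config(config)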
@@ -1,4 +1,2 @@
recursive-include syncserver/templates *.mako
recursive-include syncserver/static *.*
include *.cfg *.ini *.wsgi *.sh
Makefile (21 lines changed)
@@ -1,7 +1,7 @@
VIRTUALENV = virtualenv
BIN = bin

.PHONY: all build check coverage test mysqltest redisqltest doc alltest
.PHONY: all build check coverage test mysqltest doc alltest

all: build test
@@ -13,27 +13,24 @@ build:
	$(BIN)/python setup.py develop

check:
	rm -rf syncserver/templates/*.py
	$(BIN)/flake8 syncserver
	rm -rf synccore/templates/*.py
	$(BIN)/flake8 synccore

coverage:
	$(BIN)/nosetests -s --cover-html --cover-html-dir=html --with-coverage --cover-package=syncserver syncserver
	WEAVE_TESTFILE=mysql $(BIN)/nosetests -s --cover-html --cover-html-dir=html --with-coverage --cover-package=syncserver syncserver
	$(BIN)/nosetests -s --cover-html --cover-html-dir=html --with-coverage --cover-package=synccore synccore
	WEAVE_TESTFILE=mysql $(BIN)/nosetests -s --cover-html --cover-html-dir=html --with-coverage --cover-package=synccore synccore

test:
	$(BIN)/nosetests -s syncserver
	$(BIN)/nosetests -s synccore

mysqltest:
	WEAVE_TESTFILE=mysql $(BIN)/nosetests -s syncserver

redisqltest:
	WEAVE_TESTFILE=redisql $(BIN)/nosetests -s syncserver
	WEAVE_TESTFILE=mysql $(BIN)/nosetests -s synccore

ldaptest:
	WEAVE_TESTFILE=ldap $(BIN)/nosetests -s syncserver
	WEAVE_TESTFILE=ldap $(BIN)/nosetests -s synccore


alltest: test mysqltest redisqltest ldaptest
alltest: test mysqltest ldaptest

doc:
	$(BIN)/sphinx-build doc/source/ doc/build/
@@ -14,7 +14,7 @@ threadpool_workers = 60
#proc_name = brim

[app:main]
use = egg:SyncServer
use = egg:SyncCore

provides_sync_apis = True
provides_user_apis = True
@@ -24,8 +24,8 @@ captcha.public_key = 6Le8OLwSAAAAAK-wkjNPBtHD4Iv50moNFANIalJL
captcha.private_key = 6Le8OLwSAAAAAEKoqfc-DmoF4HNswD7RNdGwxRij
captcha.use_ssl = False

#storage = syncserver.storage.redisql.RediSQLStorage
storage = syncserver.storage.sql.SQLStorage
#storage = synccore.storage.redisql.RediSQLStorage
storage = synccore.storage.sql.SQLStorage
#storage.sqluri = mysql://sync:sync@localhost/sync
storage.sqluri = sqlite:////tmp/test.db
#storage.sqluri = postgresql://sync:sync@localhost/sync
@@ -36,7 +36,7 @@ storage.pool_size = 100
storage.pool_recycle = 3600
storage.reset_on_return = True

auth = syncserver.auth.sql.SQLAuth
auth = synccore.auth.sql.SQLAuth
#auth.sqluri = mysql://sync:sync@localhost/sync
auth.sqluri = sqlite:////tmp/test.db
#auth.sqluri = postgresql://sync:sync@localhost/sync
@@ -58,10 +58,10 @@ cef.product = weave
# logging
#
[loggers]
keys = root,syncserver
keys = root,synccore

[handlers]
keys = global,syncserver
keys = global,synccore

[formatters]
keys = generic
@@ -70,10 +70,10 @@ keys = generic
level = WARNING
handlers = global

[logger_syncserver]
[logger_synccore]
qualname = weave
level = NOTSET
handlers = syncserver
handlers = synccore
propagate = 0

[handler_global]
@@ -82,7 +82,7 @@ args = (sys.stderr,)
level = NOTSET
formatter = generic

[handler_syncserver]
[handler_synccore]
class = handlers.RotatingFileHandler
args = ("sync.log", )
level = NOTSET
setup.py (12 lines changed)
@@ -42,13 +42,5 @@ install_requires = ['SQLALchemy', 'PasteDeploy', 'WebOb', 'Mako', 'WebTest',
extra_requires = {'full': ['MySQL-python', 'redis', 'python-ldap']}


entry_points = """
[paste.app_factory]
main = syncserver.wsgiapp:make_app

[paste.app_install]
main = paste.script.appinstall:Installer
"""

setup(name='SyncServer', version=0.1, packages=find_packages(),
      install_requires=install_requires, entry_points=entry_points)
setup(name='SyncCore', version=0.1, packages=find_packages(),
      install_requires=install_requires)
@@ -1,3 +1,3 @@
[SyncServer]
[SyncCore]
# need to add non-python deps, and fix the repoze.profile dependency
Depends: python-sqlalchemy, python-mysqldb, python-pastedeploy, python-pastescript, python-routes, python-webob, python-webtest, python-mako, python-redis, python-recaptcha, python-simplejson, python-setuptools
Depends: python-sqlalchemy, python-mysqldb, python-pastedeploy, python-pastescript, python-routes, python-webob, python-webtest, python-simplejson, python-setuptools
@@ -36,7 +36,7 @@
""" Authentication tool
"""
import abc
from syncserver.plugin import Plugin
from synccore.plugin import Plugin


class WeaveAuth(Plugin):
@@ -50,7 +50,7 @@ from sqlalchemy import Integer, String, DateTime
from sqlalchemy import create_engine
from sqlalchemy.sql import bindparam, select, insert, delete

from syncserver.util import generate_reset_code, check_reset_code, ssha
from synccore.util import generate_reset_code, check_reset_code, ssha

_Base = declarative_base()
@@ -42,10 +42,10 @@ import datetime
from sqlalchemy import create_engine
from sqlalchemy.sql import bindparam, select, insert, update, delete

from syncserver.util import (validate_password, ssha256, check_reset_code,
from synccore.util import (validate_password, ssha256, check_reset_code,
                           generate_reset_code)
# sharing the same table than the sql storage
from syncserver.storage.sqlmappers import users
from synccore.auth.sqlmappers import users

_SQLURI = 'mysql://sync:sync@localhost/sync'
@@ -80,7 +80,7 @@ def auth_failure(message, severity, request, **kw):
    """
    # XXX might want to remove the request dependency here
    # so this module is standalone
    from syncserver.util import filter_params
    from synccore.util import filter_params

    signature = _convert(_CEF_AUTH_FAILURE)
    name = _convert(message)
@@ -37,7 +37,7 @@
Base plugin class with registration mechanism and configuration reading.
"""
import abc
from syncserver.util import filter_params
from synccore.util import filter_params


def _resolve_name(name):
@@ -37,12 +37,11 @@ from ConfigParser import RawConfigParser
import os
from logging.config import fileConfig

from syncserver.storage import WeaveStorage
from syncserver.auth import WeaveAuth
from syncserver.util import convert_config
import syncserver
from synccore.auth import WeaveAuth
from synccore.util import convert_config
import synccore

_WEAVEDIR = os.path.dirname(syncserver.__file__)
_WEAVEDIR = os.path.dirname(synccore.__file__)
_TOPDIR = os.path.split(_WEAVEDIR)[0]
if 'WEAVE_TESTFILE' in os.environ:
    _INI_FILE = os.path.join(_TOPDIR, 'tests_%s.ini' % \
@@ -66,6 +65,6 @@ def initenv():

    config = dict(cfg.items('DEFAULT') + cfg.items('app:main'))
    config = convert_config(config)
    storage = WeaveStorage.get_from_config(config)
    auth = WeaveAuth.get_from_config(config)
    return _TOPDIR, config, storage, auth
    return _TOPDIR, config, auth
@@ -37,7 +37,7 @@ import unittest
import os
from tempfile import mkstemp

from syncserver.cef import auth_failure
from synccore.cef import auth_failure


class FakeRequest(object):
@@ -35,8 +35,8 @@
# ***** END LICENSE BLOCK *****
import unittest

from syncserver.auth.dummy import DummyAuth
from syncserver.auth import WeaveAuth
from synccore.auth.dummy import DummyAuth
from synccore.auth import WeaveAuth

WeaveAuth.register(DummyAuth)
@@ -34,7 +34,7 @@
#
# ***** END LICENSE BLOCK *****
import unittest
from syncserver.plugin import Plugin
from synccore.plugin import Plugin


class TestPlugin(unittest.TestCase):
@@ -38,10 +38,10 @@ import datetime

from sqlalchemy.sql import text

from syncserver.tests.support import initenv
from syncserver.auth.sql import SQLAuth
from syncserver.auth import WeaveAuth
from syncserver.util import ssha
from synccore.tests.support import initenv
from synccore.auth.sql import SQLAuth
from synccore.auth import WeaveAuth
from synccore.util import ssha

WeaveAuth.register(SQLAuth)
@@ -49,7 +49,7 @@ WeaveAuth.register(SQLAuth)
class TestSQLAuth(unittest.TestCase):

    def setUp(self):
        self.appdir, self.config, self.storage, self.auth = initenv()
        self.appdir, self.config, self.auth = initenv()
        # we don't support other storages for this test
        assert self.auth.sqluri.split(':/')[0] in ('mysql', 'sqlite')
@@ -70,7 +70,8 @@ class TestSQLAuth(unittest.TestCase):
            return

        self.assertEquals(self.auth.authenticate_user('tarek', 'xxx'), None)
        self.assertEquals(self.auth.authenticate_user('tarek', 'tarek'), 1)
        user_id = self.auth.authenticate_user('tarek', 'tarek')
        self.assertEquals(user_id, self.user_id)

    def test_reset_code(self):
        if self.auth.get_name() != 'sql':
@@ -39,7 +39,7 @@ from base64 import encodestring

from webob.exc import HTTPServiceUnavailable

from syncserver.util import (authenticate_user, convert_config, bigint2time,
from synccore.util import (authenticate_user, convert_config, bigint2time,
                           time2bigint, valid_email, batch, raise_503)

@@ -35,7 +35,7 @@
# ***** END LICENSE BLOCK *****
import unittest

from syncserver.wbo import WBO
from synccore.wbo import WBO


class TestWBO(unittest.TestCase):
@@ -57,7 +57,7 @@ from mako.lookup import TemplateLookup
from webob.exc import HTTPUnauthorized, HTTPServiceUnavailable
from webob import Response

from syncserver.cef import auth_failure
from synccore.cef import auth_failure

# various authorization header names, depending on the setup
_AUTH_HEADERS = ('Authorization', 'AUTHORIZATION', 'HTTP_AUTHORIZATION',
@@ -40,9 +40,9 @@ Function called by :

Used to initialize the DB and create some data.
"""
from syncserver import logger
from syncserver.auth import WeaveAuth
from syncserver.util import read_config
from synccore import logger
from synccore.auth import WeaveAuth
from synccore.util import read_config


def setup_app(command, filename, section):
@@ -48,13 +48,13 @@ from webob.dec import wsgify
from webob.exc import HTTPNotFound, HTTPUnauthorized, HTTPBadRequest
from webob import Response

from syncserver import API_VERSION
from syncserver.util import authenticate_user, convert_config
from syncserver.storage import WeaveStorage
from syncserver.auth import WeaveAuth
from syncserver.controllers.storage import StorageController
from syncserver.controllers.user import UserController
from syncserver.controllers.static import StaticController
from synccore import API_VERSION
from synccore.util import authenticate_user, convert_config
from synccore.storage import WeaveStorage
from synccore.auth import WeaveAuth
from synccore.controllers.storage import StorageController
from synccore.controllers.user import UserController
from synccore.controllers.static import StaticController

# URL dispatching happens here
# methods / match / controller / controller method / auth ?
@@ -1,69 +0,0 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Sync Server
#
# The Initial Developer of the Original Code is Mozilla Foundation.
# Portions created by the Initial Developer are Copyright (C) 2010
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Tarek Ziade (tarek@mozilla.com)
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
"""
Static controller that serve files.

XXX DO NOT USE IN PRODUCTION -- USE AN APACHE ALIAS INSTEAD

This controller will fully load files it serves in memory.
"""
import os
from mimetypes import guess_type

from webob.exc import HTTPNotFound
from webob import Response

_STATIC_DIR = os.path.join(os.path.dirname(__file__), '..', 'static')


class StaticController(object):
    """Used to return static files
    """
    def __init__(self, app):
        self.app = app

    def get_file(self, request):
        """Returns a file located in the static/ directory."""
        filename = request.sync_info['filename']
        path = os.path.join(_STATIC_DIR, filename)
        if not os.path.exists(path):
            raise HTTPNotFound()

        with open(path) as f:
            data = f.read()

        __, content_type = guess_type(filename)
        return Response(data, content_type=content_type)
@@ -1,351 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
Storage controller. Implements all info, user APIs from:
|
||||
|
||||
https://wiki.mozilla.org/Labs/Weave/Sync/1.0/API
|
||||
|
||||
"""
|
||||
import simplejson as json
|
||||
|
||||
from webob.exc import HTTPBadRequest, HTTPNotFound, HTTPPreconditionFailed
|
||||
from syncserver.util import (convert_response, json_response, round_time,
|
||||
batch, raise_503)
|
||||
|
||||
from syncserver.wbo import WBO
|
||||
from syncserver.respcodes import (WEAVE_MALFORMED_JSON, WEAVE_INVALID_WBO,
|
||||
WEAVE_INVALID_WRITE, WEAVE_OVER_QUOTA)
|
||||
|
||||
_WBO_FIELDS = ['id', 'parentid', 'predecessorid', 'sortindex', 'modified',
|
||||
'payload', 'payload_size']
|
||||
|
||||
|
||||
class StorageController(object):
|
||||
|
||||
def __init__(self, app, storage):
|
||||
self.app = app
|
||||
self.storage = raise_503(storage)
|
||||
|
||||
def index(self, request):
|
||||
return "Sync Server"
|
||||
|
||||
def _has_modifiers(self, data):
|
||||
return 'payload' in data
|
||||
|
||||
def _was_modified(self, request, user_id, collection_name):
|
||||
"""Checks the X-If-Unmodified-Since header."""
|
||||
unmodified = request.headers.get('X-If-Unmodified-Since')
|
||||
if unmodified is None:
|
||||
return False
|
||||
|
||||
unmodified = round_time(unmodified)
|
||||
max = self.storage.get_collection_max_timestamp(user_id,
|
||||
collection_name)
|
||||
if max is None:
|
||||
return False
|
||||
return max > unmodified
|
||||
|
||||
def get_storage(self, request):
|
||||
# XXX returns a 400 if the root is called
|
||||
raise HTTPBadRequest()
|
||||
|
||||
def get_collections(self, request, v=None):
|
||||
"""Returns a hash of collections associated with the account,
|
||||
Along with the last modified timestamp for each collection
|
||||
"""
|
||||
# 'v' is the version of the client, given the first time
|
||||
user_id = request.sync_info['user_id']
|
||||
collections = self.storage.get_collection_timestamps(user_id)
|
||||
response = convert_response(request, collections)
|
||||
response.headers['X-Weave-Records'] = str(len(collections))
|
||||
return response
|
||||
|
||||
def get_collection_counts(self, request):
|
||||
"""Returns a hash of collections associated with the account,
|
||||
Along with the total number of items for each collection.
|
||||
"""
|
||||
user_id = request.sync_info['user_id']
|
||||
counts = self.storage.get_collection_counts(user_id)
|
||||
response = convert_response(request, counts)
|
||||
response.headers['X-Weave-Records'] = str(len(counts))
|
||||
return response
|
||||
|
||||
def get_quota(self, request):
|
||||
if not self.storage.use_quota:
|
||||
return json_response((0.0, 0))
|
||||
user_id = request.sync_info['user_id']
|
||||
used = self.storage.get_total_size(user_id)
|
||||
return json_response((used, self.storage.quota_size))
|
||||
|
||||
def get_collection_usage(self, request):
|
||||
user_id = request.sync_info['user_id']
|
||||
|
||||
return json_response(self.storage.get_collection_sizes(user_id))
|
||||
|
||||
# XXX see if we want to use kwargs here instead
|
||||
def get_collection(self, request, ids=None, predecessorid=None,
|
||||
parentid=None, older=None, newer=None, full=False,
|
||||
index_above=None, index_below=None, limit=None,
|
||||
offset=None, sort=None):
|
||||
"""Returns a list of the WBO ids contained in a collection."""
|
||||
# XXX sanity check on arguments (detect incompatible params here, or
|
||||
# unknown values)
|
||||
filters = {}
|
||||
if ids is not None:
|
||||
filters['id'] = 'in', ids.split(',')
|
||||
if predecessorid is not None:
|
||||
filters['predecessorid'] = '=', predecessorid
|
||||
if parentid is not None:
|
||||
filters['parentid'] = '=', parentid
|
||||
if older is not None:
|
||||
filters['modified'] = '<', older
|
||||
if newer is not None:
|
||||
filters['modified'] = '>', newer
|
||||
if index_above is not None:
|
||||
filters['sortindex'] = '>', float(index_above)
|
||||
if index_below is not None:
|
||||
filters['sortindex'] = '<', float(index_below)
|
||||
|
||||
if limit is not None:
|
||||
limit = int(limit)
|
||||
|
||||
if offset is not None:
|
||||
# we need both
|
||||
if limit is None:
|
||||
offset = None
|
||||
else:
|
||||
offset = int(offset)
|
||||
|
||||
collection_name = request.sync_info['collection']
|
||||
user_id = request.sync_info['user_id']
|
||||
if not full:
|
||||
fields = ['id']
|
||||
else:
|
||||
fields = _WBO_FIELDS
|
||||
|
||||
res = self.storage.get_items(user_id, collection_name, fields, filters,
|
||||
limit, offset, sort)
|
||||
if not full:
|
||||
res = [line['id'] for line in res]
|
||||
|
||||
response = convert_response(request, res)
|
||||
response.headers['X-Weave-Records'] = str(len(res))
|
||||
return response
|
||||
|
||||
def get_item(self, request, full=True): # always full
|
||||
"""Returns a single WBO object."""
|
||||
collection_name = request.sync_info['collection']
|
||||
item_id = request.sync_info['item']
|
||||
user_id = request.sync_info['user_id']
|
||||
fields = _WBO_FIELDS
|
||||
res = self.storage.get_item(user_id, collection_name, item_id,
|
||||
fields=fields)
|
||||
if res is None:
|
||||
raise HTTPNotFound()
|
||||
|
||||
return json_response(res)
|
||||
|
||||
def _check_quota(self, request):
|
||||
"""Checks the quota.
|
||||
|
||||
If under the treshold, adds a header
|
||||
If the quota is reached, issues a 400
|
||||
"""
|
||||
user_id = request.sync_info['user_id']
|
||||
left = self.storage.get_size_left(user_id)
|
||||
if left <= 0.: # no space left
|
||||
raise HTTPBadRequest(WEAVE_OVER_QUOTA)
|
||||
return left
|
||||
|
||||
def set_item(self, request):
|
||||
"""Sets a single WBO object."""
|
||||
left = self._check_quota(request)
|
||||
user_id = request.sync_info['user_id']
|
||||
collection_name = request.sync_info['collection']
|
||||
item_id = request.sync_info['item']
|
||||
|
||||
if self._was_modified(request, user_id, collection_name):
|
||||
raise HTTPPreconditionFailed(collection_name)
|
||||
|
||||
try:
|
||||
data = json.loads(request.body)
|
||||
except ValueError:
|
||||
raise HTTPBadRequest(WEAVE_MALFORMED_JSON)
|
||||
|
||||
wbo = WBO(data)
|
||||
consistent, msg = wbo.validate()
|
||||
|
||||
if not consistent:
|
||||
raise HTTPBadRequest(msg)
|
||||
|
||||
if self._has_modifiers(wbo):
|
||||
wbo['modified'] = request.server_time
|
||||
|
||||
res = self.storage.set_item(user_id, collection_name, item_id, **wbo)
|
||||
response = json_response(res)
|
||||
if left <= 1024:
|
||||
response.headers['X-Weave-Quota-Remaining'] = str(left)
|
||||
return response
|
||||
|
||||
def delete_item(self, request):
|
||||
"""Deletes a single WBO object."""
|
||||
collection_name = request.sync_info['collection']
|
||||
item_id = request.sync_info['item']
|
||||
user_id = request.sync_info['user_id']
|
||||
if self._was_modified(request, user_id, collection_name):
|
||||
raise HTTPPreconditionFailed(collection_name)
|
||||
self.storage.delete_item(user_id, collection_name, item_id)
|
||||
return json_response(request.server_time)
|
||||
|
||||
def set_collection(self, request):
|
||||
"""Sets a batch of WBO objects into a collection."""
|
||||
user_id = request.sync_info['user_id']
|
||||
collection_name = request.sync_info['collection']
|
||||
|
||||
if self._was_modified(request, user_id, collection_name):
|
||||
raise HTTPPreconditionFailed(collection_name)
|
||||
|
||||
try:
|
||||
wbos = json.loads(request.body)
|
||||
except ValueError:
|
||||
raise HTTPBadRequest(WEAVE_MALFORMED_JSON)
|
||||
|
||||
if not isinstance(wbos, (tuple, list)):
|
||||
# thats a batch of one
|
||||
if 'id' not in wbos:
|
||||
raise HTTPBadRequest(WEAVE_INVALID_WBO)
|
||||
id_ = str(wbos['id'])
|
||||
if '/' in id_:
|
||||
raise HTTPBadRequest(WEAVE_INVALID_WBO)
|
||||
|
||||
request.sync_info['item'] = id_
|
||||
return self.set_item(request)
|
||||
|
||||
res = {'modified': request.server_time, 'success': [], 'failed': {}}
|
||||
|
||||
# sanity chech
|
||||
kept_wbos = []
|
||||
for wbo in wbos:
|
||||
wbo = WBO(wbo)
|
||||
|
||||
if 'id' not in wbo:
|
||||
res['failed'][''] = ['invalid id']
|
||||
continue
|
||||
|
||||
if self._has_modifiers(wbo):
|
||||
wbo['modified'] = request.server_time
|
||||
|
||||
consistent, msg = wbo.validate()
|
||||
item_id = wbo['id']
|
||||
|
||||
if not consistent:
|
||||
res['failed'][item_id] = [msg]
|
||||
else:
|
||||
kept_wbos.append(wbo)
|
||||
|
||||
left = self._check_quota(request)
|
||||
|
||||
for wbos in batch(kept_wbos):
|
||||
wbos = list(wbos) # to avoid exhaustion
|
||||
try:
|
||||
self.storage.set_items(user_id, collection_name, wbos)
|
||||
except Exception, e: # we want to swallow the 503 in that case
|
||||
# something went wrong
|
||||
for wbo in wbos:
|
||||
res['failed'][wbo['id']] = str(e)
|
||||
else:
|
||||
res['success'].extend([wbo['id'] for wbo in wbos])
|
||||
|
||||
response = json_response(res)
|
||||
if left <= 1024:
|
||||
response.headers['X-Weave-Quota-Remaining'] = str(left)
|
||||
return response
|
||||
|
||||
def delete_collection(self, request, ids=None, parentid=None, older=None,
|
||||
newer=None, index_above=None, index_below=None,
|
||||
predecessorid=None, limit=None, offset=None,
|
||||
sort=None):
|
||||
"""Deletes the collection and all contents.
|
||||
|
||||
Additional request parameters may modify the selection of which
|
||||
items to delete.
|
||||
"""
|
||||
# XXX sanity check on arguments (detect incompatible params here, or
|
||||
# unknown values)
|
||||
collection_name = request.sync_info['collection']
|
||||
user_id = request.sync_info['user_id']
|
||||
if self._was_modified(request, user_id, collection_name):
|
||||
raise HTTPPreconditionFailed(collection_name)
|
||||
|
||||
filters = {}
|
||||
if ids is not None:
|
||||
ids = [id_.strip() for id_ in ids.split(',')]
|
||||
if parentid is not None:
|
||||
filters['parentid'] = '=', parentid
|
||||
if predecessorid is not None:
|
||||
filters['predecessorid'] = '=', predecessorid
|
||||
if older is not None:
|
||||
filters['modified'] = '<', float(older)
|
||||
if newer is not None:
|
||||
filters['modified'] = '>', float(newer)
|
||||
if index_above is not None:
|
||||
filters['sortindex'] = '>', float(index_above)
|
||||
if index_below is not None:
|
||||
filters['sortindex'] = '<', float(index_below)
|
||||
if limit is not None:
|
||||
limit = int(limit)
|
||||
if offset is not None:
|
||||
# we need both
|
||||
if limit is None:
|
||||
offset = None
|
||||
else:
|
||||
offset = int(offset)
|
||||
|
||||
res = self.storage.delete_items(user_id, collection_name, ids, filters,
|
||||
limit=limit, offset=offset, sort=sort)
|
||||
return json_response(res)
|
||||
|
||||
def delete_storage(self, request):
|
||||
"""Deletes all records for the user.
|
||||
|
||||
Will return a precondition error unless an X-Confirm-Delete header
|
||||
is included.
|
||||
"""
|
||||
if 'X-Confirm-Delete' not in request.headers:
|
||||
raise HTTPBadRequest(WEAVE_INVALID_WRITE)
|
||||
user_id = request.sync_info['user_id']
|
||||
self.storage.delete_storage(user_id) # XXX failures ?
|
||||
return json_response(True)
|
|
@@ -1,252 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
User controller. Implements all APIs from:
|
||||
|
||||
https://wiki.mozilla.org/Labs/Weave/User/1.0/API
|
||||
|
||||
"""
|
||||
import os
|
||||
import simplejson as json
|
||||
|
||||
from webob.exc import (HTTPServiceUnavailable, HTTPBadRequest,
|
||||
HTTPInternalServerError, HTTPNotFound)
|
||||
from recaptcha.client import captcha
|
||||
|
||||
from syncserver.util import (json_response, send_email, valid_email,
|
||||
valid_password, render_mako, raise_503)
|
||||
from syncserver.respcodes import (WEAVE_MISSING_PASSWORD,
|
||||
WEAVE_NO_EMAIL_ADRESS,
|
||||
WEAVE_INVALID_WRITE,
|
||||
WEAVE_MALFORMED_JSON,
|
||||
WEAVE_WEAK_PASSWORD,
|
||||
WEAVE_INVALID_CAPTCHA)
|
||||
|
||||
_TPL_DIR = os.path.join(os.path.dirname(__file__), 'templates')
|
||||
|
||||
|
||||
class UserController(object):
|
||||
|
||||
def __init__(self, app, auth):
|
||||
self.app = app
|
||||
self.auth = raise_503(auth)
|
||||
|
||||
def user_exists(self, request):
|
||||
exists = (self.auth.get_user_id(request.sync_info['username'])
|
||||
is not None)
|
||||
return json_response(int(exists))
|
||||
|
||||
def user_node(self, request):
|
||||
"""Returns the storage node root for the user"""
|
||||
# XXX the PHP Server does not send a json back here
|
||||
# but a plain text expected by the client
|
||||
#
|
||||
# return json_response(request.host_url)
|
||||
return request.host_url + '/'
|
||||
|
||||
def password_reset(self, request):
|
||||
"""Sends an e-mail for a password reset request."""
|
||||
user_id = request.sync_info['user_id']
|
||||
code = self.auth.generate_reset_code(user_id)
|
||||
user_name, user_email = self.auth.get_user_info(user_id)
|
||||
data = {'host': request.host_url, 'user_name': user_name,
|
||||
'code': code}
|
||||
body = render_mako('password_reset_mail.mako', **data)
|
||||
|
||||
sender = request.config['smtp.sender']
|
||||
host = request.config['smtp.host']
|
||||
port = int(request.config['smtp.port'])
|
||||
user = request.config.get('smtp.user')
|
||||
password = request.config.get('smtp.password')
|
||||
|
||||
subject = 'Resetting your Weave password'
|
||||
res, msg = send_email(sender, user_email, subject, body, host, port,
|
||||
user, password)
|
||||
|
||||
if not res:
|
||||
raise HTTPServiceUnavailable(msg)
|
||||
|
||||
return 'success'
|
||||
|
||||
def create_user(self, request):
|
||||
"""Creates a user."""
|
||||
user_name = request.sync_info['username']
|
||||
|
||||
if self.auth.get_user_id(user_name) is not None:
|
||||
raise HTTPBadRequest(WEAVE_INVALID_WRITE)
|
||||
|
||||
try:
|
||||
data = json.loads(request.body)
|
||||
except ValueError:
|
||||
raise HTTPBadRequest(WEAVE_MALFORMED_JSON)
|
||||
|
||||
# getting the e-mail
|
||||
email = data.get('email')
|
||||
if not valid_email(email):
|
||||
raise HTTPBadRequest(WEAVE_NO_EMAIL_ADRESS)
|
||||
|
||||
# getting the password
|
||||
password = data.get('password')
|
||||
if password is None:
|
||||
raise HTTPBadRequest(WEAVE_MISSING_PASSWORD)
|
||||
|
||||
if not valid_password(user_name, password):
|
||||
raise HTTPBadRequest(WEAVE_WEAK_PASSWORD)
|
||||
|
||||
# check if captcha info are provided
|
||||
challenge = data.get('captcha-challenge')
|
||||
response = data.get('captcha-response')
|
||||
|
||||
if challenge is not None and response is not None:
|
||||
resp = captcha.submit(challenge, response,
|
||||
self.app.config['captcha.private_key'],
|
||||
remoteip=request.remote_addr)
|
||||
if not resp.is_valid:
|
||||
raise HTTPBadRequest(WEAVE_INVALID_CAPTCHA)
|
||||
else:
|
||||
if self.app.config['use_captcha']:
|
||||
raise HTTPBadRequest(WEAVE_INVALID_CAPTCHA)
|
||||
|
||||
# all looks good, let's create the user
|
||||
# XXX need to do it in routes
|
||||
if not self.auth.create_user(user_name, password, email):
|
||||
raise HTTPInternalServerError('User creation failed.')
|
||||
|
||||
return user_name
|
||||
|
||||
def change_email(self, request):
|
||||
"""Changes the user e-mail"""
|
||||
user_id = request.sync_info['user_id']
|
||||
try:
|
||||
email = json.loads(request.body)
|
||||
except ValueError:
|
||||
raise HTTPBadRequest(WEAVE_MALFORMED_JSON)
|
||||
|
||||
if not valid_email(email):
|
||||
raise HTTPBadRequest(WEAVE_NO_EMAIL_ADRESS)
|
||||
|
||||
if not self.auth.update_email(user_id, email):
|
||||
raise HTTPInternalServerError('User update failed.')
|
||||
|
||||
return email
|
||||
|
||||
def password_reset_form(self, request, **kw):
|
||||
"""Returns a form for resetting the password"""
|
||||
if 'key' in kw:
|
||||
# we have a key, let's display the key controlling form
|
||||
return render_mako('password_reset_form.mako', **kw)
|
||||
elif not request.POST and not request.GET:
|
||||
# asking for the first time
|
||||
return render_mako('password_ask_reset_form.mako')
|
||||
raise HTTPBadRequest()
|
||||
|
||||
def _repost(self, request, error):
|
||||
request.POST['error'] = error
|
||||
return self.password_reset_form(request, **dict(request.POST))
|
||||
|
||||
def do_password_reset(self, request):
|
||||
"""Do a password reset."""
|
||||
user_name = request.POST.get('username')
|
||||
if request.POST.keys() == ['username']:
|
||||
# setting up a password reset
|
||||
user_name = request.POST['username']
|
||||
user_id = self.auth.get_user_id(user_name)
|
||||
request.sync_info['user_id'] = user_id
|
||||
try:
|
||||
self.password_reset(request)
|
||||
except HTTPServiceUnavailable, e:
|
||||
return render_mako('password_failure.mako', error=e.detail)
|
||||
else:
|
||||
return render_mako('password_key_sent.mako')
|
||||
raise HTTPBadRequest()
|
||||
|
||||
# full form, the actual password reset
|
||||
password = request.POST.get('password')
|
||||
confirm = request.POST.get('confirm')
|
||||
key = request.POST.get('key')
|
||||
|
||||
if user_name is None:
|
||||
return self._repost(request,
|
||||
'Username not provided. Please check '
|
||||
'the link you used.')
|
||||
|
||||
user_id = self.auth.get_user_id(user_name)
|
||||
if user_id is None:
|
||||
return self._repost(request, 'We are unable to locate your '
|
||||
'account')
|
||||
|
||||
if password is None:
|
||||
return self._repost(request, 'Password not provided. '
|
||||
'Please check the link you used.')
|
||||
|
||||
if password != confirm:
|
||||
return self._repost(request, 'Password and confirmation do '
|
||||
'not match')
|
||||
|
||||
if not valid_password(user_name, password):
|
||||
return self._repost(request, 'Password should be at least 8 '
|
||||
'characters and not the same as your '
|
||||
'username')
|
||||
|
||||
if not self.auth.verify_reset_code(user_id, key):
|
||||
return self._repost(request, 'Key does not match with username. '
|
||||
'Please request a new key.')
|
||||
|
||||
# everything looks fine
|
||||
if not self.auth.update_password(user_id, password):
|
||||
return self._repost(request, 'Password change failed '
|
||||
'unexpectedly.')
|
||||
|
||||
self.auth.clear_reset_code(user_id)
|
||||
return render_mako('password_changed.mako')
|
||||
|
||||
def delete_user(self, request):
|
||||
"""Deletes the user."""
|
||||
user_id = request.sync_info['user_id']
|
||||
res = self.auth.delete_user(user_id)
|
||||
return json_response(res)
|
||||
|
||||
def _captcha(self):
|
||||
"""Return HTML string for inserting recaptcha into a form."""
|
||||
return captcha.displayhtml(self.app.config['captcha.public_key'],
|
||||
use_ssl=self.app.config['captcha.use_ssl'])
|
||||
|
||||
def captcha_form(self, request):
|
||||
"""Renders the captcha form"""
|
||||
if not self.app.config['use_captcha']:
|
||||
raise HTTPNotFound('No captcha configured')
|
||||
|
||||
return render_mako('captcha.mako', captcha=self._captcha())
|
Binary data: syncserver/static/bg.jpg (binary file not shown; before: 49 KiB)
Binary data: syncserver/static/circles.png (binary file not shown; before: 382 B)
@@ -1,191 +0,0 @@
|
|||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
background: url('/media/bg.jpg') repeat-x;
|
||||
background-color: #b5e1ea;
|
||||
font-family: Tahoma, Arial, Helvetica, sans-serif;
|
||||
font-size: 12px;
|
||||
color: #003663;
|
||||
}
|
||||
|
||||
a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.ctext {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.cimage {
|
||||
display: block;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.small {
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
#content {
|
||||
width: 880px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
#content #top {
|
||||
height: 200px;
|
||||
padding-top: 15px;
|
||||
}
|
||||
|
||||
#content #bottom {
|
||||
margin: 0 29px 0 30px;
|
||||
}
|
||||
|
||||
#content #bottom .table_middle {
|
||||
background: url('/media/table-sides.png') repeat-y;
|
||||
min-height: 200px;
|
||||
}
|
||||
|
||||
#content #bottom .table_middle .divider {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
#content #bottom .table_middle .title {
|
||||
padding-left: 15px;
|
||||
padding-bottom: 10px;
|
||||
font-size: 18px;
|
||||
}
|
||||
|
||||
#content #bottom .table_middle .details {
|
||||
padding-left: 30px;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
#content #bottom .table_middle .updatelist li {
|
||||
list-style-image: url('/media/circles.png');
|
||||
padding-bottom: 5px;
|
||||
}
|
||||
|
||||
#content #footer {
|
||||
margin-top: 30px;
|
||||
clear: both;
|
||||
text-align: center;
|
||||
color: #547A9B;
|
||||
font-size: 12px;
|
||||
padding-bottom: 10px;
|
||||
}
|
||||
|
||||
#content #footer a {
|
||||
color: #547A9B;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
#content #footer .labs {
|
||||
margin-top: 15px;
|
||||
margin-bottom: 3px;
|
||||
}
|
||||
|
||||
#content #footer .labs img {
|
||||
vertical-align: middle;
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
#content #footer .legal {
|
||||
margin-top: 10px;
|
||||
font-size: 10px;
|
||||
}
|
||||
|
||||
#content #footer .legal span {
|
||||
margin-left: 10px;
|
||||
}
|
||||
|
||||
.error {
|
||||
background-color: #fd553b;
|
||||
margin-left: 0px;
|
||||
margin-top: 2px;
|
||||
margin-bottom: 10px;
|
||||
padding: 16px 16px 16px 16px;
|
||||
font-weight: bold;
|
||||
-moz-border-radius: 5px;
|
||||
-khtml-border-radius: 5px;
|
||||
-webkit-border-radius: 5px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.box {
|
||||
background-color: #9698af;
|
||||
padding: 16px 16px 10px 16px;
|
||||
-moz-border-radius: 5px;
|
||||
-khtml-border-radius: 5px;
|
||||
-webkit-border-radius: 5px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.box p {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
form p {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
form input {
|
||||
font-size: 20px;
|
||||
}
|
||||
|
||||
form .submit {
|
||||
}
|
||||
|
||||
#user_login, #user_pass, #user_pass2 {
|
||||
font-size: 20px;
|
||||
width: 97%;
|
||||
padding: 3px;
|
||||
margin-right: 6px;
|
||||
}
|
||||
|
||||
.error {
|
||||
background-color: #fd553b;
|
||||
margin-left: 0px;
|
||||
margin-top: 2px;
|
||||
margin-bottom: 10px;
|
||||
padding: 16px 16px 16px 16px;
|
||||
font-weight: bold;
|
||||
-moz-border-radius: 5px;
|
||||
-khtml-border-radius: 5px;
|
||||
-webkit-border-radius: 5px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.box {
|
||||
background-color: #9698af;
|
||||
padding: 16px 16px 10px 16px;
|
||||
-moz-border-radius: 5px;
|
||||
-khtml-border-radius: 5px;
|
||||
-webkit-border-radius: 5px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.box p {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
form p {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
form input {
|
||||
font-size: 20px;
|
||||
}
|
||||
|
||||
form .submit {
|
||||
}
|
||||
|
||||
#user_login, #user_pass, #user_pass2 {
|
||||
font-size: 20px;
|
||||
width: 97%;
|
||||
padding: 3px;
|
||||
margin-right: 6px;
|
||||
}
|
Binary data: syncserver/static/table-sides.png (binary file not shown; before: 179 B)
Binary data: syncserver/static/table-top.png (binary file not shown; before: 378 B)
Binary data: syncserver/static/weave-logo.png (binary file not shown; before: 60 KiB)
@@ -1,358 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
|
||||
users
|
||||
|
|
||||
-- collections
|
||||
|
|
||||
|
|
||||
---- items
|
||||
|
||||
"""
|
||||
import abc
|
||||
from syncserver.plugin import Plugin
|
||||
|
||||
|
||||
class WeaveStorage(Plugin):
|
||||
"""Abstract Base Class for the storage."""
|
||||
name = 'storage'
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_name(self):
|
||||
"""Returns the name of the plugin.
|
||||
|
||||
Must be a class method.
|
||||
|
||||
Args:
|
||||
None
|
||||
|
||||
Returns:
|
||||
The plugin name
|
||||
"""
|
||||
|
||||
#
|
||||
# Users APIs -- the user id is the email
|
||||
#
|
||||
@abc.abstractmethod
|
||||
def user_exists(self, user_id):
|
||||
"""Returns True if the user exists.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
True or False
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def set_user(self, user_id, **values):
|
||||
"""Sets information for a user.
|
||||
|
||||
If the user doesn't exists, it will be created.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- values: mapping containing the values.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_user(self, user_id, fields=None):
|
||||
"""Returns user information.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- fields: if provided, its a list of fields to return,
|
||||
all fields are returns by default.
|
||||
|
||||
Returns:
|
||||
A dict containing the values for the user.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def delete_user(self, user_id):
|
||||
"""Remove a user and his data from the storage.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
#
|
||||
# Collections APIs
|
||||
#
|
||||
@abc.abstractmethod
|
||||
def delete_collection(self, user_id, collection_name):
|
||||
"""Deletes a collection.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def collection_exists(self, user_id, collection_name):
|
||||
"""Returns True if the collection exists.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection
|
||||
|
||||
Returns:
|
||||
True or False
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def set_collection(self, user_id, collection_name, **values):
|
||||
"""Creates a new collection.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- values: mapping containing the values.
|
||||
|
||||
Returns:
|
||||
integer identifying the collection in the storage.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_collection(self, user_id, collection_name, fields=None):
|
||||
"""Return information about a collection.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- fields: if provided, its a list of fields to return,
|
||||
all fields are returns by default.
|
||||
|
||||
Returns:
|
||||
A dict containing the information for the collection
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_collections(self, user_id, fields=None):
|
||||
"""Returns the collections information.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- fields: if provided, its a list of fields to return,
|
||||
all fields are returns by default.
|
||||
|
||||
Returns:
|
||||
A list of dict containing the information for the collection
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_collection_names(self, user_id):
|
||||
"""Returns the collection names for a user.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
A list of dict containing the name and id for each collection.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_collection_timestamps(self, user_id):
|
||||
"""Returns the collection timestamps for a user.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
A list of dict containing the name and timestamp for each
|
||||
collection.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_collection_counts(self, user_id):
|
||||
"""Returns the collection counts.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
A list of dict containing the name and count for each
|
||||
collection.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_collection_sizes(self, user_id):
|
||||
"""Returns the total size in KB for each collection of a user storage.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
A dict containing the name and size for each collection.
|
||||
"""
|
||||
|
||||
#
|
||||
# Items APIs
|
||||
#
|
||||
@abc.abstractmethod
|
||||
def item_exists(self, user_id, collection_name, item_id):
|
||||
"""Returns True if an item exists.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- item_id: string identifying the item
|
||||
|
||||
Returns:
|
||||
True or False
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_items(self, user_id, collection_name, fields=None):
|
||||
"""returns items from a collection
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- item_id: string identifying the item
|
||||
- fields: if provided, its a list of fields to return,
|
||||
all fields are returns by default.
|
||||
|
||||
Returns:
|
||||
A list of dict containing the information for the items
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_item(self, user_id, collection_name, item_id, fields=None):
|
||||
"""Returns one item.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- item_id: string identifying the item
|
||||
- fields: if provided, its a list of fields to return,
|
||||
all fields are returns by default.
|
||||
|
||||
Returns:
|
||||
A dict containing the information for the item
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def set_item(self, user_id, collection_name, item_id, **values):
|
||||
"""Sets an item.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- item_id: string identifying the item
|
||||
- values: mapping containing the values.
|
||||
|
||||
Returns:
|
||||
A dict containing the information for the item
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def set_items(self, user_id, collection_name, items):
|
||||
"""Adds or update a batch of items.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- items: a list of dict
|
||||
|
||||
Returns:
|
||||
Integer: number of inserts/updates
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def delete_item(self, user_id, collection_name, item_id):
|
||||
"""Deletes an item
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- item_id: string identifying the item
|
||||
- values: mapping containing the values.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def delete_items(self, user_id, collection_name, item_ids=None):
|
||||
"""Deletes items. All items are removed unless item_ids is provided.
|
||||
|
||||
Args:
|
||||
- user_id: integer identifying the user in the storage.
|
||||
- collection_name: name of the collection.
|
||||
- item_ids: if provided, its the ids of the items to be removed.
|
||||
all items will be removed if not provided.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_total_size(self, user_id):
|
||||
"""Returns the total size in KB of a user storage.
|
||||
|
||||
The size is the sum of stored payloads.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
The size in Kbytes (float)
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_size_left(self, user_id):
|
||||
"""Returns the remaining size in KB of a user storage.
|
||||
|
||||
The remaining size is calculated by substracting the
|
||||
max size and the used size.
|
||||
|
||||
Args:
|
||||
user_id: integer identifying the user in the storage.
|
||||
|
||||
Returns:
|
||||
The remaining size in Kbytes (float)
|
||||
"""
|
|
@@ -1,98 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
Multiple backend -- read to a master storage,
|
||||
writes to a collection of slave storages
|
||||
"""
|
||||
from syncserver.storage import WeaveStorage
|
||||
from syncserver.plugin import filter_params
|
||||
|
||||
|
||||
def _prepare_apis(name, bases, attrs):
|
||||
"""Sets the APIs so reads happen on master, and write on all"""
|
||||
read = ('user_exists', 'get_user', 'collection_exists', 'get_collection',
|
||||
'get_collections', 'get_collection_names', 'get_item',
|
||||
'get_collection_timestamps', 'get_collection_counts',
|
||||
'get_collection_max_timestamp', 'item_exists', 'get_items',
|
||||
'get_total_size', 'get_collection_sizes', 'get_size_left')
|
||||
|
||||
write = ('set_user', 'delete_user', 'delete_storage', 'delete_collection',
|
||||
'set_collection', 'set_item', 'set_items', 'delete_items',
|
||||
'delete_item')
|
||||
|
||||
def _write(func):
|
||||
|
||||
def __write(self, *args, **kwargs):
|
||||
res = getattr(self.master, func)(*args, **kwargs)
|
||||
# XXX see if we want to perform it asynced
|
||||
for slave in self.slaves:
|
||||
getattr(slave, func)(*args, **kwargs)
|
||||
return res
|
||||
return __write
|
||||
|
||||
def _read(func):
|
||||
|
||||
def __read(self, *args, **kwargs):
|
||||
return getattr(self.master, func)(*args, **kwargs)
|
||||
return __read
|
||||
|
||||
for meth in read:
|
||||
attrs[meth] = _read(meth)
|
||||
|
||||
for meth in write:
|
||||
attrs[meth] = _write(meth)
|
||||
|
||||
return type(name, bases, attrs)
|
||||
|
||||
|
||||
class WeaveMultiStorage(object):
|
||||
"""Iterate on storages on every call."""
|
||||
__metaclass__ = _prepare_apis
|
||||
|
||||
def __init__(self, master, slaves, **params):
|
||||
__, master_params = filter_params('master', params, splitchar='_')
|
||||
self.master = WeaveStorage.get(master, **master_params)
|
||||
self.slaves = []
|
||||
|
||||
for slave in slaves.split(','):
|
||||
name, type_ = slave.split(':')
|
||||
__, slave_params = filter_params(name, params, splitchar='_')
|
||||
self.slaves.append(WeaveStorage.get(type_, **slave_params))
|
||||
|
||||
@classmethod
|
||||
def get_name(cls):
|
||||
"""Returns the name of the storage"""
|
||||
return 'multi'
|
|
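A minimal sketch of how the multi backend above might be configured, inferred
from the __init__ signature (the backend names, prefixes and sqluri values are
illustrative; filter_params is expected to route the prefixed keyword
arguments to each storage):

storage = WeaveMultiStorage(
    master='sql',                        # type of the master storage
    slaves='backup1:sql,backup2:sql',    # comma-separated "name:type" pairs
    master_sqluri='mysql://sync:sync@master-db/sync',
    backup1_sqluri='mysql://sync:sync@backup1-db/sync',
    backup2_sqluri='mysql://sync:sync@backup2-db/sync')

# reads hit the master only; writes are replayed on every slave
storage.set_item(1, 'history', 'abc', payload='...')
names = storage.get_collection_names(1)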
@ -1,337 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
Redis + SQL backend
|
||||
"""
|
||||
import json
|
||||
from time import time
|
||||
|
||||
import redis
|
||||
from sqlalchemy.sql import select, bindparam, func
|
||||
|
||||
from syncserver.storage.sql import SQLStorage, STANDARD_COLLECTIONS_NAMES
|
||||
from syncserver import logger
|
||||
from syncserver.storage.sqlmappers import wbo
|
||||
|
||||
_SQLURI = 'mysql://sync:sync@localhost/sync'
|
||||
_KB = float(1024)
|
||||
_COLLECTION_LIST = select([wbo.c.collection, func.max(wbo.c.modified),
|
||||
func.count(wbo)],
|
||||
wbo.c.username == bindparam('user_id')).group_by(wbo.c.collection)
|
||||
|
||||
|
||||
def _key(*args):
|
||||
return ':'.join([str(arg) for arg in args])
|
||||
|
||||
|
||||
class GracefulRedisServer(redis.Redis):
|
||||
"""If the Redis server gets down, we emit log.errors but
|
||||
make sure the app does not break"""
|
||||
|
||||
def get(self, key):
|
||||
try:
|
||||
return super(GracefulRedisServer, self).get(key)
|
||||
except redis.client.ConnectionError, e:
|
||||
logger.error(str(e))
|
||||
return None
|
||||
|
||||
def set(self, key, value):
|
||||
try:
|
||||
return super(GracefulRedisServer, self).set(key, value)
|
||||
except redis.client.ConnectionError, e:
|
||||
logger.error(str(e))
|
||||
return None
|
||||
|
||||
def smembers(self, key):
|
||||
try:
|
||||
return super(GracefulRedisServer, self).smembers(key)
|
||||
except redis.client.ConnectionError, e:
|
||||
logger.error(str(e))
|
||||
return []
|
||||
|
||||
def sismember(self, key, value):
|
||||
try:
|
||||
return super(GracefulRedisServer, self).sismember(key, value)
|
||||
except redis.client.ConnectionError, e:
|
||||
logger.error(str(e))
|
||||
return False
|
||||
|
||||
def srem(self, key, value):
|
||||
try:
|
||||
super(GracefulRedisServer, self).srem(key, value)
|
||||
except redis.client.ConnectionError, e:
|
||||
logger.error(str(e))
|
||||
|
||||
|
||||
class RediSQLStorage(SQLStorage):
|
||||
"""Uses Redis when possible/useful, SQL otherwise.
|
||||
"""
|
||||
|
||||
def __init__(self, sqluri=_SQLURI, standard_collections=False,
|
||||
use_quota=False, quota_size=0, pool_size=100,
|
||||
pool_recycle=3600, redis_host='localhost',
|
||||
redis_port=6379):
|
||||
super(RediSQLStorage, self).__init__(sqluri, standard_collections,
|
||||
use_quota, quota_size, pool_size,
|
||||
pool_recycle)
|
||||
self._conn = GracefulRedisServer(host=redis_host, port=redis_port)
|
||||
self._conn.ping() # will generate a connection error if down
|
||||
|
||||
@classmethod
|
||||
    def get_name(cls):
|
||||
return 'redisql'
|
||||
|
||||
def _delete_cache(self, user_id):
|
||||
"""Removes all cache for the given user"""
|
||||
item_ids = self._conn.smembers(_key('tabs', user_id))
|
||||
|
||||
for item_id in item_ids:
|
||||
self._conn.srem(_key('tabs', user_id), item_id)
|
||||
self._conn.set(_key('tabs', user_id, item_id), None)
|
||||
self._conn.set(_key('tabs', 'size', user_id, item_id), None)
|
||||
|
||||
self._conn.set(_key('meta', 'global', user_id), None)
|
||||
|
||||
def delete_storage(self, user_id):
|
||||
self._delete_cache(user_id)
|
||||
super(RediSQLStorage, self).delete_storage(user_id)
|
||||
|
||||
def delete_user(self, user_id):
|
||||
self._delete_cache(user_id)
|
||||
super(RediSQLStorage, self).delete_user(user_id)
|
||||
|
||||
def _is_meta_global(self, collection_name, item_id):
|
||||
return collection_name == 'meta' and item_id == 'global'
|
||||
|
||||
def item_exists(self, user_id, collection_name, item_id):
|
||||
"""Returns a timestamp if an item exists."""
|
||||
if self._is_meta_global(collection_name, item_id):
|
||||
value = self._conn.get(_key('meta', 'global', user_id))
|
||||
if value is not None:
|
||||
wbo = json.loads(value)
|
||||
return wbo['modified']
|
||||
elif collection_name == 'tabs':
|
||||
if self._conn.sismember(_key('tabs', user_id), item_id):
|
||||
return True
|
||||
|
||||
return super(RediSQLStorage, self).item_exists(user_id,
|
||||
collection_name,
|
||||
item_id)
|
||||
|
||||
def get_item(self, user_id, collection_name, item_id, fields=None):
|
||||
"""Returns one item.
|
||||
|
||||
If the item is meta/global, we want to get the cached one if present.
|
||||
"""
|
||||
if self._is_meta_global(collection_name, item_id):
|
||||
value = self._conn.get(_key('meta', 'global', user_id))
|
||||
if value is not None:
|
||||
return json.loads(value)
|
||||
elif collection_name == 'tabs':
|
||||
value = self._conn.get(_key('tabs', user_id, item_id))
|
||||
if value is not None:
|
||||
return json.loads(value)
|
||||
|
||||
return super(RediSQLStorage, self).get_item(user_id, collection_name,
|
||||
item_id, fields)
|
||||
|
||||
def _update_cached_stamp(self, user_id, collection_name, stamp):
|
||||
self._conn.set(_key('collections', 'stamp', user_id, collection_name),
|
||||
stamp)
|
||||
|
||||
def set_item(self, user_id, collection_name, item_id, **values):
|
||||
"""Adds or update an item"""
|
||||
if 'payload' in values and 'modified' not in values:
|
||||
now = time()
|
||||
values['modified'] = now
|
||||
self._update_cached_stamp(user_id, collection_name, now)
|
||||
|
||||
if self._is_meta_global(collection_name, item_id):
|
||||
self._conn.set(_key('meta', 'global', user_id),
|
||||
json.dumps(values))
|
||||
elif collection_name == 'tabs':
|
||||
self._conn.sadd(_key('tabs', user_id), item_id)
|
||||
self._conn.set(_key('tabs', user_id, item_id),
|
||||
json.dumps(values))
|
||||
self._conn.set(_key('tabs', 'size', user_id, item_id),
|
||||
len(values.get('payload', '')))
|
||||
# we don't store tabs in SQL
|
||||
return
|
||||
|
||||
return self._set_item(user_id, collection_name, item_id, **values)
|
||||
|
||||
def set_items(self, user_id, collection_name, items):
|
||||
"""Adds or update a batch of items.
|
||||
|
||||
Returns a list of success or failures.
|
||||
"""
|
||||
now = time()
|
||||
if self._is_meta_global(collection_name, items[0]['id']):
|
||||
values = items[0]
|
||||
values['username'] = user_id
|
||||
self._conn.set(_key('meta', 'global', user_id),
|
||||
json.dumps(values))
|
||||
elif collection_name == 'tabs':
|
||||
for item in items:
|
||||
item_id = item['id']
|
||||
self._conn.sadd(_key('tabs', user_id), item_id)
|
||||
self._conn.set(_key('tabs', user_id, item_id),
|
||||
json.dumps(item))
|
||||
self._conn.set(_key('tabs', 'size', user_id, item_id),
|
||||
len(item.get('payload', '')))
|
||||
# we don't store tabs in SQL
|
||||
self._update_cached_stamp(user_id, 'tabs', now)
|
||||
return
|
||||
|
||||
self._update_cached_stamp(user_id, collection_name, now)
|
||||
return super(RediSQLStorage, self).set_items(user_id, collection_name,
|
||||
items)
|
||||
|
||||
def delete_item(self, user_id, collection_name, item_id):
|
||||
"""Deletes an item"""
|
||||
now = time()
|
||||
if self._is_meta_global(collection_name, item_id):
|
||||
self._conn.set(_key('meta', 'global', user_id), None)
|
||||
elif collection_name == 'tabs':
|
||||
self._conn.srem(_key('tabs', user_id), item_id)
|
||||
self._conn.set(_key('tabs', user_id, item_id), None)
|
||||
self._conn.set(_key('tabs', 'size', user_id, item_id), None)
|
||||
# we don't store tabs in SQL
|
||||
self._update_cached_stamp(user_id, 'tabs', now)
|
||||
return
|
||||
|
||||
self._update_cached_stamp(user_id, collection_name, now)
|
||||
return super(RediSQLStorage, self).delete_item(user_id,
|
||||
collection_name,
|
||||
item_id)
|
||||
|
||||
def delete_items(self, user_id, collection_name, item_ids=None,
|
||||
filters=None, limit=None, offset=None, sort=None):
|
||||
"""Deletes items. All items are removed unless item_ids is provided"""
|
||||
self._update_cached_stamp(user_id, collection_name, time())
|
||||
if (collection_name == 'meta' and (item_ids is None
|
||||
or 'global' in item_ids)):
|
||||
self._conn.set(_key('meta', 'global', user_id), None)
|
||||
elif collection_name == 'tabs':
|
||||
# getting all members
|
||||
if item_ids is None:
|
||||
item_ids = self._conn.smembers(_key('tabs', user_id))
|
||||
|
||||
for item_id in item_ids:
|
||||
self._conn.srem(_key('tabs', user_id), item_id)
|
||||
self._conn.set(_key('tabs', user_id, item_id), None)
|
||||
self._conn.set(_key('tabs', 'size', user_id, item_id), None)
|
||||
|
||||
# we don't store tabs in SQL
|
||||
return
|
||||
|
||||
return super(RediSQLStorage, self).delete_items(user_id,
|
||||
collection_name,
|
||||
item_ids, filters,
|
||||
limit, offset, sort)
|
||||
|
||||
def get_total_size(self, user_id):
|
||||
"""Returns the total size in KB of a user storage"""
|
||||
size = super(RediSQLStorage, self).get_total_size(user_id)
|
||||
|
||||
# add the tabs sizes, if any
|
||||
tabs_size = 0
|
||||
for item_id in self._conn.smembers(_key('tabs', user_id)):
|
||||
tab_size = self._conn.get(_key('tabs', 'size', user_id, item_id))
|
||||
if tab_size is not None:
|
||||
tabs_size += int(tab_size)
|
||||
|
||||
return size + tabs_size / _KB
|
||||
|
||||
def get_collection_sizes(self, user_id):
|
||||
"""Returns the total size in KB for each collection of a user storage.
|
||||
"""
|
||||
sizes = super(RediSQLStorage, self).get_collection_sizes(user_id)
|
||||
tabs_size = 0
|
||||
for item_id in self._conn.smembers(_key('tabs', user_id)):
|
||||
tab_size = self._conn.get(_key('tabs', 'size', user_id, item_id))
|
||||
if tab_size is not None:
|
||||
tabs_size += int(tab_size)
|
||||
|
||||
sizes['tabs'] = tabs_size
|
||||
return sizes
|
||||
|
||||
def get_collection_timestamps(self, user_id):
|
||||
if self.standard_collections:
|
||||
collection_names = STANDARD_COLLECTIONS_NAMES.keys()
|
||||
else:
|
||||
collection_names = [name for id_, name in
|
||||
self.get_collection_names(user_id)]
|
||||
|
||||
if self._conn.get(_key('stamps', user_id)) is None:
|
||||
# not cached yet
|
||||
stamps = super(RediSQLStorage,
|
||||
self).get_collection_timestamps(user_id)
|
||||
|
||||
# we also need to add the tabs timestamp
|
||||
stamps['tabs'] = self._conn.get(_key('collections', 'stamp',
|
||||
user_id, 'tabs'))
|
||||
|
||||
for name, value in stamps.items():
|
||||
self._conn.set(_key('collections', 'stamp',
|
||||
user_id, name), value)
|
||||
|
||||
for colname in collection_names:
|
||||
if colname in stamps:
|
||||
continue
|
||||
self._conn.set(_key('collections', 'stamp',
|
||||
user_id, colname), None)
|
||||
stamps[colname] = None
|
||||
|
||||
# cache marker
|
||||
self._conn.set(_key('stamps', user_id), 1)
|
||||
return stamps
|
||||
|
||||
# got it in cache
|
||||
stamps = {}
|
||||
for name in collection_names:
|
||||
stamp = self._conn.get(_key('collections', 'stamp', user_id,
|
||||
name))
|
||||
if stamp is not None:
|
||||
stamp = float(stamp)
|
||||
stamps[name] = stamp
|
||||
|
||||
return stamps
|
||||
|
||||
def get_collection_max_timestamp(self, user_id, collection_name):
|
||||
# let's get them all, so they get cached
|
||||
stamps = self.get_collection_timestamps(user_id)
|
||||
return stamps[collection_name]
|
|
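For clarity, a sketch of the Redis key layout used by the cache above,
reconstructed from the _key() calls (the user and item ids are example
values):

import redis

r = redis.Redis(host='localhost', port=6379)
user_id, item_id = 123, 'abc'   # example values

r.get('meta:global:%d' % user_id)              # cached meta/global WBO (JSON)
r.smembers('tabs:%d' % user_id)                # ids of the cached tabs
r.get('tabs:%d:%s' % (user_id, item_id))       # one cached tab (JSON)
r.get('tabs:size:%d:%s' % (user_id, item_id))  # payload size of that tab
r.get('collections:stamp:%d:tabs' % user_id)   # cached collection timestamp
r.get('stamps:%d' % user_id)                   # marker: timestamps are cached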
@ -1,638 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
SQL backend
|
||||
"""
|
||||
from time import time
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.sql import (text, select, bindparam, delete, insert, update,
|
||||
func, and_)
|
||||
|
||||
from syncserver.storage.sqlmappers import (tables, users, collections,
|
||||
wbo, MAX_TTL)
|
||||
from syncserver.util import time2bigint, bigint2time
|
||||
from syncserver.wbo import WBO
|
||||
|
||||
|
||||
_SQLURI = 'mysql://sync:sync@localhost/sync'
|
||||
_STANDARD_COLLECTIONS = {1: 'client', 2: 'crypto', 3: 'forms', 4: 'history',
|
||||
5: 'key', 6: 'meta', 7: 'bookmarks', 8: 'prefs',
|
||||
9: 'tabs', 10: 'passwords'}
|
||||
|
||||
STANDARD_COLLECTIONS_NAMES = dict([(value, key) for key, value in
|
||||
_STANDARD_COLLECTIONS.items()])
|
||||
|
||||
# SQL Queries
|
||||
_USER_N_COLL = and_(collections.c.userid == bindparam('user_id'),
|
||||
collections.c.name == bindparam('collection_name'))
|
||||
|
||||
_USER_EXISTS = select([users.c.id], users.c.id == bindparam('user_id'))
|
||||
_DELETE_USER_COLLECTIONS = delete(collections).where( \
|
||||
collections.c.userid == bindparam('user_id'))
|
||||
_DELETE_USER_COLLECTION = delete(collections).where(_USER_N_COLL)
|
||||
_DELETE_USER_WBOS = delete(wbo, wbo.c.username == bindparam('user_id'))
|
||||
_DELETE_USER = delete(users, users.c.id == bindparam('user_id'))
|
||||
|
||||
_COLLECTION_EXISTS = select([collections.c.collectionid], _USER_N_COLL)
|
||||
|
||||
_COLLECTION_NEXTID = select([func.max(collections.c.collectionid)],
|
||||
collections.c.userid == bindparam('user_id'))
|
||||
|
||||
_COLLECTION_STAMPS = select([wbo.c.collection, func.max(wbo.c.modified)],
|
||||
and_(wbo.c.username == bindparam('user_id'),
|
||||
wbo.c.ttl > bindparam('ttl'))).group_by(wbo.c.collection)
|
||||
|
||||
_COLLECTION_COUNTS = select([wbo.c.collection, func.count(wbo.c.collection)],
|
||||
and_(wbo.c.username == bindparam('user_id'),
|
||||
wbo.c.ttl > bindparam('ttl'))).group_by(wbo.c.collection)
|
||||
|
||||
_COLLECTIONS_MAX_STAMPS = select([func.max(wbo.c.modified)],
|
||||
and_(wbo.c.collection == bindparam('collection_id'),
|
||||
wbo.c.username == bindparam('user_id'),
|
||||
wbo.c.ttl > bindparam('ttl')))
|
||||
|
||||
_ITEM_ID_COL_USER = and_(wbo.c.collection == bindparam('collection_id'),
|
||||
wbo.c.username == bindparam('user_id'),
|
||||
wbo.c.id == bindparam('item_id'),
|
||||
wbo.c.ttl > bindparam('ttl'))
|
||||
|
||||
_ITEM_EXISTS = select([wbo.c.modified], _ITEM_ID_COL_USER)
|
||||
|
||||
_DELETE_ITEMS = delete(wbo,
|
||||
and_(wbo.c.collection == bindparam('collection_id'),
|
||||
wbo.c.username == bindparam('user_id'),
|
||||
wbo.c.ttl > bindparam('ttl')))
|
||||
|
||||
_USER_STORAGE_SIZE = select([func.sum(wbo.c.payload_size)],
|
||||
and_(wbo.c.username == bindparam('user_id'),
|
||||
wbo.c.ttl > bindparam('ttl')))
|
||||
|
||||
_COLLECTIONS_STORAGE_SIZE = select([wbo.c.collection,
|
||||
func.sum(wbo.c.payload_size)],
|
||||
and_(wbo.c.username == bindparam('user_id'),
|
||||
wbo.c.ttl > bindparam('ttl'))).group_by(wbo.c.collection)
|
||||
|
||||
_KB = float(1024)
|
||||
|
||||
|
||||
class SQLStorage(object):
|
||||
|
||||
def __init__(self, sqluri=_SQLURI, standard_collections=False,
|
||||
use_quota=False, quota_size=0, pool_size=100,
|
||||
pool_recycle=3600, reset_on_return=True):
|
||||
self.sqluri = sqluri
|
||||
kw = {'pool_size': int(pool_size),
|
||||
'pool_recycle': int(pool_recycle),
|
||||
'logging_name': 'weaveserver'}
|
||||
|
||||
if self.sqluri.startswith('mysql'):
|
||||
kw['reset_on_return'] = reset_on_return
|
||||
|
||||
self._engine = create_engine(sqluri, **kw)
|
||||
for table in tables:
|
||||
table.metadata.bind = self._engine
|
||||
table.create(checkfirst=True)
|
||||
self._user_collections = {}
|
||||
self.engine_name = self._engine.name
|
||||
self.standard_collections = standard_collections
|
||||
self.use_quota = use_quota
|
||||
self.quota_size = long(quota_size)
|
||||
|
||||
@classmethod
|
||||
def get_name(cls):
|
||||
"""Returns the name of the storage"""
|
||||
return 'sql'
|
||||
|
||||
#
|
||||
# Users APIs
|
||||
#
|
||||
|
||||
def user_exists(self, user_id):
|
||||
"""Returns true if the user exists."""
|
||||
res = self._engine.execute(_USER_EXISTS, user_id=user_id).fetchone()
|
||||
return res is not None
|
||||
|
||||
def set_user(self, user_id, **values):
|
||||
"""set information for a user. values contains the fields to set.
|
||||
|
||||
If the user doesn't exists, it will be created."""
|
||||
values['id'] = user_id
|
||||
if not self.user_exists(user_id):
|
||||
query = insert(users).values(**values)
|
||||
else:
|
||||
query = update(users).where(users.c.id == user_id)
|
||||
query = query.values(**values)
|
||||
|
||||
self._engine.execute(query)
|
||||
|
||||
def get_user(self, user_id, fields=None):
|
||||
"""Returns user information.
|
||||
|
||||
        If fields is provided, it's a list of the fields to return.
|
||||
"""
|
||||
if fields is None:
|
||||
fields = [users]
|
||||
else:
|
||||
fields = [getattr(users.c, field) for field in fields]
|
||||
|
||||
query = select(fields, users.c.id == user_id)
|
||||
return self._engine.execute(query).first()
|
||||
|
||||
def delete_user(self, user_id):
|
||||
"""Removes a user (and all its data)"""
|
||||
for query in (_DELETE_USER_COLLECTIONS, _DELETE_USER_WBOS,
|
||||
_DELETE_USER):
|
||||
self._engine.execute(query, user_id=user_id)
|
||||
|
||||
def _get_collection_id(self, user_id, collection_name, create=True):
|
||||
"""Returns a collection id, given the name."""
|
||||
if (self.standard_collections and
|
||||
collection_name in STANDARD_COLLECTIONS_NAMES):
|
||||
return STANDARD_COLLECTIONS_NAMES[collection_name]
|
||||
|
||||
# custom collection
|
||||
data = self.get_collection(user_id, collection_name,
|
||||
['collectionid'])
|
||||
if data is None:
|
||||
# we want to create it
|
||||
if not create:
|
||||
return None
|
||||
return self.set_collection(user_id, collection_name)
|
||||
|
||||
return data['collectionid']
|
||||
|
||||
def delete_storage(self, user_id):
|
||||
"""Removes all user data"""
|
||||
for query in (_DELETE_USER_COLLECTIONS, _DELETE_USER_WBOS):
|
||||
self._engine.execute(query, user_id=user_id)
|
||||
# XXX see if we want to check the rowcount
|
||||
return True
|
||||
|
||||
#
|
||||
# Collections APIs
|
||||
#
|
||||
|
||||
def delete_collection(self, user_id, collection_name):
|
||||
"""deletes a collection"""
|
||||
if not self.collection_exists(user_id, collection_name):
|
||||
return
|
||||
|
||||
# removing items first
|
||||
self.delete_items(user_id, collection_name)
|
||||
|
||||
# then the collection
|
||||
return self._engine.execute(_DELETE_USER_COLLECTION, user_id=user_id,
|
||||
collection_name=collection_name)
|
||||
|
||||
def collection_exists(self, user_id, collection_name):
|
||||
"""Returns True if the collection exists"""
|
||||
res = self._engine.execute(_COLLECTION_EXISTS, user_id=user_id,
|
||||
collection_name=collection_name)
|
||||
res = res.fetchone()
|
||||
return res is not None
|
||||
|
||||
def set_collection(self, user_id, collection_name, **values):
|
||||
"""Creates a collection"""
|
||||
# XXX values is not used for now because there are no values besides
|
||||
# the name
|
||||
if self.collection_exists(user_id, collection_name):
|
||||
return
|
||||
|
||||
values['userid'] = user_id
|
||||
values['name'] = collection_name
|
||||
|
||||
# getting the max collection_id
|
||||
# XXX why don't we have an autoinc here ?
|
||||
# see https://bugzilla.mozilla.org/show_bug.cgi?id=579096
|
||||
max = self._engine.execute(_COLLECTION_NEXTID,
|
||||
user_id=user_id).first()
|
||||
if max[0] is None:
|
||||
next_id = 1
|
||||
else:
|
||||
next_id = max[0] + 1
|
||||
|
||||
# insertion
|
||||
values['collectionid'] = next_id
|
||||
query = insert(collections).values(**values)
|
||||
self._engine.execute(query, **values)
|
||||
return next_id
|
||||
|
||||
def get_collection(self, user_id, collection_name, fields=None):
|
||||
"""Return information about a collection."""
|
||||
if fields is None:
|
||||
fields = [collections]
|
||||
field_names = collections.columns.keys()
|
||||
else:
|
||||
field_names = fields
|
||||
fields = [getattr(collections.c, field) for field in fields]
|
||||
|
||||
query = select(fields, and_(collections.c.userid == user_id,
|
||||
collections.c.name == collection_name))
|
||||
res = self._engine.execute(query).first()
|
||||
|
||||
# the collection is created
|
||||
if res is None:
|
||||
collid = self.set_collection(user_id, collection_name)
|
||||
res = {'userid': user_id, 'collectionid': collid,
|
||||
'name': collection_name}
|
||||
if fields is not None:
|
||||
for key in res.keys():
|
||||
if key not in field_names:
|
||||
del res[key]
|
||||
else:
|
||||
# make this a single step
|
||||
res = dict([(key, value) for key, value in res.items()
|
||||
if value is not None])
|
||||
return res
|
||||
|
||||
def get_collections(self, user_id, fields=None):
|
||||
"""returns the collections information """
|
||||
if fields is None:
|
||||
fields = [collections]
|
||||
else:
|
||||
fields = [getattr(collections.c, field) for field in fields]
|
||||
|
||||
query = select(fields, collections.c.userid == user_id)
|
||||
return self._engine.execute(query).fetchall()
|
||||
|
||||
def get_collection_names(self, user_id):
|
||||
"""return the collection names for a given user"""
|
||||
query = text('select collectionid, name from collections '
|
||||
'where userid = :user_id')
|
||||
return [(res[0], res[1]) for res in
|
||||
self._engine.execute(query, user_id=user_id).fetchall()]
|
||||
|
||||
def get_collection_timestamps(self, user_id):
|
||||
"""return the collection names for a given user"""
|
||||
res = self._engine.execute(_COLLECTION_STAMPS,
|
||||
user_id=user_id, ttl=time())
|
||||
return dict([(self._collid2name(user_id, coll_id), bigint2time(stamp))
|
||||
for coll_id, stamp in res])
|
||||
|
||||
def _collid2name(self, user_id, collection_id):
|
||||
if (self.standard_collections and
|
||||
collection_id in _STANDARD_COLLECTIONS):
|
||||
return _STANDARD_COLLECTIONS[collection_id]
|
||||
|
||||
# custom collections
|
||||
if user_id not in self._user_collections:
|
||||
names = dict(self.get_collection_names(user_id))
|
||||
self._user_collections[user_id] = names
|
||||
|
||||
return self._user_collections[user_id][collection_id]
|
||||
|
||||
def _purge_user_collections(self, user_id):
|
||||
if user_id in self._user_collections:
|
||||
del self._user_collections[user_id]
|
||||
|
||||
def get_collection_counts(self, user_id):
|
||||
"""Return the collection counts for a given user"""
|
||||
res = self._engine.execute(_COLLECTION_COUNTS, user_id=user_id,
|
||||
ttl=time())
|
||||
try:
|
||||
return dict([(self._collid2name(user_id, collid), count)
|
||||
for collid, count in res])
|
||||
finally:
|
||||
self._purge_user_collections(user_id)
|
||||
|
||||
def get_collection_max_timestamp(self, user_id, collection_name):
|
||||
"""Returns the max timestamp of a collection."""
|
||||
collection_id = self._get_collection_id(user_id, collection_name)
|
||||
res = self._engine.execute(_COLLECTIONS_MAX_STAMPS, user_id=user_id,
|
||||
collection_id=collection_id, ttl=time())
|
||||
res = res.fetchone()
|
||||
stamp = res[0]
|
||||
if stamp is None:
|
||||
return None
|
||||
return bigint2time(stamp)
|
||||
|
||||
def get_collection_sizes(self, user_id):
|
||||
"""Returns the total size in KB for each collection of a user storage.
|
||||
|
||||
The size is the sum of stored payloads.
|
||||
"""
|
||||
if not self.use_quota:
|
||||
return dict()
|
||||
res = self._engine.execute(_COLLECTIONS_STORAGE_SIZE, user_id=user_id,
|
||||
ttl=time())
|
||||
return dict([(self._collid2name(user_id, col[0]), int(col[1]) / _KB)
|
||||
for col in res])
|
||||
|
||||
#
|
||||
# Items APIs
|
||||
#
|
||||
def item_exists(self, user_id, collection_name, item_id):
|
||||
"""Returns a timestamp if an item exists."""
|
||||
collection_id = self._get_collection_id(user_id, collection_name)
|
||||
res = self._engine.execute(_ITEM_EXISTS, user_id=user_id,
|
||||
item_id=item_id,
|
||||
collection_id=collection_id, ttl=time())
|
||||
res = res.fetchone()
|
||||
if res is None:
|
||||
return None
|
||||
return bigint2time(res[0])
|
||||
|
||||
def get_items(self, user_id, collection_name, fields=None, filters=None,
|
||||
limit=None, offset=None, sort=None):
|
||||
"""returns items from a collection
|
||||
|
||||
"filter" is a dict used to add conditions to the db query.
|
||||
Its keys are the field names on which the condition operates.
|
||||
Its values are the values the field should have.
|
||||
It can be a single value, or a list. For the latter the in()
|
||||
operator is used. For single values, the operator has to be provided.
|
||||
"""
|
||||
collection_id = self._get_collection_id(user_id, collection_name)
|
||||
if fields is None:
|
||||
fields = [wbo]
|
||||
else:
|
||||
fields = [getattr(wbo.c, field) for field in fields]
|
||||
|
||||
# preparing the where statement
|
||||
where = [wbo.c.username == user_id,
|
||||
wbo.c.collection == collection_id]
|
||||
|
||||
if filters is not None:
|
||||
for field, value in filters.items():
|
||||
field = getattr(wbo.c, field)
|
||||
|
||||
operator, value = value
|
||||
if field.name == 'modified':
|
||||
value = time2bigint(value)
|
||||
|
||||
if isinstance(value, (list, tuple)):
|
||||
value = [str(item) for item in value]
|
||||
where.append(field.in_(value))
|
||||
else:
|
||||
if operator == '=':
|
||||
where.append(field == value)
|
||||
elif operator == '<':
|
||||
where.append(field < value)
|
||||
elif operator == '>':
|
||||
where.append(field > value)
|
||||
|
||||
if filters is None or 'ttl' not in filters:
|
||||
where.append(wbo.c.ttl > time())
|
||||
|
||||
where = and_(*where)
|
||||
query = select(fields, where)
|
||||
|
||||
if sort is not None:
|
||||
if sort == 'oldest':
|
||||
query = query.order_by(wbo.c.modified.asc())
|
||||
elif sort == 'newest':
|
||||
query = query.order_by(wbo.c.modified.desc())
|
||||
else:
|
||||
query = query.order_by(wbo.c.sortindex.desc())
|
||||
|
||||
if limit is not None and int(limit) > 0:
|
||||
query = query.limit(int(limit))
|
||||
|
||||
if offset is not None and int(offset) > 0:
|
||||
query = query.offset(int(offset))
|
||||
|
||||
res = self._engine.execute(query)
|
||||
converters = {'modified': bigint2time}
|
||||
return [WBO(line, converters) for line in res]
|
||||
|
||||
def get_item(self, user_id, collection_name, item_id, fields=None):
|
||||
"""returns one item"""
|
||||
collection_id = self._get_collection_id(user_id, collection_name)
|
||||
if fields is None:
|
||||
fields = [wbo]
|
||||
else:
|
||||
fields = [getattr(wbo.c, field) for field in fields]
|
||||
|
||||
query = select(fields, _ITEM_ID_COL_USER)
|
||||
res = self._engine.execute(query, user_id=user_id, item_id=item_id,
|
||||
collection_id=collection_id,
|
||||
ttl=time()).first()
|
||||
if res is None:
|
||||
return None
|
||||
|
||||
return WBO(res, {'modified': bigint2time})
|
||||
|
||||
def _set_item(self, user_id, collection_name, item_id, **values):
|
||||
"""Adds or update an item"""
|
||||
if 'modified' in values:
|
||||
values['modified'] = time2bigint(values['modified'])
|
||||
|
||||
if 'ttl' not in values:
|
||||
values['ttl'] = MAX_TTL
|
||||
else:
|
||||
values['ttl'] += time()
|
||||
|
||||
modified = self.item_exists(user_id, collection_name, item_id)
|
||||
|
||||
if self.use_quota and 'payload' in values:
|
||||
values['payload_size'] = len(values['payload'])
|
||||
|
||||
collection_id = self._get_collection_id(user_id,
|
||||
collection_name)
|
||||
|
||||
        if modified is None:  # does not exist
|
||||
values['collection'] = collection_id
|
||||
values['id'] = item_id
|
||||
values['username'] = user_id
|
||||
query = insert(wbo).values(**values)
|
||||
else:
|
||||
if 'id' in values:
|
||||
del values['id']
|
||||
key = and_(wbo.c.id == item_id, wbo.c.username == user_id,
|
||||
wbo.c.collection == collection_id)
|
||||
query = update(wbo).where(key).values(**values)
|
||||
|
||||
self._engine.execute(query)
|
||||
|
||||
if 'modified' in values:
|
||||
return bigint2time(values['modified'])
|
||||
|
||||
return modified
|
||||
|
||||
def set_item(self, user_id, collection_name, item_id, **values):
|
||||
"""Adds or update an item"""
|
||||
if 'payload' in values and 'modified' not in values:
|
||||
values['modified'] = time()
|
||||
|
||||
return self._set_item(user_id, collection_name, item_id, **values)
|
||||
|
||||
def set_items(self, user_id, collection_name, items):
|
||||
"""Adds or update a batch of items.
|
||||
|
||||
Returns a list of success or failures.
|
||||
"""
|
||||
if self.engine_name in ('sqlite', 'postgresql'):
|
||||
count = 0
|
||||
for item in items:
|
||||
if 'id' not in item:
|
||||
continue
|
||||
item_id = item['id']
|
||||
self.set_item(user_id, collection_name, item_id, **item)
|
||||
count += 1
|
||||
return count
|
||||
|
||||
# XXX See if SQLAlchemy knows how to do batch inserts
|
||||
# that's quite specific to mysql
|
||||
fields = ('id', 'parentid', 'predecessorid', 'sortindex', 'modified',
|
||||
'payload', 'payload_size', 'ttl')
|
||||
|
||||
query = 'insert into wbo (username, collection, %s) values ' \
|
||||
% ','.join(fields)
|
||||
|
||||
values = {}
|
||||
values['collection'] = self._get_collection_id(user_id,
|
||||
collection_name)
|
||||
values['user_id'] = user_id
|
||||
|
||||
# building the values batch
|
||||
binds = [':%s%%(num)d' % field for field in fields]
|
||||
pattern = '(:user_id,:collection,%s) ' % ','.join(binds)
|
||||
|
||||
lines = []
|
||||
for num, item in enumerate(items):
|
||||
lines.append(pattern % {'num': num})
|
||||
for field in fields:
|
||||
value = item.get(field)
|
||||
if field == 'modified' and value is not None:
|
||||
value = time2bigint(value)
|
||||
values['%s%d' % (field, num)] = value
|
||||
|
||||
if ('payload%d' % num in values and
|
||||
'modified%d' % num not in values):
|
||||
values['modified%d' % num] = time2bigint(time())
|
||||
|
||||
if values.get('ttl%d' % num) is None:
|
||||
values['ttl%d' % num] = 2100000000
|
||||
else:
|
||||
values['ttl%d' % num] += time()
|
||||
|
||||
if self.use_quota and 'payload%d' % num in values:
|
||||
size = len(values['payload%d' % num])
|
||||
values['payload_size%d' % num] = size
|
||||
|
||||
query += ','.join(lines)
|
||||
|
||||
# allowing updates as well
|
||||
query += (' on duplicate key update parentid = values(parentid),'
|
||||
'predecessorid = values(predecessorid),'
|
||||
'sortindex = values(sortindex),'
|
||||
'modified = values(modified), payload = values(payload),'
|
||||
'payload_size = values(payload_size),'
|
||||
'ttl = values(ttl)')
|
||||
|
||||
res = self._engine.execute(text(query), **values)
|
||||
return res.rowcount
|
||||
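    # For illustration, the statement built above for a two-item batch has
    # roughly this shape, with one set of bound parameters generated per
    # item:
    #
    #   insert into wbo (username, collection, id, parentid, predecessorid,
    #                    sortindex, modified, payload, payload_size, ttl)
    #   values (:user_id, :collection, :id0, :parentid0, :predecessorid0,
    #           :sortindex0, :modified0, :payload0, :payload_size0, :ttl0),
    #          (:user_id, :collection, :id1, :parentid1, :predecessorid1,
    #           :sortindex1, :modified1, :payload1, :payload_size1, :ttl1)
    #   on duplicate key update parentid = values(parentid), ...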
|
||||
def delete_item(self, user_id, collection_name, item_id):
|
||||
"""Deletes an item"""
|
||||
collection_id = self._get_collection_id(user_id, collection_name)
|
||||
query = text('delete from wbo where username = :user_id and '
|
||||
'collection = :collection_id and id = :item_id')
|
||||
res = self._engine.execute(query, user_id=user_id,
|
||||
collection_id=collection_id, item_id=item_id)
|
||||
return res.rowcount == 1
|
||||
|
||||
def delete_items(self, user_id, collection_name, item_ids=None,
|
||||
filters=None, limit=None, offset=None, sort=None):
|
||||
"""Deletes items. All items are removed unless item_ids is provided"""
|
||||
collection_id = self._get_collection_id(user_id, collection_name)
|
||||
if item_ids is None:
|
||||
query = ('delete from wbo where username = :user_id and '
|
||||
'collection = :collection_id')
|
||||
else:
|
||||
ids = ', '.join(['"%s"' % str(id_) for id_ in item_ids])
|
||||
query = ('delete from wbo where username = :user_id and '
|
||||
'collection = :collection_id and id in (%s)' % ids)
|
||||
|
||||
# preparing filters
|
||||
extra = []
|
||||
extra_values = {}
|
||||
if filters is not None:
|
||||
for field, value in filters.items():
|
||||
operator, value = value
|
||||
if field == 'modified':
|
||||
value = time2bigint(value)
|
||||
|
||||
if isinstance(value, (list, tuple)):
|
||||
value = [str(item) for item in value]
|
||||
extra.append('%s %s (%s)' % (field, operator,
|
||||
','.join(value)))
|
||||
else:
|
||||
extra.append('%s %s :%s' % (field, operator, field))
|
||||
extra_values[field] = value
|
||||
|
||||
if extra != []:
|
||||
query = '%s and %s' % (query, ' and '.join(extra))
|
||||
|
||||
if sort is not None and self.engine_name != 'sqlite':
|
||||
if sort == 'oldest':
|
||||
query += " order by modified"
|
||||
elif sort == 'newest':
|
||||
query += " order by modified desc"
|
||||
elif sort == 'index':
|
||||
query += " order by sortindex desc"
|
||||
|
||||
if self.engine_name != 'sqlite':
|
||||
if limit is not None and int(limit) > 0:
|
||||
query += ' limit %d' % limit
|
||||
|
||||
if offset is not None and int(offset) > 0:
|
||||
query += ' offset %d' % offset
|
||||
|
||||
# XXX see if we want to send back more details
|
||||
# e.g. by checking the rowcount
|
||||
res = self._engine.execute(text(query), user_id=user_id,
|
||||
collection_id=collection_id, **extra_values)
|
||||
return res.rowcount > 0
|
||||
|
||||
def get_total_size(self, user_id):
|
||||
"""Returns the total size in KB of a user storage.
|
||||
|
||||
The size is the sum of stored payloads.
|
||||
"""
|
||||
if not self.use_quota:
|
||||
return 0.0
|
||||
res = self._engine.execute(_USER_STORAGE_SIZE, user_id=user_id,
|
||||
ttl=time())
|
||||
res = res.fetchone()
|
||||
if res is None or res[0] is None:
|
||||
return 0.0
|
||||
return int(res[0]) / _KB
|
||||
|
||||
def get_size_left(self, user_id):
|
||||
"""Returns the storage left for a user"""
|
||||
return self.quota_size - self.get_total_size(user_id)
|
|
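A short usage sketch of the SQL backend above (the sqlite URI and the figures
are illustrative; payload sizes are only tracked when use_quota is set):

storage = SQLStorage(sqluri='sqlite:////tmp/sync.db',
                     use_quota=True, quota_size=5120)
storage.set_item(1, 'history', 'abc', payload='x' * 512)

storage.get_item(1, 'history', 'abc')   # -> WBO holding the stored payload
storage.get_total_size(1)               # -> 0.5 (payload bytes / 1024)
storage.get_size_left(1)                # -> 5119.5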
@ -1,34 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="en">
|
||||
<head>
|
||||
<title>Mozilla Labs / Weave / Forgot Password</title>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||
<link rel='stylesheet' href='/media/forgot_password.css' type='text/css' media='all' />
|
||||
</head>
|
||||
<body>
|
||||
<div id="content">
|
||||
<div id="top">
|
||||
<img src="/media/weave-logo.png" alt="Weave for Firefox" />
|
||||
</div>
|
||||
<div id="bottom">
|
||||
<div><img src="/media/table-top.png" alt="" /></div>
|
||||
|
||||
<div class="table_middle">
|
||||
<div class="title">Password Reset</div>
|
||||
<div class="details">
|
||||
${self.body()}
|
||||
</div>
|
||||
</div>
|
||||
<div id="footer">
|
||||
<div class="legal">
|
||||
© 2010 Mozilla
|
||||
<br />
|
||||
<span>
|
||||
<a href="http://www.mozilla.com/en-US/about/legal.html">Legal Notices</a> |
|
||||
<a href="http://www.mozilla.com/en-US/privacy-policy.html">Privacy Policy</a>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -1,15 +0,0 @@
|
|||
<body>
|
||||
<div id="content">
|
||||
%if error:
|
||||
<strong>Wrong answer</strong>
|
||||
%endif
|
||||
|
||||
<script>var RecaptchaOptions = {theme: "clean"};
|
||||
</script>
|
||||
<div style="background-color: system;">
|
||||
<form action="/misc/1.0/captcha_html" method="POST" >
|
||||
${captcha}
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
|
@ -1,18 +0,0 @@
|
|||
<%inherit file="base.mako"/>
|
||||
|
||||
<p>Enter your username here and we'll send you an email with instructions and a key that will let you reset your password.</p>
|
||||
|
||||
<div class="box">
|
||||
<form class="mainForm" name="forgotPass" id="forgotPass"
|
||||
action="/weave-password-reset" method="post">
|
||||
<p>
|
||||
<label>Username:<br />
|
||||
<input type="text" name="username" id="user_login" size="20" /></label>
|
||||
</p>
|
||||
<p class="submit">
|
||||
<input type="submit" id="fpsubmit" value="Request Reset Key" />
|
||||
</p>
|
||||
<p> </p>
|
||||
</form>
|
||||
</div>
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
<%inherit file="base.mako"/>
|
||||
|
||||
Password successfully changed.
|
|
@ -1,9 +0,0 @@
|
|||
<%inherit file="base.mako"/>
|
||||
|
||||
<div>
|
||||
We were unable to process your request. Please try again later.
|
||||
</div>
|
||||
|
||||
<div>
|
||||
Error: ${error}
|
||||
</div>
|
|
@ -1,4 +0,0 @@
|
|||
<%inherit file="base.mako"/>
|
||||
|
||||
We have emailed a reset code to the email address we have on file for you. Please use it within the next 6 hours, or it will become invalid.
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
<%inherit file="base.mako"/>
|
||||
<p>
|
||||
<strong>Note:</strong> Do not set this to be the same as your
|
||||
passphrase! If you are unsure what your passphrase is, you'll need
|
||||
to trigger a server wipe from the Weave add-on.</p>
|
||||
|
||||
%if error:
|
||||
<div class="error">${error}</div>
|
||||
%endif
|
||||
<form class="mainForm" name="changePass" id="changePass"
|
||||
action="/weave-password-reset" method="post">
|
||||
<p>
|
||||
<label>New password:
|
||||
<input type="password" name="password" id="user_pass" size="20"/>
|
||||
</label>
|
||||
</p>
|
||||
<p>
|
||||
<label>Re-enter to confirm:
|
||||
<input type="password" name="confirm"
|
||||
id="user_pass2" size="20"/>
|
||||
</label>
|
||||
</p>
|
||||
<input type="hidden" name="key" value="${key}"/>
|
||||
%if username:
|
||||
<input type="hidden" name="username" value="${username}"/>
|
||||
%endif
|
||||
<input type="submit" id="pchange" name="pchange"
|
||||
value="Change my password"/>
|
||||
</form>
|
||||
</p>
|
|
@ -1,10 +0,0 @@
|
|||
You asked to reset your Weave password. To do so, please click this link:
|
||||
|
||||
${host}/weave-password-reset?username=${user_name}&key=${code}
|
||||
|
||||
|
||||
This will let you change your password to something new. If you didn't ask for this, don't worry, we'll keep your password safe.
|
||||
|
||||
|
||||
Best Wishes
|
||||
The Weave Team
|
|
@ -1,35 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
|
@ -1,35 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
|
@ -1,75 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
""" Base test class, with an instanciated app.
|
||||
"""
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from webtest import TestApp
|
||||
|
||||
from syncserver.tests.support import initenv
|
||||
from syncserver.wsgiapp import make_app
|
||||
|
||||
|
||||
class TestWsgiApp(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
# loading the app
|
||||
self.appdir, self.config, self.storage, self.auth = initenv()
|
||||
# we don't support other storages for this test
|
||||
assert self.storage.sqluri.split(':/')[0] in ('mysql', 'sqlite')
|
||||
self.sqlfile = self.storage.sqluri.split('sqlite:///')[-1]
|
||||
self.app = TestApp(make_app(self.config))
|
||||
|
||||
# adding a user if needed
|
||||
self.user_id = self.auth.get_user_id('tarek')
|
||||
if self.user_id is None:
|
||||
self.auth.create_user('tarek', 'tarek', 'tarek@mozilla.con')
|
||||
self.user_id = self.auth.get_user_id('tarek')
|
||||
|
||||
def tearDown(self):
|
||||
self.storage.delete_storage(self.user_id)
|
||||
self.auth.delete_user(self.user_id)
|
||||
cef_logs = os.path.join(self.appdir, 'test_cef.log')
|
||||
if os.path.exists(cef_logs):
|
||||
os.remove(cef_logs)
|
||||
|
||||
if os.path.exists(self.sqlfile):
|
||||
os.remove(self.sqlfile)
|
||||
else:
|
||||
self.auth._engine.execute('truncate users')
|
||||
self.auth._engine.execute('truncate collections')
|
||||
self.auth._engine.execute('truncate wbo')
|
|
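A minimal sketch of an extra functional test built on the base class above
(it assumes quotas are left disabled in the test configuration, so the
reported size is simply zero):

from syncserver.tests.functional import support


class TestQuota(support.TestWsgiApp):

    def test_total_size_starts_at_zero(self):
        # with use_quota off, SQLStorage.get_total_size() returns 0.0
        self.assertEquals(self.storage.get_total_size(self.user_id), 0.0)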
@ -1,46 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
Basic tests to verify that the dispatching mechanism works.
|
||||
"""
|
||||
from syncserver.tests.functional import support
|
||||
|
||||
|
||||
class TestBasic(support.TestWsgiApp):
|
||||
|
||||
def test_404(self):
|
||||
        # make sure an unknown url returns a 404
|
||||
self.app.get('/blabla', status=404)
|
|
@ -1,50 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
Basic tests to verify that the dispatching mechanism works.
|
||||
"""
|
||||
from syncserver.tests.functional import support
|
||||
|
||||
|
||||
class TestUser(support.TestWsgiApp):
|
||||
|
||||
def test_file(self):
|
||||
# make sure we can get files
|
||||
self.app.get('/media/nothere', status=404)
|
||||
|
||||
res = self.app.get('/media/forgot_password.css')
|
||||
self.assertEquals(res.headers['Content-Type'],
|
||||
'text/html; charset=UTF-8')
|
|
@ -1,632 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
Basic tests to verify that the dispatching mechanism works.
|
||||
"""
|
||||
import base64
|
||||
import json
|
||||
import time
|
||||
import struct
|
||||
|
||||
from syncserver.tests.functional import support
|
||||
|
||||
|
||||
class TestStorage(support.TestWsgiApp):
|
||||
|
||||
def setUp(self):
|
||||
super(TestStorage, self).setUp()
|
||||
# user auth token
|
||||
environ = {'Authorization': 'Basic %s' % \
|
||||
base64.encodestring('tarek:tarek')}
|
||||
self.app.extra_environ = environ
|
||||
|
||||
# let's create some collections for our tests
|
||||
for name in ('client', 'crypto', 'forms', 'history', 'col1', 'col2'):
|
||||
self.storage.set_collection(self.user_id, name)
|
||||
|
||||
for item in range(3):
|
||||
self.storage.set_item(self.user_id, 'col1', str(item),
|
||||
payload='xxx')
|
||||
|
||||
for item in range(5):
|
||||
self.storage.set_item(self.user_id, 'col2', str(item),
|
||||
payload='xxx')
|
||||
|
||||
def test_get_collections(self):
|
||||
|
||||
resp = self.app.get('/1.0/tarek/info/collections')
|
||||
res = json.loads(resp.body)
|
||||
keys = res.keys()
|
||||
self.assertTrue('col1' in keys)
self.assertTrue('col2' in keys)
|
||||
self.assertEquals(int(resp.headers['X-Weave-Records']), len(keys))
|
||||
|
||||
# XXX need to test collections timestamps here
|
||||
|
||||
def test_get_collection_count(self):
|
||||
|
||||
resp = self.app.get('/1.0/tarek/info/collection_counts')
|
||||
res = json.loads(resp.body)
|
||||
self.assertEquals(res['col1'], 3)
|
||||
self.assertEquals(res['col2'], 5)
|
||||
self.assertEquals(int(resp.headers['X-Weave-Records']), 2)
|
||||
|
||||
def test_get_collection(self):
|
||||
res = self.app.get('/1.0/tarek/storage/col3')
|
||||
self.assertEquals(json.loads(res.body), [])
|
||||
resp = self.app.get('/1.0/tarek/storage/col2')
|
||||
res = json.loads(resp.body)
|
||||
res.sort()
|
||||
self.assertEquals(res, ['0', '1', '2', '3', '4'])
|
||||
self.assertEquals(int(resp.headers['X-Weave-Records']), 5)
|
||||
|
||||
# trying various filters
|
||||
|
||||
# "ids"
|
||||
# Returns the ids for objects in the collection that are in the
|
||||
# provided comma-separated list.
|
||||
res = self.app.get('/1.0/tarek/storage/col2?ids=1,3')
|
||||
res = json.loads(res.body)
|
||||
res.sort()
|
||||
self.assertEquals(res, ['1', '3'])
|
||||
|
||||
# "predecessorid"
|
||||
# Returns the ids for objects in the collection that
|
||||
# are directly preceded by the id given. Usually only returns one
|
||||
# result.
|
||||
wbo1 = {'id': '125', 'payload': 'XXX', 'predecessorid': 'XXXX'}
|
||||
wbos = json.dumps([wbo1])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
#self.storage.set_item(self.user_id, 'col2', '125',
|
||||
# predecessorid='XXXX')
|
||||
res = self.app.get('/1.0/tarek/storage/col2?predecessorid=XXXX')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['125'])
|
||||
|
||||
# "parentid"
|
||||
# Returns the ids for objects in the collection that are the children
|
||||
# of the parent id given.
|
||||
wbo1 = {'id': '126', 'payload': 'x', 'parentid': 'papa'}
|
||||
wbo2 = {'id': '127', 'payload': 'x', 'parentid': 'papa'}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
#self.storage.set_item(self.user_id, 'col2', '126', parentid='papa',
|
||||
# payload='x')
|
||||
#self.storage.set_item(self.user_id, 'col2', '127', parentid='papa',
|
||||
# payload='x')
|
||||
res = self.app.get('/1.0/tarek/storage/col2?parentid=papa')
|
||||
res = json.loads(res.body)
|
||||
res.sort()
|
||||
self.assertEquals(res, ['126', '127'])
|
||||
|
||||
# "older"
|
||||
# Returns only ids for objects in the collection that have been last
|
||||
# modified before the date given.
|
||||
|
||||
#self.storage.delete_items(self.user_id, 'col2')
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
|
||||
wbo = {'id': '128', 'payload': 'x'}
|
||||
wbo = json.dumps(wbo)
|
||||
res = self.app.put('/1.0/tarek/storage/col2/128', params=wbo)
|
||||
ts = json.loads(res.body)
|
||||
|
||||
#ts = self.storage.set_item(self.user_id, 'col2', '128', payload='x')
|
||||
fts = json.dumps(ts)
|
||||
time.sleep(.3)
|
||||
|
||||
wbo = {'id': '129', 'payload': 'x'}
|
||||
wbo = json.dumps(wbo)
|
||||
res = self.app.put('/1.0/tarek/storage/col2/129', params=wbo)
|
||||
ts2 = json.loads(res.body)
|
||||
|
||||
#ts2 = self.storage.set_item(self.user_id, 'col2', '129', payload='x')
|
||||
fts2 = json.dumps(ts2)
|
||||
|
||||
self.assertTrue(fts < fts2)
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2?older=%s' % ts2)
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['128'])
|
||||
|
||||
# "newer"
|
||||
# Returns only ids for objects in the collection that have been
|
||||
# last modified since the date given.
|
||||
res = self.app.get('/1.0/tarek/storage/col2?newer=%s' % ts)
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['129'])
|
||||
|
||||
# "full"
|
||||
# If defined, returns the full WBO, rather than just the id.
|
||||
res = self.app.get('/1.0/tarek/storage/col2?full=1')
|
||||
res = json.loads(res.body)
|
||||
keys = res[0].keys()
|
||||
keys.sort()
|
||||
wanted = ['id', 'modified', 'payload', 'payload_size']
|
||||
self.assertEquals(keys, wanted)
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
res = json.loads(res.body)
|
||||
self.assertTrue(isinstance(res, list))
|
||||
|
||||
# "index_above"
|
||||
# If defined, only returns items with a higher sortindex than the
|
||||
# value specified.
|
||||
wbo1 = {'id': '130', 'payload': 'x', 'sortindex': 11}
|
||||
wbo2 = {'id': '131', 'payload': 'x', 'sortindex': 9}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2?index_above=10')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['130'])
|
||||
|
||||
# "index_below"
|
||||
# If defined, only returns items with a lower sortindex than the value
|
||||
# specified.
|
||||
res = self.app.get('/1.0/tarek/storage/col2?index_below=10')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['131'])
|
||||
|
||||
# "limit"
|
||||
# Sets the maximum number of ids that will be returned
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
|
||||
wbos = []
|
||||
for i in range(10):
|
||||
wbo = {'id': str(i), 'payload': 'x'}
|
||||
wbos.append(wbo)
|
||||
wbos = json.dumps(wbos)
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2?limit=2')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(len(res), 2)
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
res = json.loads(res.body)
|
||||
self.assertTrue(len(res) > 9)
|
||||
|
||||
# "offset"
|
||||
# Skips the first n ids. For use with the limit parameter (required) to
|
||||
# paginate through a result set.
|
||||
|
||||
# let's get 2, 3 and 4
|
||||
res = self.app.get('/1.0/tarek/storage/col2?offset=2&limit=3')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(len(res), 3)
|
||||
res.sort()
|
||||
self.assertEquals(res, ['2', '3', '4'])
|
||||
|
||||
# "sort"
|
||||
# 'oldest' - Orders by modification date (oldest first)
|
||||
# 'newest' - Orders by modification date (newest first)
|
||||
# 'index' - Orders by the sortindex descending (highest weight first)
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
|
||||
for index, sortindex in (('0', 1), ('1', 34), ('2', 12)):
|
||||
wbo = {'id': index, 'payload': 'x', 'sortindex': sortindex}
|
||||
wbo = json.dumps(wbo)
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbo)
|
||||
time.sleep(0.1)
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2?sort=oldest')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['0', '1', '2'])
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2?sort=newest')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['2', '1', '0'])
|
||||
|
||||
res = self.app.get('/1.0/tarek/storage/col2?sort=index')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['1', '2', '0'])
|
||||
|
||||
def test_alternative_formats(self):
|
||||
|
||||
# application/json
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(res.content_type, 'application/json')
|
||||
|
||||
res = json.loads(res.body)
|
||||
res.sort()
|
||||
self.assertEquals(res, ['0', '1', '2', '3', '4'])
|
||||
|
||||
# application/newlines
|
||||
res = self.app.get('/1.0/tarek/storage/col2',
|
||||
headers=[('Accept', 'application/newlines')])
|
||||
self.assertEquals(res.content_type, 'application/newlines')
|
||||
|
||||
res = [json.loads(line) for line in res.body.strip().split('\n')]
|
||||
res.sort()
|
||||
self.assertEquals(res, ['0', '1', '2', '3', '4'])
|
||||
|
||||
# application/whoisi
|
||||
res = self.app.get('/1.0/tarek/storage/col2',
|
||||
headers=[('Accept', 'application/whoisi')])
|
||||
self.assertEquals(res.content_type, 'application/whoisi')
|
||||
|
||||
lines = []
|
||||
pos = 0
|
||||
while pos < len(res.body):
|
||||
# read the 32-bit big-endian size prefix
|
||||
size = res.body[pos:pos + 4]
|
||||
size = struct.unpack('!I', size)[0]
|
||||
|
||||
# extracting the line
|
||||
line = res.body[pos + 4:pos + size + 4]
|
||||
lines.append(json.loads(line))
|
||||
pos = pos + size + 4
|
||||
|
||||
lines.sort()
|
||||
self.assertEquals(lines, ['0', '1', '2', '3', '4'])
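# Illustrative sketch (not part of the original suite): the inverse of the
# parsing loop above, assuming application/whoisi framing is simply a
# big-endian 32-bit length prefix followed by the JSON-encoded record.
def encode_whoisi(records):
    """Frame a list of JSON-serializable records as application/whoisi."""
    chunks = []
    for record in records:
        data = json.dumps(record)
        chunks.append(struct.pack('!I', len(data)) + data)
    return ''.join(chunks)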
|
||||
|
||||
# unknown format defaults to json
|
||||
res = self.app.get('/1.0/tarek/storage/col2',
|
||||
headers=[('Accept', 'application/xxx')])
|
||||
self.assertEquals(res.content_type, 'application/json')
|
||||
|
||||
def test_get_item(self):
|
||||
# grabbing object 1 from col2
|
||||
res = self.app.get('/1.0/tarek/storage/col2/1')
|
||||
res = json.loads(res.body)
|
||||
keys = res.keys()
|
||||
keys.sort()
|
||||
self.assertEquals(keys, ['id', 'modified', 'payload', 'payload_size'])
|
||||
self.assertEquals(res['id'], '1')
|
||||
|
||||
# nonexistent object
|
||||
self.app.get('/1.0/tarek/storage/col2/99', status=404)
|
||||
|
||||
def test_set_item(self):
|
||||
# let's create an object
|
||||
wbo = {'payload': 'XXX'}
|
||||
wbo = json.dumps(wbo)
|
||||
self.app.put('/1.0/tarek/storage/col2/12345', params=wbo)
|
||||
res = self.app.get('/1.0/tarek/storage/col2/12345')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
|
||||
# now let's update it
|
||||
wbo = {'payload': 'YYY'}
|
||||
wbo = json.dumps(wbo)
|
||||
self.app.put('/1.0/tarek/storage/col2/12345', params=wbo)
|
||||
res = self.app.get('/1.0/tarek/storage/col2/12345')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res['payload'], 'YYY')
|
||||
|
||||
def test_set_collection(self):
|
||||
# sending two wbos
|
||||
wbo1 = {'id': 12, 'payload': 'XXX'}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX'}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
# checking what we did
|
||||
res = self.app.get('/1.0/tarek/storage/col2/12')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
res = self.app.get('/1.0/tarek/storage/col2/13')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
|
||||
# one more time, with changes
|
||||
wbo1 = {'id': 13, 'payload': 'XyX'}
|
||||
wbo2 = {'id': 14, 'payload': 'XXX'}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
# checking what we did
|
||||
res = self.app.get('/1.0/tarek/storage/col2/14')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
res = self.app.get('/1.0/tarek/storage/col2/13')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res['payload'], 'XyX')
|
||||
|
||||
# sending two wbos with one bad sortindex
|
||||
wbo1 = {'id': 'one', 'payload': 'XXX'}
|
||||
wbo2 = {'id': 'two', 'payload': 'XXX',
|
||||
'sortindex': 'FAIL'}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
self.app.get('/1.0/tarek/storage/col2/two', status=404)
|
||||
|
||||
def test_collection_usage(self):
|
||||
self.storage.delete_storage(self.user_id)
|
||||
|
||||
wbo1 = {'id': 13, 'payload': 'XyX'}
|
||||
wbo2 = {'id': 14, 'payload': 'XXX'}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
res = self.app.get('/1.0/tarek/info/collection_usage')
|
||||
usage = json.loads(res.body)
|
||||
col2_size = usage['col2']
|
||||
wanted = len(wbo1['payload']) + len(wbo2['payload'])
|
||||
self.assertEqual(col2_size, wanted / 1024.)
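# Worked example (illustrative): with the two payloads above, 'XyX' and
# 'XXX', wanted is 3 + 3 = 6 bytes, so the expected usage reported for
# col2 is 6 / 1024. ~= 0.00586 KB.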
|
||||
|
||||
def test_delete_collection(self):
|
||||
self.storage.delete_items(self.user_id, 'col2')
|
||||
|
||||
# creating a collection of three
|
||||
wbo1 = {'id': 12, 'payload': 'XXX'}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX'}
|
||||
wbo3 = {'id': 14, 'payload': 'XXX'}
|
||||
wbos = json.dumps([wbo1, wbo2, wbo3])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 3)
|
||||
|
||||
# deleting all items
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 0)
|
||||
|
||||
# now trying deletion with filters
|
||||
|
||||
# "ids"
|
||||
# Deletes the ids for objects in the collection that are in the
|
||||
# provided comma-separated list.
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
self.app.delete('/1.0/tarek/storage/col2?ids=12,14')
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 1)
|
||||
self.app.delete('/1.0/tarek/storage/col2?ids=13')
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 0)
|
||||
|
||||
# "parentid"
|
||||
# Only deletes objects in the collection that are the
|
||||
# children of the parent id given.
|
||||
wbo1 = {'id': 12, 'payload': 'XXX', 'parentid': 1}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX', 'parentid': 1}
|
||||
wbo3 = {'id': 14, 'payload': 'XXX', 'parentid': 2}
|
||||
wbos = json.dumps([wbo1, wbo2, wbo3])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
self.app.delete('/1.0/tarek/storage/col2?parentid=1')
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 1)
|
||||
|
||||
# "older"
|
||||
# Only deletes objects in the collection that have been last
|
||||
# modified before the date given
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
wbo1 = {'id': 12, 'payload': 'XXX', 'parentid': 1}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX', 'parentid': 1}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
time.sleep(.1)
|
||||
now = time.time()
|
||||
time.sleep(.1)
|
||||
wbo3 = {'id': 14, 'payload': 'XXX', 'parentid': 2}
|
||||
wbos = json.dumps([wbo3])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
self.app.delete('/1.0/tarek/storage/col2?older=%f' % now)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 1)
|
||||
|
||||
# "newer"
|
||||
# Only deletes objects in the collection that have been last modified
|
||||
# since the date given.
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
wbo1 = {'id': 12, 'payload': 'XXX', 'parentid': 1}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX', 'parentid': 1}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
now = time.time()
|
||||
time.sleep(.3)
|
||||
wbo3 = {'id': 14, 'payload': 'XXX', 'parentid': 2}
|
||||
wbos = json.dumps([wbo3])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
|
||||
self.app.delete('/1.0/tarek/storage/col2?newer=%f' % now)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 2)
|
||||
|
||||
# "index_above"
|
||||
# Only delete objects with a higher sortindex than the value
|
||||
# specified
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
self.storage.set_item(self.user_id, 'col2', '130', sortindex=11)
|
||||
self.storage.set_item(self.user_id, 'col2', '131', sortindex=9)
|
||||
res = self.app.delete('/1.0/tarek/storage/col2?index_above=10')
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['131'])
|
||||
|
||||
# "index_below"
|
||||
# Only delete objects with a lower sortindex than the value
|
||||
# specified.
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
self.storage.set_item(self.user_id, 'col2', '130', sortindex=11)
|
||||
self.storage.set_item(self.user_id, 'col2', '131', sortindex=9)
|
||||
res = self.app.delete('/1.0/tarek/storage/col2?index_below=10')
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(res, ['130'])
|
||||
|
||||
# "limit"
|
||||
# Sets the maximum number of objects that will be deleted.
|
||||
# xxx see how to activate this under sqlite
|
||||
|
||||
#self.app.delete('/1.0/tarek/storage/col2')
|
||||
#wbos = json.dumps([wbo1, wbo2, wbo3])
|
||||
#self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
#self.app.delete('/1.0/tarek/storage/col2?limit=2')
|
||||
#res = self.app.get('/1.0/tarek/storage/col2')
|
||||
#self.assertEquals(len(json.loads(res.body)), 1)
|
||||
|
||||
# "sort"
|
||||
# 'oldest' - Orders by modification date (oldest first)
|
||||
# 'newest' - Orders by modification date (newest first)
|
||||
# 'index' - Orders by the sortindex (ordered lists)
|
||||
# 'depthindex' - Orders by depth, then by sortindex (ordered trees)
|
||||
|
||||
# sort is used only if limit is used.
|
||||
# check this with toby
|
||||
|
||||
def test_delete_item(self):
|
||||
self.storage.delete_items(self.user_id, 'col2')
|
||||
|
||||
# creating a collection of three
|
||||
wbo1 = {'id': 12, 'payload': 'XXX'}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX'}
|
||||
wbo3 = {'id': 14, 'payload': 'XXX'}
|
||||
wbos = json.dumps([wbo1, wbo2, wbo3])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 3)
|
||||
|
||||
# deleting item 13
|
||||
self.app.delete('/1.0/tarek/storage/col2/13')
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 2)
|
||||
|
||||
# a nonexistent item should still return a 200
|
||||
self.app.delete('/1.0/tarek/storage/col2/12982')
|
||||
|
||||
def test_delete_storage(self):
|
||||
self.storage.delete_items(self.user_id, 'col2')
|
||||
|
||||
# creating a collection of three
|
||||
wbo1 = {'id': 12, 'payload': 'XXX'}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX'}
|
||||
wbo3 = {'id': 14, 'payload': 'XXX'}
|
||||
wbos = json.dumps([wbo1, wbo2, wbo3])
|
||||
self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 3)
|
||||
|
||||
# deleting all with no confirmation
|
||||
self.app.delete('/1.0/tarek/storage', status=400)
|
||||
|
||||
# deleting all for real now
|
||||
res = self.app.delete('/1.0/tarek/storage/col2',
|
||||
headers=[('X-Confirm-Delete', '1')])
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 0)
|
||||
|
||||
def test_x_weave_timestamp(self):
|
||||
now = time.time()
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertTrue(abs(now -
|
||||
float(res.headers['X-Weave-Timestamp'])) < 0.1)
|
||||
|
||||
# getting the timestamp with a PUT
|
||||
wbo = {'payload': 'XXX'}
|
||||
wbo = json.dumps(wbo)
|
||||
now = time.time()
|
||||
res = self.app.put('/1.0/tarek/storage/col2/12345', params=wbo)
|
||||
self.assertTrue(abs(now -
|
||||
float(res.headers['X-Weave-Timestamp'])) < 0.2)
|
||||
|
||||
# getting the timestamp with a POST
|
||||
wbo1 = {'id': 12, 'payload': 'XXX'}
|
||||
wbo2 = {'id': 13, 'payload': 'XXX'}
|
||||
wbos = json.dumps([wbo1, wbo2])
|
||||
now = time.time()
|
||||
res = self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
self.assertTrue(abs(now -
|
||||
float(res.headers['X-Weave-Timestamp'])) < 0.2)
|
||||
|
||||
def test_ifunmodifiedsince(self):
|
||||
wbo = {'payload': 'XXX'}
|
||||
wbo = json.dumps(wbo)
|
||||
ts = self.app.put('/1.0/tarek/storage/col2/12345', params=wbo)
|
||||
ts = json.loads(ts.body) - 10
|
||||
self.app.put('/1.0/tarek/storage/col2/12345', params=wbo,
|
||||
headers=[('X-If-Unmodified-Since', str(ts))],
|
||||
status=412)
|
||||
|
||||
def test_quota(self):
|
||||
wbo = {'payload': 'XXX'}
|
||||
wbo = json.dumps(wbo)
|
||||
self.app.put('/1.0/tarek/storage/col2/12345', params=wbo)
|
||||
|
||||
res = self.app.get('/1.0/tarek/info/quota')
|
||||
used, quota = json.loads(res.body)
|
||||
self.assertAlmostEquals(used, 0.026, 3)
|
||||
|
||||
def test_overquota(self):
|
||||
try:
|
||||
self.app.app.storage.quota_size = 0.1
|
||||
except AttributeError:
|
||||
# ErrorMiddleware is activated
|
||||
self.app.app.application.storage.quota_size = 0.1
|
||||
wbo = {'payload': 'XXX'}
|
||||
wbo = json.dumps(wbo)
|
||||
res = self.app.put('/1.0/tarek/storage/col2/12345', params=wbo)
|
||||
self.assertEquals(res.headers['X-Weave-Quota-Remaining'], '0.0765625')
|
||||
try:
|
||||
self.app.app.storage.quota_size = 0
|
||||
except AttributeError:
|
||||
# ErrorMiddleware is activated
|
||||
self.app.app.application.storage.quota_size = 0
|
||||
wbo = {'payload': 'XXX'}
|
||||
wbo = json.dumps(wbo)
|
||||
res = self.app.put('/1.0/tarek/storage/col2/12345', params=wbo,
|
||||
status=400)
|
||||
|
||||
|
||||
def test_get_collection_ttl(self):
|
||||
self.app.delete('/1.0/tarek/storage/col2')
|
||||
wbo = {'payload': 'XXX', 'ttl': 0}
|
||||
wbo = json.dumps(wbo)
|
||||
res = self.app.put('/1.0/tarek/storage/col2/12345', params=wbo)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(json.loads(res.body), [])
|
||||
|
||||
wbo = {'payload': 'XXX', 'ttl': 1}
|
||||
wbo = json.dumps(wbo)
|
||||
self.app.put('/1.0/tarek/storage/col2/123456', params=wbo)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 1)
|
||||
time.sleep(1.)
|
||||
res = self.app.get('/1.0/tarek/storage/col2')
|
||||
self.assertEquals(len(json.loads(res.body)), 0)
|
||||
|
||||
def test_batch(self):
|
||||
# make sure the server correctly handles large batches;
# they are pushed into the DB in batches of 100
|
||||
wbos = [{'id': str(i), 'payload': 'XXX'} for i in range(250)]
|
||||
wbos = json.dumps(wbos)
|
||||
res = self.app.post('/1.0/tarek/storage/col2', params=wbos)
|
||||
res = json.loads(res.body)
|
||||
self.assertEquals(len(res['success']), 250)
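# Sketch only (helper name and batch size are assumptions, not part of the
# original suite): a client posting a very large set of WBOs could chunk
# its own uploads, mirroring the server-side batching of 100 exercised above.
def post_in_batches(app, url, wbos, batch_size=100):
    """POST 'wbos' to 'url' in slices of 'batch_size' items."""
    results = []
    for start in range(0, len(wbos), batch_size):
        batch = wbos[start:start + batch_size]
        results.append(app.post(url, params=json.dumps(batch)))
    return results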
|
|
@@ -1,270 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
"""
|
||||
Basic tests to verify that the dispatching mechanism works.
|
||||
"""
|
||||
import base64
|
||||
import json
|
||||
import smtplib
|
||||
from email import message_from_string
|
||||
|
||||
from recaptcha.client import captcha
|
||||
|
||||
from syncserver.tests.functional import support
|
||||
|
||||
|
||||
class FakeSMTP(object):
|
||||
|
||||
msgs = []
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
pass
|
||||
|
||||
def quit(self):
|
||||
pass
|
||||
|
||||
def sendmail(self, sender, rcpts, msg):
|
||||
self.msgs.append((sender, rcpts, msg))
|
||||
|
||||
|
||||
class FakeCaptchaResponse(object):
|
||||
|
||||
is_valid = True
|
||||
|
||||
|
||||
class TestUser(support.TestWsgiApp):
|
||||
|
||||
def setUp(self):
|
||||
super(TestUser, self).setUp()
|
||||
# user auth token
|
||||
environ = {'Authorization': 'Basic %s' % \
|
||||
base64.encodestring('tarek:tarek')}
|
||||
self.app.extra_environ = environ
|
||||
|
||||
# we don't want to send emails for real
|
||||
self.old = smtplib.SMTP
|
||||
smtplib.SMTP = FakeSMTP
|
||||
|
||||
# we don't want to call recaptcha either
|
||||
self.old_submit = captcha.submit
|
||||
captcha.submit = self._submit
|
||||
|
||||
def tearDown(self):
|
||||
super(TestUser, self).tearDown()
|
||||
|
||||
# setting back smtp and recaptcha
|
||||
smtplib.SMTP = self.old
|
||||
captcha.submit = self.old_submit
|
||||
for user in ('tarek', 'tarek2'):
|
||||
user_id = self.auth.get_user_id(user)
|
||||
if user_id is None:
|
||||
continue
|
||||
self.auth.delete_user(user_id)
|
||||
|
||||
def _submit(self, *args, **kw):
|
||||
return FakeCaptchaResponse()
|
||||
|
||||
def test_invalid_token(self):
|
||||
environ = {'Authorization': 'FOooo baar'}
|
||||
self.app.extra_environ = environ
|
||||
self.app.get('/user/1.0/tarek/password_reset', status=401)
|
||||
|
||||
def test_user_exists(self):
|
||||
res = self.app.get('/user/1.0/tarek')
|
||||
self.assertTrue(json.loads(res.body))
|
||||
|
||||
def test_user_node(self):
|
||||
res = self.app.get('/user/1.0/tarek/node/weave')
|
||||
self.assertEquals(res.body, 'http://localhost')
|
||||
|
||||
def test_password_reset(self):
|
||||
# making sure a mail is sent
|
||||
res = self.app.get('/user/1.0/tarek/password_reset')
|
||||
self.assertEquals(res.body, 'success')
|
||||
self.assertEquals(len(FakeSMTP.msgs), 1)
|
||||
|
||||
# let's try some bad POSTs on weave-password-reset
|
||||
self.app.post('/weave-password-reset',
|
||||
params={'username': 'tarek',
|
||||
'boo': 'foo'}, status=400)
|
||||
|
||||
res = self.app.post('/weave-password-reset',
|
||||
params={'username': 'tarek', 'key': 'xxx',
|
||||
'boo': 'foo'})
|
||||
self.assertTrue('Password not provided' in res)
|
||||
|
||||
# let's ask via the web form now
|
||||
res = self.app.get('/weave-password-reset')
|
||||
res.form['username'].value = 'tarek'
|
||||
res = res.form.submit()
|
||||
self.assertTrue('next 6 hours' in res)
|
||||
self.assertEquals(len(FakeSMTP.msgs), 2)
|
||||
|
||||
# let's visit the link in the email
|
||||
msg = message_from_string(FakeSMTP.msgs[1][2]).get_payload()
|
||||
msg = base64.decodestring(msg)
|
||||
link = msg.split('\n')[2].strip()
|
||||
|
||||
# let's try some bad links (unknown user)
|
||||
badlink = link.replace('tarek', 'joe')
|
||||
res = self.app.get(badlink)
|
||||
res.form['password'].value = 'p' * 8
|
||||
res.form['confirm'].value = 'p' * 8
|
||||
res = res.form.submit()
|
||||
self.assertTrue('unable to locate your account' in res)
|
||||
|
||||
badlink = link.replace('username=tarek&', '')
|
||||
res = self.app.get(badlink)
|
||||
res.form['password'].value = 'p' * 8
|
||||
res.form['confirm'].value = 'p' * 8
|
||||
res = res.form.submit()
|
||||
self.assertTrue('Username not provided' in res)
|
||||
|
||||
# let's call the real link, it's a form we can fill
|
||||
# let's try bad values
|
||||
# mismatch
|
||||
res = self.app.get(link)
|
||||
res.form['password'].value = 'mynewpassword'
|
||||
res.form['confirm'].value = 'badconfirmation'
|
||||
res = res.form.submit()
|
||||
self.assertTrue('do not match' in res)
|
||||
|
||||
# weak password
|
||||
res = self.app.get(link)
|
||||
res.form['password'].value = 'my'
|
||||
res.form['confirm'].value = 'my'
|
||||
res = res.form.submit()
|
||||
self.assertTrue('at least 8' in res)
|
||||
|
||||
# wrong key
|
||||
res = self.app.get(link[:-1] + 'X')
|
||||
res.form['password'].value = 'mynewpassword'
|
||||
res.form['confirm'].value = 'mynewpassword'
|
||||
res = res.form.submit()
|
||||
self.assertTrue('Key does not match with username' in res)
|
||||
|
||||
# all good
|
||||
res = self.app.get(link)
|
||||
res.form['password'].value = 'mynewpassword'
|
||||
res.form['confirm'].value = 'mynewpassword'
|
||||
res = res.form.submit()
|
||||
self.assertTrue('Password successfully changed' in res)
|
||||
|
||||
def test_create_user(self):
|
||||
# creating a user
|
||||
|
||||
# the user already exists
|
||||
payload = {'email': 'tarek@ziade.org', 'password': 'x' * 9}
|
||||
payload = json.dumps(payload)
|
||||
self.app.put('/user/1.0/tarek', params=payload, status=400)
|
||||
|
||||
# missing the password
|
||||
payload = {'email': 'tarek@ziade.org'}
|
||||
payload = json.dumps(payload)
|
||||
self.app.put('/user/1.0/tarek2', params=payload, status=400)
|
||||
|
||||
# malformed e-mail
|
||||
payload = {'email': 'tarekziadeorg', 'password': 'x' * 9}
|
||||
payload = json.dumps(payload)
|
||||
self.app.put('/user/1.0/tarek2', params=payload, status=400)
|
||||
|
||||
# weak password
|
||||
payload = {'email': 'tarek@ziade.org', 'password': 'x'}
|
||||
payload = json.dumps(payload)
|
||||
self.app.put('/user/1.0/tarek2', params=payload, status=400)
|
||||
|
||||
# weak password #2
|
||||
payload = {'email': 'tarek@ziade.org', 'password': 'tarek2'}
|
||||
payload = json.dumps(payload)
|
||||
self.app.put('/user/1.0/tarek2', params=payload, status=400)
|
||||
|
||||
# everything is there
|
||||
res = self.app.get('/user/1.0/tarek2')
|
||||
self.assertFalse(json.loads(res.body))
|
||||
|
||||
payload = {'email': 'tarek@ziade.org', 'password': 'x' * 9,
|
||||
'captcha-challenge': 'xxx',
|
||||
'captcha-response': 'xxx'}
|
||||
payload = json.dumps(payload)
|
||||
res = self.app.put('/user/1.0/tarek2', params=payload)
|
||||
self.assertEquals(res.body, 'tarek2')
|
||||
|
||||
res = self.app.get('/user/1.0/tarek2')
|
||||
self.assertTrue(json.loads(res.body))
|
||||
|
||||
def test_change_email(self):
|
||||
|
||||
# bad email
|
||||
body = json.dumps('newemail.com')
|
||||
self.app.post('/user/1.0/tarek/email', params=body, status=400)
|
||||
|
||||
# good one
|
||||
body = json.dumps('new@email.com')
|
||||
res = self.app.post('/user/1.0/tarek/email', params=body)
|
||||
self.assertEquals(res.body, 'new@email.com')
|
||||
|
||||
def test_delete_user(self):
|
||||
# creating another user
|
||||
res = self.app.get('/user/1.0/tarek2')
|
||||
if not json.loads(res.body):
|
||||
payload = {'email': 'tarek@ziade.org',
|
||||
'password': 'x' * 9,
|
||||
'captcha-challenge': 'xxx',
|
||||
'captcha-response': 'xxx'}
|
||||
payload = json.dumps(payload)
|
||||
self.app.put('/user/1.0/tarek2', params=payload)
|
||||
|
||||
# trying to delete 'tarek' while authenticated as 'tarek2'
|
||||
# this should generate a 401
|
||||
environ = {'Authorization': 'Basic %s' % \
|
||||
base64.encodestring('tarek2:xxxxxxxxx')}
|
||||
self.app.extra_environ = environ
|
||||
self.app.delete('/user/1.0/tarek', status=401)
|
||||
|
||||
# now using the right credentials
|
||||
environ = {'Authorization': 'Basic %s' % \
|
||||
base64.encodestring('tarek:tarek')}
|
||||
self.app.extra_environ = environ
|
||||
res = self.app.delete('/user/1.0/tarek')
|
||||
self.assertTrue(json.loads(res.body))
|
||||
|
||||
# tarek should be gone
|
||||
res = self.app.get('/user/1.0/tarek')
|
||||
self.assertFalse(json.loads(res.body))
|
||||
|
||||
def test_recaptcha(self):
|
||||
# make sure the captcha is rendered
|
||||
self.app.get('/misc/1.0/captcha_html', status=200)
|
|
@@ -1,48 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright (C) 2007, 2008 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
__author__ = 'davidbyttow@google.com (David Byttow)'
|
||||
|
||||
|
||||
import unittest
|
||||
import xmlrunner
|
||||
|
||||
class ModuleTestRunner(object):
|
||||
|
||||
def __init__(self, module_list=None, module_test_settings=None, asXML=False):
|
||||
self.modules = module_list or []
|
||||
self.settings = module_test_settings or {}
|
||||
self.asXML = asXML
|
||||
|
||||
def RunAllTests(self):
|
||||
"""Executes all tests present in the list of modules."""
|
||||
if self.asXML:
|
||||
runner = xmlrunner.XMLTestRunner()
|
||||
else:
|
||||
runner = unittest.TextTestRunner(verbosity=3)
|
||||
|
||||
results = []
|
||||
for module in self.modules:
|
||||
for setting, value in self.settings.iteritems():
|
||||
try:
|
||||
setattr(module, setting, value)
|
||||
except AttributeError:
|
||||
pass
|
||||
print '\nRunning all tests in module', module.__name__
|
||||
results.append(runner.run(unittest.defaultTestLoader.loadTestsFromModule(module)))
|
||||
return results
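# Minimal usage sketch (module names and settings here are illustrative):
#
#   import server_tests
#   runner = ModuleTestRunner(module_list=[server_tests],
#                             module_test_settings={'USERNAME': 'tarek'},
#                             asXML=False)
#   results = runner.RunAllTests()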
|
||||
|
|
@@ -1,77 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<xsl:stylesheet version='1.0'
|
||||
xmlns:xsl='http://www.w3.org/1999/XSL/Transform'
|
||||
xmlns:xhtml="http://www.w3.org/1999/xhtml"
|
||||
xmlns:xforms="http://www.w3.org/2002/xforms"
|
||||
xmlns:ts="http://www.w3c.org/MarkUp/Forms/XForms/Test/11">
|
||||
|
||||
<xsl:output method="xml" encoding="UTF-8" indent="yes" omit-xml-declaration="no" xalan:indent-amount="2" xmlns:xalan="http://xml.apache.org/xslt"/>
|
||||
|
||||
<xsl:param name="dir"></xsl:param>
|
||||
|
||||
<xsl:template match="testsuite">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml"
|
||||
xmlns:ts="http://www.w3c.org/MarkUp/Forms/XForms/Test/11"
|
||||
exclude-result-prefixes="ts">
|
||||
<head>
|
||||
<style>
|
||||
BODY {font:9pt Arial,sans-serif}
|
||||
TD {font:9pt Arial,sans-serif}
|
||||
.heading TD {font:10pt bold Arial, sans-serif}
|
||||
.innerCellFailed {background-color:#ffd0d0}
|
||||
.innerCellPassed {color:#D0D0D0}
|
||||
.inner130 {width:130px}
|
||||
.ctl {text-decoration:none;color:#808080;padding-right:8px}
|
||||
</style>
|
||||
|
||||
<title>Unit Test Results</title>
|
||||
<script>
|
||||
function expand(d) {
|
||||
document.getElementById(d).style.display = "none";
|
||||
document.getElementById(d + "_full").style.display = "block";
|
||||
}
|
||||
</script>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<table cellpadding="0" cellspacing="1" border="0">
|
||||
<tr class="heading">
|
||||
<td><b>Test Case Name</b></td>
|
||||
<td><b>Status</b></td>
|
||||
</tr>
|
||||
|
||||
<xsl:apply-templates select="*"/>
|
||||
</table>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
</xsl:template>
|
||||
|
||||
|
||||
|
||||
<xsl:template match="testcase">
|
||||
<xsl:variable name="name" select="@name"/>
|
||||
<tr class="outer">
|
||||
<td valign="top" class="inner130"><xsl:value-of select="@name"/></td>
|
||||
<xsl:choose>
|
||||
<xsl:when test="failure">
|
||||
<td class="innerCellFailed">
|
||||
<div style='height:14px;overflow:hidden'><xsl:attribute name='id'><xsl:value-of select="@name"/></xsl:attribute><pre style='margin-top:0px'><a class='ctl'><xsl:attribute name="href">javascript:expand('<xsl:value-of select="@name"/>')</xsl:attribute>+</a> <xsl:value-of select="failure"/></pre></div>
|
||||
<div style='display:none'><xsl:attribute name='id'><xsl:value-of select="@name"/>_full</xsl:attribute><pre style='margin-top:0px'><a class='ctl'><xsl:attribute name="href">javascript:expand('<xsl:value-of select="@name"/>')</xsl:attribute>+</a> <xsl:value-of select="failure"/></pre></div>
|
||||
</td>
|
||||
</xsl:when>
|
||||
<xsl:otherwise>
|
||||
<td class="innerCellPassed">OK</td>
|
||||
|
||||
</xsl:otherwise>
|
||||
</xsl:choose>
|
||||
</tr>
|
||||
|
||||
</xsl:template>
|
||||
|
||||
|
||||
<xsl:template match="statusSummary"/>
|
||||
<xsl:template match="profile"/>
|
||||
|
||||
</xsl:stylesheet>
|
|
@@ -1,80 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# This suite contains basic tests for the identity server.
|
||||
# It does not exercise any of the per-protocol features.
|
||||
|
||||
from optparse import OptionParser
|
||||
from unittest import TextTestRunner, defaultTestLoader
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import server_tests
|
||||
import module_test_runner
|
||||
import test_config
|
||||
|
||||
def run():
|
||||
"""Runs the test suite."""
|
||||
test_runner = module_test_runner.ModuleTestRunner(asXML='-xml' in sys.argv)
|
||||
test_runner.modules = [server_tests]
|
||||
return test_runner.RunAllTests()
|
||||
|
||||
|
||||
# python run_server_tests.py --scheme=https --server=pm-weave04.mozilla.org --host=sj-weave01.services.mozilla.com --username=weavetest-sj01 --password=caid2raefoWi
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
# process arguments
|
||||
usage = "usage: %prog [options] [ test-class | test-class.test-method ] "
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.add_option("--scheme", help="http or https", dest="scheme")
|
||||
parser.add_option("--server",
|
||||
help="the actual internet host to contact",
|
||||
dest="server")
|
||||
parser.add_option("--host",
|
||||
help="the Host name to present in the HTTP request",
|
||||
dest="host")
|
||||
parser.add_option("--username",
|
||||
help="the Weave username to send",
|
||||
dest="username")
|
||||
parser.add_option("--password", help="the Weave password to send",
|
||||
dest="password")
|
||||
parser.add_option("--with-memcache",
|
||||
help=("whether the server is running with memcache "
|
||||
"(1 if true; 0 if not)"),
|
||||
dest="memcache")
|
||||
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if options.scheme:
|
||||
test_config.STORAGE_SCHEME = options.scheme
|
||||
if options.server:
|
||||
test_config.STORAGE_SERVER = options.server
|
||||
if options.host:
|
||||
test_config.HOST_NAME = options.host
|
||||
if options.username:
|
||||
test_config.USERNAME = options.username
|
||||
if options.password:
|
||||
test_config.PASSWORD = options.password
|
||||
|
||||
test_config.memcache = options.memcache == "1"
|
||||
|
||||
tests = args
|
||||
any_problems = False
|
||||
|
||||
if len(tests) > 0:
|
||||
results = []
|
||||
runner = TextTestRunner(verbosity=3)
|
||||
for test in tests:
|
||||
test = defaultTestLoader.loadTestsFromName(test,
|
||||
module=server_tests)
|
||||
results.append(runner.run(test))
|
||||
else:
|
||||
results = run()
|
||||
|
||||
for result in results:
|
||||
if len(result.failures + result.errors) > 0:
|
||||
sys.exit(1)
|
||||
|
||||
sys.exit(0)
|
The diff for this file is not shown because it is too large.
|
@@ -1,12 +0,0 @@
|
|||
SCHEME = "http"
|
||||
SERVER_NAME = "localhost:5000"
|
||||
SERVER_BASE = "%s://%s" % (SCHEME, SERVER_NAME)
|
||||
|
||||
STORAGE_SCHEME = "http"
|
||||
STORAGE_SERVER = "localhost:5000"
|
||||
|
||||
HOST_NAME = None
|
||||
USERNAME = "tarek"
|
||||
PASSWORD = "tarek"
|
||||
|
||||
memcache = False
|
|
@@ -1,344 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Implementation of a Weave client
|
||||
|
||||
import urllib
|
||||
import urllib2
|
||||
import httplib
|
||||
import hashlib
|
||||
import logging
|
||||
import unittest
|
||||
import base64
|
||||
import json
|
||||
|
||||
opener = urllib2.build_opener(urllib2.HTTPHandler)
|
||||
|
||||
class WeaveException(Exception):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.value)
|
||||
|
||||
def createUser(serverURL, userID, password, email, secret=None, captchaChallenge=None, captchaResponse=None, withHost=None):
|
||||
if userID.find('"') >=0:
|
||||
raise ValueError("Weave userIDs may not contain the quote character")
|
||||
if email.find('"') >=0:
|
||||
raise ValueError("Weave email addresses may not contain the quote character")
|
||||
if secret and secret.find('"') >=0:
|
||||
raise ValueError("Weave secret may not contain the quote character")
|
||||
|
||||
url = serverURL + "/user/1/%s/" % userID
|
||||
|
||||
secretStr = ""
|
||||
captchaStr = ""
|
||||
if secret:
|
||||
secretStr = ''', "secret":"%s"''' % secret
|
||||
|
||||
if captchaChallenge and captchaResponse:
|
||||
if secret:
|
||||
raise WeaveException("Cannot provide both a secret and a captchaResponse to createUser")
|
||||
captchaStr = ''', "captcha-challenge":"%s", "captcha-response":"%s"''' % (captchaChallenge, captchaResponse)
|
||||
|
||||
payload = '''{"password":"%s", "email": "%s"%s%s}''' % (password, email, secretStr, captchaStr)
|
||||
|
||||
req = urllib2.Request(url, data=payload)
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
|
||||
req.get_method = lambda: 'PUT'
|
||||
try:
|
||||
f = opener.open(req)
|
||||
result = f.read()
|
||||
if result != userID:
|
||||
raise WeaveException("Unable to create new user: got return value '%s' from server" % result)
|
||||
|
||||
except urllib2.URLError, e:
|
||||
msg = ""
|
||||
try:
|
||||
msg = e.read()
|
||||
except:
|
||||
pass
|
||||
raise WeaveException("Unable to communicate with Weave server: " + str(e) + "; %s" % msg)
|
||||
|
||||
|
||||
def checkNameAvailable(serverURL, userID, withHost=None):
|
||||
if userID.find('"') >=0:
|
||||
raise ValueError("Weave userIDs may not contain the quote character")
|
||||
|
||||
url = serverURL + "/user/1/%s/" % userID
|
||||
|
||||
req = urllib2.Request(url)
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
try:
|
||||
f = urllib2.urlopen(req)
|
||||
result = f.read()
|
||||
if result == "1":
|
||||
return False
|
||||
elif result == "0":
|
||||
return True
|
||||
else:
|
||||
raise WeaveException("Unexpected return value from server on name-availability request: '%s'" % result)
|
||||
except urllib2.URLError, e:
|
||||
raise WeaveException("Unable to communicate with Weave server: " + str(e))
|
||||
|
||||
|
||||
def getUserStorageNode(serverURL, userID, password, withHost=None):
|
||||
if userID.find('"') >=0:
|
||||
raise ValueError("Weave userIDs may not contain the quote character")
|
||||
|
||||
url = serverURL + "/user/1/%s/node/weave" % userID
|
||||
|
||||
|
||||
req = urllib2.Request(url)
|
||||
base64string = base64.encodestring('%s:%s' % (userID, password))[:-1]
|
||||
req.add_header("Authorization", "Basic %s" % base64string)
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
|
||||
try:
|
||||
f = opener.open(req)
|
||||
result = f.read()
|
||||
f.close()
|
||||
return result
|
||||
|
||||
except urllib2.URLError, e:
|
||||
if str(e).find("404") >= 0:
|
||||
return serverURL
|
||||
else:
|
||||
raise WeaveException("Unable to communicate with Weave server: " + str(e))
|
||||
|
||||
|
||||
def changeUserEmail(serverURL, userID, password, newemail, withHost=None):
|
||||
if userID.find('"') >=0:
|
||||
raise ValueError("Weave userIDs may not contain the quote character")
|
||||
if newemail.find('"') >=0:
|
||||
raise ValueError("Weave email addresses may not contain the quote character")
|
||||
|
||||
url = serverURL + "/user/1/%s/email" % userID
|
||||
|
||||
payload = newemail
|
||||
|
||||
req = urllib2.Request(url, data=payload)
|
||||
base64string = base64.encodestring('%s:%s' % (userID, password))[:-1]
|
||||
req.add_header("Authorization", "Basic %s" % base64string)
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
req.get_method = lambda: 'POST'
|
||||
try:
|
||||
f = opener.open(req)
|
||||
result = f.read()
|
||||
if result != newemail:
|
||||
raise WeaveException("Unable to change user email: got return value '%s' from server" % result)
|
||||
|
||||
except urllib2.URLError, e:
|
||||
raise WeaveException("Unable to communicate with Weave server: %s" % e)
|
||||
|
||||
|
||||
|
||||
def changeUserPassword(serverURL, userID, password, newpassword, withHost=None):
|
||||
if userID.find('"') >=0:
|
||||
raise ValueError("Weave userIDs may not contain the quote character")
|
||||
|
||||
url = serverURL + "/user/1/%s/password" % userID
|
||||
|
||||
payload = newpassword
|
||||
req = urllib2.Request(url, data=payload)
|
||||
base64string = base64.encodestring('%s:%s' % (userID, password))[:-1]
|
||||
req.add_header("Authorization", "Basic %s" % base64string)
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
req.get_method = lambda: 'POST'
|
||||
try:
|
||||
|
||||
f = opener.open(req)
|
||||
result = f.read()
|
||||
if result != "success":
|
||||
raise WeaveException("Unable to change user password: got return value '%s' from server" % result)
|
||||
|
||||
except urllib2.URLError, e:
|
||||
raise WeaveException("Unable to communicate with Weave server: %s" % e)
|
||||
|
||||
|
||||
|
||||
def deleteUser(serverURL, userID, password, withHost=None):
|
||||
if userID.find('"') >=0:
|
||||
raise ValueError("Weave userIDs may not contain the quote character")
|
||||
|
||||
url = serverURL + "/user/1/%s/" % userID
|
||||
|
||||
req = urllib2.Request(url)
|
||||
base64string = base64.encodestring('%s:%s' % (userID, password))[:-1]
|
||||
req.add_header("Authorization", "Basic %s" % base64string)
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
req.get_method = lambda: 'DELETE'
|
||||
try:
|
||||
f = opener.open(req)
|
||||
result = f.read()
|
||||
|
||||
except urllib2.URLError, e:
|
||||
msg = ""
|
||||
try:
|
||||
msg = e.read()
|
||||
except:
|
||||
pass
|
||||
raise WeaveException("Unable to communicate with Weave server: " + str(e) + "; %s" % msg)
|
||||
|
||||
|
||||
|
||||
def setUserProfile(serverURL, userID, password, profileField, profileValue, withHost=None):
|
||||
if userID.find('"') >=0:
|
||||
raise ValueError("Weave userIDs may not contain the quote character")
|
||||
|
||||
url = serverURL + "/user/1/%s/profile" % userID
|
||||
|
||||
payload = profileValue
|
||||
req = urllib2.Request(url, data=payload)
|
||||
base64string = base64.encodestring('%s:%s' % (userID, password))[:-1]
|
||||
req.add_header("Authorization", "Basic %s" % base64string)
|
||||
req.get_method = lambda: 'POST'
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
try:
|
||||
f = opener.open(req)
|
||||
result = f.read()
|
||||
if result != "success":
|
||||
raise WeaveException("Unable to change user password: got return value '%s' from server" % result)
|
||||
|
||||
except urllib2.URLError, e:
|
||||
raise WeaveException("Unable to communicate with Weave server: %s" % e)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def storage_http_op(method, userID, password, url, payload=None, asJSON=True, ifUnmodifiedSince=None, withConfirmation=None, withAuth=True, withHost=None, outputFormat=None):
|
||||
|
||||
req = urllib2.Request(url, data=payload)
|
||||
if withAuth:
|
||||
base64string = base64.encodestring('%s:%s' % (userID, password))[:-1]
|
||||
req.add_header("Authorization", "Basic %s" % base64string)
|
||||
if ifUnmodifiedSince:
|
||||
req.add_header("X-If-Unmodified-Since", "%s" % ifUnmodifiedSince)
|
||||
if withConfirmation:
|
||||
req.add_header("X-Confirm-Delete", "true")
|
||||
if outputFormat:
|
||||
req.add_header("Accept", outputFormat)
|
||||
if withHost:
|
||||
req.add_header("Host", withHost)
|
||||
|
||||
req.get_method = lambda: method
|
||||
|
||||
#print "%s %s" % (method, url)
|
||||
#if payload: print "> %s" % payload
|
||||
try:
|
||||
f = opener.open(req)
|
||||
result = f.read()
|
||||
#print "< %s" % result
|
||||
if asJSON:
|
||||
return json.loads(result)
|
||||
else:
|
||||
return result
|
||||
except urllib2.URLError, e:
|
||||
msg = ""
|
||||
try:
|
||||
msg = e.read()
|
||||
except:
|
||||
pass
|
||||
# TODO process error code
|
||||
raise WeaveException("Unable to communicate with Weave server: %s" % e)
|
||||
|
||||
|
||||
def add_or_modify_item(storageServerURL, userID, password, collection, item, urlID=None, ifUnmodifiedSince=None, withHost=None):
|
||||
'''Adds the WBO defined in 'item' to 'collection'. If the WBO does
|
||||
not contain a payload, will update the provided metadata fields on an
|
||||
already-defined object.
|
||||
|
||||
Returns the timestamp of the modification.'''
|
||||
if urlID:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s/%s" % (userID, collection, urllib.quote(urlID))
|
||||
else:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s" % (userID, collection)
|
||||
if type(item) == str:
|
||||
itemJSON = item
|
||||
else:
|
||||
itemJSON = json.dumps(item)
|
||||
return storage_http_op("PUT", userID, password, url, itemJSON, asJSON=False, ifUnmodifiedSince=ifUnmodifiedSince, withHost=withHost)
|
||||
|
||||
def add_or_modify_items(storageServerURL, userID, password, collection, itemArray, ifUnmodifiedSince=None, withHost=None):
|
||||
'''Adds all the items defined in 'itemArray' to 'collection'; effectively
|
||||
performs an add_or_modify_item for each.
|
||||
|
||||
Returns a map of successful and modified saves, like this:
|
||||
|
||||
{"modified":1233702554.25,
|
||||
"success":["{GXS58IDC}12","{GXS58IDC}13","{GXS58IDC}15","{GXS58IDC}16","{GXS58IDC}18","{GXS58IDC}19"],
|
||||
"failed":{"{GXS58IDC}11":["invalid parentid"],
|
||||
"{GXS58IDC}14":["invalid parentid"],
|
||||
"{GXS58IDC}17":["invalid parentid"],
|
||||
"{GXS58IDC}20":["invalid parentid"]}
|
||||
}
|
||||
'''
|
||||
url = storageServerURL + "/1.0/%s/storage/%s" % (userID, collection)
|
||||
if type(itemArray) == str:
|
||||
itemArrayJSON = itemArray
|
||||
else:
|
||||
itemArrayJSON = json.dumps(itemArray)
|
||||
return storage_http_op("POST", userID, password, url, itemArrayJSON, ifUnmodifiedSince=ifUnmodifiedSince, withHost=withHost)
|
||||
|
||||
|
||||
def delete_item(storageServerURL, userID, password, collection, id, ifUnmodifiedSince=None, withHost=None):
|
||||
url = storageServerURL + "/1.0/%s/storage/%s/%s" % (userID, collection, urllib.quote(id))
|
||||
return storage_http_op("DELETE", userID, password, url, ifUnmodifiedSince=ifUnmodifiedSince, withHost=withHost)
|
||||
|
||||
def delete_items(storageServerURL, userID, password, collection, idArray=None, params=None, withHost=None):
|
||||
if params:
|
||||
if idArray:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s?ids=%s&%s" % (userID, collection, urllib.quote(','.join(idArray)), params)
|
||||
else:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s?%s" % (userID, collection, params)
|
||||
else:
|
||||
if idArray:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s?ids=%s" % (userID, collection, urllib.quote(','.join(idArray)))
|
||||
else:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s" % (userID, collection)
|
||||
return storage_http_op("DELETE", userID, password, url, withHost=withHost)
|
||||
|
||||
def delete_items_older_than(storageServerURL, userID, password, collection, timestamp, withHost=None):
|
||||
url = storageServerURL + "/1.0/%s/storage/%s?older=%s" % (userID, collection, timestamp)
|
||||
return storage_http_op("DELETE", userID, password, url, withHost=withHost)
|
||||
|
||||
def delete_all(storageServerURL, userID, password, confirm=True, withHost=None):
|
||||
'''The only reason you'd want confirm=False is for unit testing'''
|
||||
url = storageServerURL + "/1.0/%s/storage" % (userID)
|
||||
return storage_http_op("DELETE", userID, password, url, asJSON=False, withConfirmation=confirm, withHost=withHost)
|
||||
|
||||
def get_collection_counts(storageServerURL, userID, password, withHost=None):
|
||||
url = storageServerURL + "/1.0/%s/info/collection_counts" % (userID)
|
||||
return storage_http_op("GET", userID, password, url, withHost=withHost)
|
||||
|
||||
def get_collection_timestamps(storageServerURL, userID, password, withHost=None):
|
||||
url = storageServerURL + "/1.0/%s/info/collections" % (userID)
|
||||
return storage_http_op("GET", userID, password, url, withHost=withHost)
|
||||
|
||||
def get_collection_ids(storageServerURL, userID, password, collection, params=None, asJSON=True, outputFormat=None, withHost=None):
|
||||
if params:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s?%s" % (userID, collection, params)
|
||||
else:
|
||||
url = storageServerURL + "/1.0/%s/storage/%s" % (userID, collection)
|
||||
return storage_http_op("GET", userID, password, url, asJSON=asJSON, outputFormat=outputFormat, withHost=withHost)
|
||||
|
||||
def get_item(storageServerURL, userID, password, collection, id, asJSON=True, withAuthUser=None, withAuth=True, withHost=None):
|
||||
"""withAuth is used for testing only: if set to False the Authorization header is omitted.
|
||||
withAuthUser is used for testing only: it sets the HTTP Authorize user to something other than userID"""
|
||||
url = storageServerURL + "/1.0/%s/storage/%s/%s?full=1" % (userID, collection, urllib.quote(id, safe=''))
|
||||
authUser = userID
|
||||
if withAuthUser: authUser = withAuthUser
|
||||
return storage_http_op("GET", authUser, password, url, asJSON=asJSON, withAuth=withAuth, withHost=withHost)
|
||||
|
||||
def get_quota(storageServerURL, userID, password, withHost=None):
|
||||
"Returns an array of [<amount used>,<limit>]"
|
||||
url = storageServerURL + "/1.0/%s/info/quota" % (userID)
|
||||
return storage_http_op("GET", userID, password, url, withHost=withHost)
|
|
@@ -1,384 +0,0 @@
|
|||
"""
|
||||
XML Test Runner for PyUnit
|
||||
"""
|
||||
|
||||
# Written by Sebastian Rittau <srittau@jroger.in-berlin.de> and placed in
|
||||
# the Public Domain. With contributions by Paolo Borelli and others.
|
||||
|
||||
from __future__ import with_statement
|
||||
|
||||
__version__ = "0.1"
|
||||
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import unittest
|
||||
from xml.sax.saxutils import escape
|
||||
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class _TestInfo(object):
|
||||
|
||||
"""Information about a particular test.
|
||||
|
||||
Used by _XMLTestResult.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, test, time):
|
||||
(self._class, self._method) = test.id().rsplit(".", 1)
|
||||
self._time = time
|
||||
self._error = None
|
||||
self._failure = None
|
||||
|
||||
@staticmethod
|
||||
def create_success(test, time):
|
||||
"""Create a _TestInfo instance for a successful test."""
|
||||
return _TestInfo(test, time)
|
||||
|
||||
@staticmethod
|
||||
def create_failure(test, time, failure):
|
||||
"""Create a _TestInfo instance for a failed test."""
|
||||
info = _TestInfo(test, time)
|
||||
info._failure = failure
|
||||
return info
|
||||
|
||||
@staticmethod
|
||||
def create_error(test, time, error):
|
||||
"""Create a _TestInfo instance for an erroneous test."""
|
||||
info = _TestInfo(test, time)
|
||||
info._error = error
|
||||
return info
|
||||
|
||||
def print_report(self, stream):
|
||||
"""Print information about this test case in XML format to the
|
||||
supplied stream.
|
||||
|
||||
"""
|
||||
stream.write(' <testcase classname="%(class)s" name="%(method)s" time="%(time).4f">' % \
|
||||
{
|
||||
"class": self._class,
|
||||
"method": self._method,
|
||||
"time": self._time,
|
||||
})
|
||||
if self._failure is not None:
|
||||
self._print_error(stream, 'failure', self._failure)
|
||||
if self._error is not None:
|
||||
self._print_error(stream, 'error', self._error)
|
||||
stream.write('</testcase>\n')
|
||||
|
||||
def _print_error(self, stream, tagname, error):
|
||||
"""Print information from a failure or error to the supplied stream."""
|
||||
text = escape(str(error[1]))
|
||||
stream.write('\n')
|
||||
stream.write(' <%s type="%s">%s\n' \
|
||||
% (tagname, _clsname(error[0]), text))
|
||||
tb_stream = StringIO()
|
||||
traceback.print_tb(error[2], None, tb_stream)
|
||||
stream.write(escape(tb_stream.getvalue()))
|
||||
stream.write(' </%s>\n' % tagname)
|
||||
stream.write(' ')
|
||||
|
||||
|
||||
def _clsname(cls):
|
||||
return cls.__module__ + "." + cls.__name__
|
||||
|
||||
|
||||
class _XMLTestResult(unittest.TestResult):
|
||||
|
||||
"""A test result class that stores result as XML.
|
||||
|
||||
Used by XMLTestRunner.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, classname):
|
||||
unittest.TestResult.__init__(self)
|
||||
self._test_name = classname
|
||||
self._start_time = None
|
||||
self._tests = []
|
||||
self._error = None
|
||||
self._failure = None
|
||||
|
||||
def startTest(self, test):
|
||||
unittest.TestResult.startTest(self, test)
|
||||
self._error = None
|
||||
self._failure = None
|
||||
self._start_time = time.time()
|
||||
|
||||
def stopTest(self, test):
|
||||
time_taken = time.time() - self._start_time
|
||||
unittest.TestResult.stopTest(self, test)
|
||||
if self._error:
|
||||
info = _TestInfo.create_error(test, time_taken, self._error)
|
||||
elif self._failure:
|
||||
info = _TestInfo.create_failure(test, time_taken, self._failure)
|
||||
else:
|
||||
info = _TestInfo.create_success(test, time_taken)
|
||||
self._tests.append(info)
|
||||
|
||||
def addError(self, test, err):
|
||||
unittest.TestResult.addError(self, test, err)
|
||||
self._error = err
|
||||
|
||||
def addFailure(self, test, err):
|
||||
unittest.TestResult.addFailure(self, test, err)
|
||||
self._failure = err
|
||||
|
||||
def print_report(self, stream, time_taken, out, err):
|
||||
"""Prints the XML report to the supplied stream.
|
||||
|
||||
The time the tests took to perform as well as the captured standard
|
||||
output and standard error streams must be passed in.
|
||||
|
||||
"""
|
||||
stream.write('<testsuite errors="%(e)d" failures="%(f)d" ' % \
|
||||
{ "e": len(self.errors), "f": len(self.failures) })
|
||||
stream.write('name="%(n)s" tests="%(t)d" time="%(time).3f">\n' % \
|
||||
{
|
||||
"n": self._test_name,
|
||||
"t": self.testsRun,
|
||||
"time": time_taken,
|
||||
})
|
||||
for info in self._tests:
|
||||
info.print_report(stream)
|
||||
stream.write(' <system-out><![CDATA[%s]]></system-out>\n' % out)
|
||||
stream.write(' <system-err><![CDATA[%s]]></system-err>\n' % err)
|
||||
stream.write('</testsuite>\n')
|
||||
|
||||
|
||||
class XMLTestRunner(object):
|
||||
|
||||
"""A test runner that stores results in XML format compatible with JUnit.
|
||||
|
||||
XMLTestRunner(stream=None) -> XML test runner
|
||||
|
||||
The XML file is written to the supplied stream. If stream is None, the
|
||||
results are stored in a file called TEST-<module>.<class>.xml in the
|
||||
current working directory (if not overridden with the path property),
|
||||
where <module> and <class> are the module and class name of the test class.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, stream=None):
|
||||
self._stream = stream
|
||||
self._path = "."
|
||||
|
||||
def run(self, test):
|
||||
"""Run the given test case or test suite."""
|
||||
class_ = test.__class__
|
||||
classname = class_.__module__ + "." + class_.__name__
|
||||
if self._stream is None:
|
||||
filename = "TEST-%s.xml" % classname
|
||||
stream = open(os.path.join(self._path, filename), "w")
|
||||
stream.write('<?xml version="1.0" encoding="utf-8"?>\n')
|
||||
else:
|
||||
stream = self._stream
|
||||
|
||||
result = _XMLTestResult(classname)
|
||||
start_time = time.time()
|
||||
|
||||
with _fake_std_streams():
|
||||
test(result)
|
||||
try:
|
||||
out_s = sys.stdout.getvalue()
|
||||
except AttributeError:
|
||||
out_s = ""
|
||||
try:
|
||||
err_s = sys.stderr.getvalue()
|
||||
except AttributeError:
|
||||
err_s = ""
|
||||
|
||||
time_taken = time.time() - start_time
|
||||
result.print_report(stream, time_taken, out_s, err_s)
|
||||
if self._stream is None:
|
||||
stream.close()
|
||||
|
||||
return result
|
||||
|
||||
def _set_path(self, path):
|
||||
self._path = path
|
||||
|
||||
path = property(lambda self: self._path, _set_path, None,
|
||||
"""The path where the XML files are stored.
|
||||
|
||||
This property is ignored when the XML file is written to a file
|
||||
stream.""")
|
||||
|
||||
|
||||
class _fake_std_streams(object):
|
||||
|
||||
def __enter__(self):
|
||||
self._orig_stdout = sys.stdout
|
||||
self._orig_stderr = sys.stderr
|
||||
sys.stdout = StringIO()
|
||||
sys.stderr = StringIO()
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
sys.stdout = self._orig_stdout
|
||||
sys.stderr = self._orig_stderr
|
||||
|
||||
|
||||
class XMLTestRunnerTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self._stream = StringIO()
|
||||
|
||||
def _try_test_run(self, test_class, expected):
|
||||
|
||||
"""Run the test suite against the supplied test class and compare the
|
||||
XML result against the expected XML string. Fail if the expected
|
||||
string doesn't match the actual string. All time attributes in the
|
||||
expected string should have the value "0.000". All error and failure
|
||||
messages are reduced to "Foobar".
|
||||
|
||||
"""
|
||||
|
||||
runner = XMLTestRunner(self._stream)
|
||||
runner.run(unittest.makeSuite(test_class))
|
||||
|
||||
got = self._stream.getvalue()
|
||||
# Replace all time="X.YYY" attributes by time="0.000" to enable a
|
||||
# simple string comparison.
|
||||
got = re.sub(r'time="\d+\.\d+"', 'time="0.000"', got)
|
||||
# Likewise, replace all failure and error messages by a simple "Foobar"
|
||||
# string.
|
||||
got = re.sub(r'(?s)<failure (.*?)>.*?</failure>', r'<failure \1>Foobar</failure>', got)
|
||||
got = re.sub(r'(?s)<error (.*?)>.*?</error>', r'<error \1>Foobar</error>', got)
|
||||
# And finally Python 3 compatibility.
|
||||
got = got.replace('type="builtins.', 'type="exceptions.')
|
||||
|
||||
self.assertEqual(expected, got)
|
||||
|
||||
def test_no_tests(self):
|
||||
"""Regression test: Check whether a test run without any tests
|
||||
matches a previous run.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
pass
|
||||
self._try_test_run(TestTest, """<testsuite errors="0" failures="0" name="unittest.TestSuite" tests="0" time="0.000">
|
||||
<system-out><![CDATA[]]></system-out>
|
||||
<system-err><![CDATA[]]></system-err>
|
||||
</testsuite>
|
||||
""")
|
||||
|
||||
def test_success(self):
|
||||
"""Regression test: Check whether a test run with a successful test
|
||||
matches a previous run.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
def test_foo(self):
|
||||
pass
|
||||
self._try_test_run(TestTest, """<testsuite errors="0" failures="0" name="unittest.TestSuite" tests="1" time="0.000">
|
||||
<testcase classname="__main__.TestTest" name="test_foo" time="0.000"></testcase>
|
||||
<system-out><![CDATA[]]></system-out>
|
||||
<system-err><![CDATA[]]></system-err>
|
||||
</testsuite>
|
||||
""")
|
||||
|
||||
def test_failure(self):
|
||||
"""Regression test: Check whether a test run with a failing test
|
||||
matches a previous run.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
def test_foo(self):
|
||||
self.assert_(False)
|
||||
self._try_test_run(TestTest, """<testsuite errors="0" failures="1" name="unittest.TestSuite" tests="1" time="0.000">
|
||||
<testcase classname="__main__.TestTest" name="test_foo" time="0.000">
|
||||
<failure type="exceptions.AssertionError">Foobar</failure>
|
||||
</testcase>
|
||||
<system-out><![CDATA[]]></system-out>
|
||||
<system-err><![CDATA[]]></system-err>
|
||||
</testsuite>
|
||||
""")
|
||||
|
||||
def test_error(self):
|
||||
"""Regression test: Check whether a test run with a erroneous test
|
||||
matches a previous run.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
def test_foo(self):
|
||||
raise IndexError()
|
||||
self._try_test_run(TestTest, """<testsuite errors="1" failures="0" name="unittest.TestSuite" tests="1" time="0.000">
|
||||
<testcase classname="__main__.TestTest" name="test_foo" time="0.000">
|
||||
<error type="exceptions.IndexError">Foobar</error>
|
||||
</testcase>
|
||||
<system-out><![CDATA[]]></system-out>
|
||||
<system-err><![CDATA[]]></system-err>
|
||||
</testsuite>
|
||||
""")
|
||||
|
||||
def test_stdout_capture(self):
|
||||
"""Regression test: Check whether a test run with output to stdout
|
||||
matches a previous run.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
def test_foo(self):
|
||||
sys.stdout.write("Test\n")
|
||||
self._try_test_run(TestTest, """<testsuite errors="0" failures="0" name="unittest.TestSuite" tests="1" time="0.000">
|
||||
<testcase classname="__main__.TestTest" name="test_foo" time="0.000"></testcase>
|
||||
<system-out><![CDATA[Test
|
||||
]]></system-out>
|
||||
<system-err><![CDATA[]]></system-err>
|
||||
</testsuite>
|
||||
""")
|
||||
|
||||
def test_stderr_capture(self):
|
||||
"""Regression test: Check whether a test run with output to stderr
|
||||
matches a previous run.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
def test_foo(self):
|
||||
sys.stderr.write("Test\n")
|
||||
self._try_test_run(TestTest, """<testsuite errors="0" failures="0" name="unittest.TestSuite" tests="1" time="0.000">
|
||||
<testcase classname="__main__.TestTest" name="test_foo" time="0.000"></testcase>
|
||||
<system-out><![CDATA[]]></system-out>
|
||||
<system-err><![CDATA[Test
|
||||
]]></system-err>
|
||||
</testsuite>
|
||||
""")
|
||||
|
||||
class NullStream(object):
|
||||
"""A file-like object that discards everything written to it."""
|
||||
def write(self, buffer):
|
||||
pass
|
||||
|
||||
def test_unittests_changing_stdout(self):
|
||||
"""Check whether the XMLTestRunner recovers gracefully from unit tests
|
||||
that change stdout, but don't change it back properly.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
def test_foo(self):
|
||||
sys.stdout = XMLTestRunnerTest.NullStream()
|
||||
|
||||
runner = XMLTestRunner(self._stream)
|
||||
runner.run(unittest.makeSuite(TestTest))
|
||||
|
||||
def test_unittests_changing_stderr(self):
|
||||
"""Check whether the XMLTestRunner recovers gracefully from unit tests
|
||||
that change stderr, but don't change it back properly.
|
||||
|
||||
"""
|
||||
class TestTest(unittest.TestCase):
|
||||
def test_foo(self):
|
||||
sys.stderr = XMLTestRunnerTest.NullStream()
|
||||
|
||||
runner = XMLTestRunner(self._stream)
|
||||
runner.run(unittest.makeSuite(TestTest))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
|
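A minimal sketch of driving the runner above directly, assuming the module is importable as xmlrunner (a placeholder name) and that unittest.makeSuite is still available on the interpreter in use:

import unittest
from io import StringIO
from xmlrunner import XMLTestRunner   # placeholder module name for the code above

class SmokeTest(unittest.TestCase):
    def test_addition(self):
        self.assertEqual(1 + 1, 2)

stream = StringIO()
runner = XMLTestRunner(stream)                 # report is written to the stream
runner.run(unittest.makeSuite(SmokeTest))
print(stream.getvalue())                       # <testsuite ...>...</testsuite>

# With stream=None the report goes to TEST-<module>.<class>.xml instead,
# in the directory selected through the path property:
file_runner = XMLTestRunner()
file_runner.path = "/tmp"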
@ -1,158 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
import unittest
|
||||
|
||||
from syncserver.storage.multi import WeaveMultiStorage
|
||||
from syncserver.storage import WeaveStorage
|
||||
|
||||
WeaveStorage.register(WeaveMultiStorage)
|
||||
|
||||
|
||||
class Storage(object):
|
||||
|
||||
def __init__(self, param1, param2):
|
||||
self.param1 = param1
|
||||
self.param2 = param2
|
||||
self.items = {}
|
||||
|
||||
@classmethod
|
||||
def get_name(self):
|
||||
"""Returns the name of the storage"""
|
||||
return 'storage'
|
||||
|
||||
def user_exists(self, user_name):
|
||||
''
|
||||
return True
|
||||
|
||||
def set_user(self, user_email, **values):
|
||||
''
|
||||
|
||||
def get_user(self, user_name, fields=None):
|
||||
''
|
||||
|
||||
def delete_user(self, user_name):
|
||||
''
|
||||
|
||||
def delete_collection(self, user_name, collection_name):
|
||||
''
|
||||
|
||||
def collection_exists(self, user_name, collection_name):
|
||||
''
|
||||
|
||||
def set_collection(self, user_name, collection_name, **values):
|
||||
''
|
||||
|
||||
def get_collection(self, user_name, collection_name, fields=None):
|
||||
''
|
||||
|
||||
def get_collections(self, user_name, fields=None):
|
||||
''
|
||||
|
||||
def get_collection_names(self, user_name):
|
||||
''
|
||||
|
||||
def get_collection_timestamps(self, user_name):
|
||||
''
|
||||
|
||||
def get_collection_counts(self, user_name):
|
||||
''
|
||||
|
||||
def item_exists(self, user_name, collection_name, item_id):
|
||||
''
|
||||
|
||||
def get_items(self, user_name, collection_name, fields=None):
|
||||
''
|
||||
|
||||
def get_item(self, user_name, collection_name, item_id, fields=None):
|
||||
''
|
||||
|
||||
def set_item(self, user_name, collection_name, item_id, **values):
|
||||
''
|
||||
self.items[item_id] = values
|
||||
|
||||
def set_items(self, user_name, collection_name, items):
|
||||
''
|
||||
|
||||
def delete_item(self, user_name, collection_name, item_id):
|
||||
''
|
||||
|
||||
def delete_items(self, user_name, collection_name, item_ids=None):
|
||||
''
|
||||
|
||||
def get_total_size(self, user_id):
|
||||
''
|
||||
|
||||
def get_collection_sizes(self, user_id):
|
||||
''
|
||||
|
||||
def get_size_left(self, user_id):
|
||||
''
|
||||
|
||||
|
||||
class TestMultiStorage(unittest.TestCase):
|
||||
|
||||
def test_multiple_storages(self):
|
||||
WeaveStorage.register(Storage)
|
||||
|
||||
# Defining a master with two slaves, with the same backend
|
||||
config = {'storage': 'syncserver.storage.multi.WeaveMultiStorage',
|
||||
'storage.master': 'storage',
|
||||
'storage.master.param1': 'one',
|
||||
'storage.master.param2': 'two',
|
||||
'storage.slaves': 'storage1:storage,storage2:storage',
|
||||
'storage.storage1.param1': 'one',
|
||||
'storage.storage1.param2': 'two',
|
||||
'storage.storage2.param1': 'three',
|
||||
'storage.storage2.param2': 'four',
|
||||
}
|
||||
multi = WeaveStorage.get_from_config(config)
|
||||
|
||||
# trying a read
|
||||
self.assertTrue(multi.user_exists('tarek'))
|
||||
|
||||
# trying a write
|
||||
multi.set_item('xx', 'xx', '1', ok=1)
|
||||
for storage in [multi.master] + multi.slaves:
|
||||
self.assertEquals(storage.items, {'1': {'ok': 1}})
|
||||
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTest(unittest.makeSuite(TestMultiStorage))
|
||||
return suite
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(defaultTest="test_suite")
|
|
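The configuration convention exercised by test_multiple_storages above maps prefixed keys onto backend constructor arguments; a hedged sketch with illustrative backend and parameter names:

# Hedged sketch of the config convention: 'storage.<alias>.<param>' entries
# become keyword arguments of the backend registered under that name, and
# 'storage.slaves' lists '<alias>:<backend>' pairs. The 'sql' backend name
# and the sqluri parameters below are illustrative assumptions.
config = {
    'storage': 'syncserver.storage.multi.WeaveMultiStorage',
    'storage.master': 'sql',
    'storage.master.sqluri': 'sqlite:///master.db',
    'storage.slaves': 'replica1:sql,replica2:sql',
    'storage.replica1.sqluri': 'sqlite:///replica1.db',
    'storage.replica2.sqluri': 'sqlite:///replica2.db',
}
# multi = WeaveStorage.get_from_config(config)  # as in the test above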
@ -1,228 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
import unittest
|
||||
from collections import defaultdict
|
||||
|
||||
try:
|
||||
from syncserver.storage.redisql import RediSQLStorage
|
||||
from syncserver.storage import redisql
|
||||
REDIS = True
|
||||
except ImportError:
|
||||
REDIS = False
|
||||
from syncserver.storage import WeaveStorage
|
||||
|
||||
_UID = 1
|
||||
|
||||
# manual registration
|
||||
if REDIS:
|
||||
WeaveStorage.register(RediSQLStorage)
|
||||
|
||||
|
||||
class FakeRedis(dict):
|
||||
|
||||
def __init__(self, host, port):
|
||||
self.set_called = self.get_called = 0
|
||||
self.sets = defaultdict(list)
|
||||
|
||||
def ping(self):
|
||||
pass
|
||||
|
||||
def set(self, name, value):
|
||||
if value is None and name in self:
|
||||
del self[name]
|
||||
return
|
||||
self.set_called += 1
|
||||
self[name] = value
|
||||
|
||||
def get(self, name):
|
||||
self.get_called += 1
|
||||
try:
|
||||
return self[name]
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def sadd(self, key, name):
|
||||
self.sets[key].append(name)
|
||||
|
||||
def sismember(self, key, name):
|
||||
return name in self.sets[key]
|
||||
|
||||
def srem(self, key, name):
|
||||
self.sets[key].remove(name)
|
||||
|
||||
def smembers(self, key):
|
||||
return [id_ for id_ in self.sets[key]]
|
||||
|
||||
if REDIS:
|
||||
class TestRediSQLStorage(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.old = redisql.GracefulRedisServer
|
||||
redisql.GracefulRedisServer = FakeRedis
|
||||
self.storage = WeaveStorage.get('redisql',
|
||||
sqluri='sqlite:///:memory:',
|
||||
use_quota=True,
|
||||
quota_size=5120)
|
||||
# make sure we have the standard collections in place
|
||||
for name in ('client', 'crypto', 'forms', 'history'):
|
||||
self.storage.set_collection(_UID, name)
|
||||
|
||||
def tearDown(self):
|
||||
self.storage.delete_user(_UID)
|
||||
redisql.GracefulRedisServer = self.old
|
||||
|
||||
def test_basic(self):
|
||||
# just make sure calls go through
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'col1')
|
||||
self.storage.set_item(_UID, 'col1', '1', payload='XXX')
|
||||
|
||||
# these calls should be cached
|
||||
res = self.storage.get_item(_UID, 'col1', '1')
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
|
||||
# this should remove the cache
|
||||
self.storage.delete_items(_UID, 'col1')
|
||||
items = self.storage.get_items(_UID, 'col1')
|
||||
self.assertEquals(len(items), 0)
|
||||
|
||||
def test_meta_global(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'meta')
|
||||
self.storage.set_item(_UID, 'meta', 'global', payload='XXX')
|
||||
|
||||
# these calls should be cached
|
||||
res = self.storage.get_item(_UID, 'meta', 'global')
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
self.assertEquals(self.storage._conn.get_called, 2)
|
||||
self.assertEquals(self.storage._conn.set_called, 2)
|
||||
self.assertEquals(self.storage._conn.keys(), ['meta:global:1',
|
||||
'collections:stamp:1:meta'])
|
||||
|
||||
# this should remove the cache
|
||||
self.storage.delete_item(_UID, 'meta', 'global')
|
||||
self.assertEquals(self.storage._conn.keys(),
|
||||
['collections:stamp:1:meta'])
|
||||
|
||||
items = [{'id': 'global', 'payload': 'xxx'},
|
||||
{'id': 'other', 'payload': 'xxx'},
|
||||
]
|
||||
self.storage.set_items(_UID, 'meta', items)
|
||||
self.assertEquals(self.storage._conn.keys(), ['meta:global:1',
|
||||
'collections:stamp:1:meta'])
|
||||
|
||||
# this should remove the cache
|
||||
self.storage.delete_items(_UID, 'meta')
|
||||
items = self.storage.get_items(_UID, 'meta')
|
||||
self.assertEquals(len(items), 0)
|
||||
self.assertEquals(self.storage._conn.keys(),
|
||||
['collections:stamp:1:meta'])
|
||||
|
||||
def test_tabs(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'tabs')
|
||||
self.storage.set_item(_UID, 'tabs', '1', payload='XXX')
|
||||
|
||||
# these calls should be cached
|
||||
res = self.storage.get_item(_UID, 'tabs', '1')
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
self.assertEquals(self.storage._conn.get_called, 1)
|
||||
self.assertEquals(self.storage._conn.set_called, 3)
|
||||
self.assertEquals(self.storage._conn.keys(), ['tabs:1:1',
|
||||
'tabs:size:1:1',
|
||||
'collections:stamp:1:tabs'])
|
||||
|
||||
# this should remove the cache
|
||||
self.storage.delete_item(_UID, 'tabs', '1')
|
||||
self.assertEquals(self.storage._conn.keys(),
|
||||
['collections:stamp:1:tabs'])
|
||||
|
||||
items = [{'id': '1', 'payload': 'xxx'},
|
||||
{'id': '2', 'payload': 'xxx'},
|
||||
]
|
||||
self.storage.set_items(_UID, 'tabs', items)
|
||||
keys = self.storage._conn.keys()
|
||||
keys.sort()
|
||||
self.assertEquals(keys, ['collections:stamp:1:tabs', 'tabs:1:1',
|
||||
'tabs:1:2', 'tabs:size:1:1',
|
||||
'tabs:size:1:2'])
|
||||
|
||||
# this should remove the cache
|
||||
self.storage.delete_items(_UID, 'tabs')
|
||||
items = self.storage.get_items(_UID, 'tabs')
|
||||
self.assertEquals(len(items), 0)
|
||||
self.assertEquals(self.storage._conn.keys(),
|
||||
['collections:stamp:1:tabs'])
|
||||
|
||||
def test_size(self):
|
||||
# make sure we get the right size
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'tabs')
|
||||
self.storage.set_collection(_UID, 'foo')
|
||||
self.storage.set_item(_UID, 'tabs', '1', payload='XXX' * 200)
|
||||
self.storage.set_item(_UID, 'foo', '1', payload='XXX' * 200)
|
||||
|
||||
wanted = (len('XXX' * 200) * 2) / 1024.
|
||||
self.assertEquals(self.storage.get_total_size(_UID), wanted)
|
||||
|
||||
def test_collection_stamps(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'tabs')
|
||||
self.storage.set_collection(_UID, 'foo')
|
||||
self.storage.set_item(_UID, 'tabs', '1', payload='XXX' * 200)
|
||||
self.storage.set_item(_UID, 'foo', '1', payload='XXX' * 200)
|
||||
|
||||
get = self.storage._conn.get_called
|
||||
set = self.storage._conn.set_called
|
||||
keys = self.storage._conn.keys()
|
||||
|
||||
stamps = self.storage.get_collection_timestamps(_UID) # pumping cache
|
||||
stamps2 = self.storage.get_collection_timestamps(_UID)
|
||||
self.assertEquals(len(stamps), len(stamps2))
|
||||
self.assertEquals(len(stamps), 6)
|
||||
self.assertEquals(self.storage._conn.get_called, get + 9)
|
||||
self.assertEquals(self.storage._conn.set_called, set + 7)
|
||||
self.assertEquals(len(self.storage._conn.keys()), len(keys) + 5)
|
||||
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
if REDIS:
|
||||
suite.addTest(unittest.makeSuite(TestRediSQLStorage))
|
||||
return suite
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(defaultTest="test_suite")
|
|
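The assertions above imply a small set of Redis key patterns; the following sketch reconstructs them for reference (the real RediSQLStorage may compose keys differently):

# Key patterns reconstructed from the expected values in the tests above;
# treat them as an approximation, not the backend's definitive layout.
def meta_global_key(user_id):
    return 'meta:global:%s' % user_id                 # cached meta/global WBO

def tab_key(user_id, item_id):
    return 'tabs:%s:%s' % (user_id, item_id)          # cached tab payload

def tab_size_key(user_id, item_id):
    return 'tabs:size:%s:%s' % (user_id, item_id)     # cached payload size

def collection_stamp_key(user_id, collection):
    return 'collections:stamp:%s:%s' % (user_id, collection)  # last-modified stamp

assert meta_global_key(1) == 'meta:global:1'
assert collection_stamp_key(1, 'tabs') == 'collections:stamp:1:tabs'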
@ -1,205 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
import unittest
|
||||
import os
|
||||
from syncserver.tests.support import initenv
|
||||
|
||||
_UID = 1
|
||||
|
||||
|
||||
class TestSQLStorage(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.appdir, self.config, self.storage, self.auth = initenv()
|
||||
# we don't support other storages for this test
|
||||
assert self.storage.sqluri.split(':/')[0] in ('mysql', 'sqlite')
|
||||
|
||||
self.sqlfile = self.storage.sqluri.split('sqlite:///')[-1]
|
||||
# make sure we have the standard collections in place
|
||||
for name in ('client', 'crypto', 'forms', 'history', 'key', 'meta',
|
||||
'bookmarks', 'prefs', 'tabs', 'passwords'):
|
||||
self.storage.set_collection(_UID, name)
|
||||
|
||||
def tearDown(self):
|
||||
if os.path.exists(self.sqlfile):
|
||||
os.remove(self.sqlfile)
|
||||
else:
|
||||
self.storage._engine.execute('truncate users')
|
||||
self.storage._engine.execute('truncate collections')
|
||||
self.storage._engine.execute('truncate wbo')
|
||||
|
||||
def test_user_exists(self):
|
||||
self.assertFalse(self.storage.user_exists(_UID))
|
||||
|
||||
def test_set_get_user(self):
|
||||
self.assertFalse(self.storage.user_exists(_UID))
|
||||
self.storage.set_user(_UID, username='tarek', email='tarek@ziade.org')
|
||||
self.assertTrue(self.storage.user_exists(_UID))
|
||||
self.storage.set_user(_UID, email='tarek2@ziade.org')
|
||||
res = self.storage.get_user(_UID, fields=['email'])
|
||||
self.assertEquals(res, (u'tarek2@ziade.org',))
|
||||
res = self.storage.get_user(_UID)
|
||||
self.assertEquals(res, (1, u'tarek', None, u'tarek2@ziade.org', 0,
|
||||
None, None, None))
|
||||
|
||||
def test_collections(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.assertFalse(self.storage.collection_exists(_UID, 'My collection'))
|
||||
self.storage.set_collection(_UID, 'My collection')
|
||||
self.assertTrue(self.storage.collection_exists(_UID, 'My collection'))
|
||||
|
||||
res = self.storage.get_collection(_UID, 'My collection').items()
|
||||
res.sort()
|
||||
wanted = [('collectionid', 11), ('name', u'My collection'),
|
||||
('userid', 1)]
|
||||
self.assertEquals(res, wanted)
|
||||
res = self.storage.get_collection(_UID, 'My collection',
|
||||
fields=['name'])
|
||||
self.assertEquals(res, {'name': 'My collection'})
|
||||
|
||||
res = self.storage.get_collections(_UID)
|
||||
self.assertEquals(len(res), 11)
|
||||
res = res[-1].items()
|
||||
res.sort()
|
||||
self.assertEquals(res, wanted)
|
||||
|
||||
res = self.storage.get_collections(_UID, fields=['name'])
|
||||
res = res[-1].items()
|
||||
res.sort()
|
||||
self.assertEquals(res[-1], ('name', 'My collection'))
|
||||
|
||||
# adding a new collection
|
||||
self.storage.set_collection(_UID, 'My collection 2')
|
||||
res = self.storage.get_collections(_UID)
|
||||
self.assertEquals(len(res), 12)
|
||||
|
||||
names = self.storage.get_collection_names(_UID)
|
||||
self.assertEquals(names[-2:], [(11, 'My collection'),
|
||||
(12, 'My collection 2')])
|
||||
|
||||
# removing a collection
|
||||
self.storage.delete_collection(_UID, 'My collection 2')
|
||||
res = self.storage.get_collections(_UID)
|
||||
self.assertEquals(len(res), 11)
|
||||
|
||||
# removing *all*
|
||||
self.storage.delete_storage(_UID)
|
||||
res = self.storage.get_collections(_UID)
|
||||
self.assertEquals(len(res), 0)
|
||||
self.storage.delete_user(_UID)
|
||||
self.assertFalse(self.storage.user_exists(_UID))
|
||||
|
||||
def test_items(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'col')
|
||||
self.assertFalse(self.storage.item_exists(_UID, 'col', 1))
|
||||
self.assertEquals(self.storage.get_items(_UID, 'col'), [])
|
||||
|
||||
self.storage.set_item(_UID, 'col', 1, payload='XXX')
|
||||
res = self.storage.get_item(_UID, 'col', 1)
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
|
||||
self.storage.set_item(_UID, 'col', 2, payload='XXX')
|
||||
|
||||
items = self.storage.get_items(_UID, 'col')
|
||||
self.assertEquals(len(items), 2)
|
||||
|
||||
self.storage.delete_item(_UID, 'col', 1)
|
||||
items = self.storage.get_items(_UID, 'col')
|
||||
self.assertEquals(len(items), 1)
|
||||
|
||||
self.storage.delete_items(_UID, 'col')
|
||||
items = self.storage.get_items(_UID, 'col')
|
||||
self.assertEquals(len(items), 0)
|
||||
|
||||
self.storage.set_items(_UID, 'col',
|
||||
items=[{'id': 'o', 'payload': 'XXX'}])
|
||||
res = self.storage.get_item(_UID, 'col', 'o')
|
||||
self.assertEquals(res['payload'], 'XXX')
|
||||
|
||||
def test_get_collection_timestamps(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'col1')
|
||||
self.storage.set_collection(_UID, 'col2')
|
||||
self.storage.set_item(_UID, 'col1', 1, payload='XXX')
|
||||
self.storage.set_item(_UID, 'col2', 1, payload='XXX')
|
||||
|
||||
timestamps = self.storage.get_collection_timestamps(_UID)
|
||||
names = timestamps.keys()
|
||||
self.assertTrue('col1' in names)
|
||||
self.assertTrue('col2' in names)
|
||||
col1 = self.storage.get_collection_max_timestamp(_UID, 'col2')
|
||||
self.assertAlmostEquals(col1, timestamps['col2'])
|
||||
|
||||
def test_storage_size(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'col1')
|
||||
|
||||
self.storage.set_item(_UID, 'col1', 1, payload='XXX' * 34)
|
||||
self.storage.set_item(_UID, 'col1', 1, payload='XXX' * 876)
|
||||
self.assertAlmostEquals(self.storage.get_total_size(_UID), 2.566, 3)
|
||||
|
||||
def test_ttl(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'col1')
|
||||
self.storage.set_item(_UID, 'col1', 1, payload='XXX' * 34)
|
||||
self.storage.set_item(_UID, 'col1', 2, payload='XXX' * 876, ttl=0)
|
||||
self.assertEquals(len(self.storage.get_items(_UID, 'col1')), 1)
|
||||
self.assertEquals(len(self.storage.get_items(_UID, 'col1',
|
||||
filters={'ttl': ('>', -1)})),
|
||||
2)
|
||||
|
||||
def test_dashed_ids(self):
|
||||
self.storage.set_user(_UID, email='tarek@ziade.org')
|
||||
self.storage.set_collection(_UID, 'col1')
|
||||
id1 = '{ec1b7457-003a-45a9-bf1c-c34e37225ad7}'
|
||||
id2 = '{339f52e1-deed-497c-837a-1ab25a655e37}'
|
||||
self.storage.set_item(_UID, 'col1', id1, payload='XXX' * 34)
|
||||
self.storage.set_item(_UID, 'col1', id2, payload='XXX' * 89)
|
||||
self.assertEquals(len(self.storage.get_items(_UID, 'col1')), 2)
|
||||
|
||||
# now trying to delete them
|
||||
self.storage.delete_items(_UID, 'col1', item_ids=[id1, id2])
|
||||
self.assertEquals(len(self.storage.get_items(_UID, 'col1')), 0)
|
||||
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTest(unittest.makeSuite(TestSQLStorage))
|
||||
return suite
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(defaultTest="test_suite")
|
|
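test_ttl above relies on the filters argument of get_items; a hedged sketch of that convention (operator coverage beyond what the test shows is an assumption):

# Each filters entry maps a WBO field to an (operator, value) pair that the
# SQL backend turns into a WHERE condition. The 'sortindex' line is an
# illustrative assumption; only the ttl filter appears in the test above.
filters = {
    'ttl': ('>', -1),           # include items whose ttl already expired
    'sortindex': ('>=', 100),
}
# items = storage.get_items(_UID, 'col1', filters=filters)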
@ -1,132 +0,0 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Sync Server
|
||||
#
|
||||
# The Initial Developer of the Original Code is the Mozilla Foundation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2010
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Tarek Ziade (tarek@mozilla.com)
|
||||
#
|
||||
# Alternatively, the contents of this file may be used under the terms of
|
||||
# either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
# in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
# of those above. If you wish to allow use of your version of this file only
|
||||
# under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
# use your version of this file under the terms of the MPL, indicate your
|
||||
# decision by deleting the provisions above and replace them with the notice
|
||||
# and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
# the provisions above, a recipient may use your version of this file under
|
||||
# the terms of any one of the MPL, the GPL or the LGPL.
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
import unittest
|
||||
from syncserver.storage import WeaveStorage
|
||||
|
||||
|
||||
class IAmAValidStorage(object):
|
||||
|
||||
@classmethod
|
||||
def get_name(self):
|
||||
"""Returns the name of the storage"""
|
||||
return 'valid'
|
||||
|
||||
def user_exists(self, user_name):
|
||||
''
|
||||
|
||||
def set_user(self, user_email, **values):
|
||||
''
|
||||
|
||||
def get_user(self, user_name, fields=None):
|
||||
''
|
||||
|
||||
def delete_user(self, user_name):
|
||||
''
|
||||
|
||||
def delete_collection(self, user_name, collection_name):
|
||||
''
|
||||
|
||||
def collection_exists(self, user_name, collection_name):
|
||||
''
|
||||
|
||||
def set_collection(self, user_name, collection_name, **values):
|
||||
''
|
||||
|
||||
def get_collection(self, user_name, collection_name, fields=None):
|
||||
''
|
||||
|
||||
def get_collections(self, user_name, fields=None):
|
||||
''
|
||||
|
||||
def get_collection_names(self, user_name):
|
||||
''
|
||||
|
||||
def get_collection_timestamps(self, user_name):
|
||||
''
|
||||
|
||||
def get_collection_counts(self, user_name):
|
||||
''
|
||||
|
||||
def item_exists(self, user_name, collection_name, item_id):
|
||||
''
|
||||
|
||||
def get_items(self, user_name, collection_name, fields=None):
|
||||
''
|
||||
|
||||
def get_item(self, user_name, collection_name, item_id, fields=None):
|
||||
''
|
||||
|
||||
def set_item(self, user_name, collection_name, item_id, **values):
|
||||
''
|
||||
|
||||
def set_items(self, user_name, collection_name, item_id, items):
|
||||
''
|
||||
|
||||
def delete_item(self, user_name, collection_name, item_id):
|
||||
''
|
||||
|
||||
def delete_items(self, user_name, collection_name, item_ids=None):
|
||||
''
|
||||
|
||||
def get_total_size(self, user_id):
|
||||
''
|
||||
|
||||
def get_collection_sizes(self, user_id):
|
||||
''
|
||||
|
||||
def get_size_left(self, user_id):
|
||||
''
|
||||
|
||||
|
||||
class TestWeaveStorageBase(unittest.TestCase):
|
||||
|
||||
def test_register(self):
|
||||
|
||||
class NotAStorage(object):
|
||||
pass
|
||||
|
||||
self.assertRaises(TypeError, WeaveStorage.register, NotAStorage)
|
||||
WeaveStorage.register(IAmAValidStorage)
|
||||
self.assert_(isinstance(WeaveStorage.get('valid'), IAmAValidStorage))
|
||||
|
||||
|
||||
def test_suite():
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTest(unittest.makeSuite(TestWeaveStorageBase))
|
||||
return suite
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(defaultTest="test_suite")
|
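test_register above shows the plugin contract: a class missing the storage interface is rejected with TypeError, and valid backends are later retrieved by their get_name(). A hedged sketch of a custom backend built on the interface stub above:

# Hedged sketch: reuse IAmAValidStorage (defined above) as an interface stub
# and override only what the example needs. Names here are illustrative.
class InMemoryStorage(IAmAValidStorage):

    _items = {}

    @classmethod
    def get_name(cls):
        return 'memory'

    def set_item(self, user_name, collection_name, item_id, **values):
        self._items[(user_name, collection_name, item_id)] = values

    def get_item(self, user_name, collection_name, item_id, fields=None):
        return self._items.get((user_name, collection_name, item_id))

# WeaveStorage.register(InMemoryStorage)
# storage = WeaveStorage.get('memory')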
12
tests.ini
12
tests.ini
|
@ -7,7 +7,7 @@ host = 0.0.0.0
|
|||
port = 5000
|
||||
|
||||
[app:main]
|
||||
use = egg:SyncServer
|
||||
use = egg:SyncCore
|
||||
|
||||
provides_sync_apis = True
|
||||
provides_user_apis = True
|
||||
|
@ -17,15 +17,7 @@ captcha.public_key = 6Le8OLwSAAAAAK-wkjNPBtHD4Iv50moNFANIalJL
|
|||
captcha.private_key = 6Le8OLwSAAAAAEKoqfc-DmoF4HNswD7RNdGwxRij
|
||||
captcha.use_ssl = False
|
||||
|
||||
storage = syncserver.storage.sql.SQLStorage
|
||||
storage.sqluri = sqlite:///tests.db
|
||||
storage.standard_collections = False
|
||||
storage.use_quota = True
|
||||
storage.quota_size = 5120
|
||||
storage.pool_size = 100
|
||||
storage.pool_recycle = 100
|
||||
|
||||
auth = syncserver.auth.sql.SQLAuth
|
||||
auth = synccore.auth.sql.SQLAuth
|
||||
auth.sqluri = sqlite:///tests.db
|
||||
auth.pool_size = 100
|
||||
auth.pool_recycle = 100
|
||||
|
|
|
@ -7,7 +7,7 @@ host = 0.0.0.0
|
|||
port = 5000
|
||||
|
||||
[app:main]
|
||||
use = egg:SyncServer
|
||||
use = egg:SyncCore
|
||||
|
||||
provides_sync_apis = True
|
||||
provides_user_apis = True
|
||||
|
@ -17,15 +17,7 @@ captcha.public_key = 6Le8OLwSAAAAAK-wkjNPBtHD4Iv50moNFANIalJL
|
|||
captcha.private_key = 6Le8OLwSAAAAAEKoqfc-DmoF4HNswD7RNdGwxRij
|
||||
captcha.use_ssl = False
|
||||
|
||||
storage = syncserver.storage.sql.SQLStorage
|
||||
storage.sqluri = sqlite:///tests.db
|
||||
storage.standard_collections = False
|
||||
storage.use_quota = True
|
||||
storage.quota_size = 5120
|
||||
storage.pool_size = 100
|
||||
storage.pool_recycle = 100
|
||||
|
||||
auth = syncserver.auth.ldapsql.LDAPAuth
|
||||
auth = synccore.auth.ldapsql.LDAPAuth
|
||||
auth.ldapuri = ldap://localhost
|
||||
auth.sqluri = sqlite:///tests.db
|
||||
auth.use_tls = False
|
||||
|
|
|
@ -7,7 +7,7 @@ host = 0.0.0.0
|
|||
port = 5000
|
||||
|
||||
[app:main]
|
||||
use = egg:SyncServer
|
||||
use = egg:SyncCore
|
||||
|
||||
provides_sync_apis = True
|
||||
provides_user_apis = True
|
||||
|
@ -17,21 +17,12 @@ captcha.public_key = 6Le8OLwSAAAAAK-wkjNPBtHD4Iv50moNFANIalJL
|
|||
captcha.private_key = 6Le8OLwSAAAAAEKoqfc-DmoF4HNswD7RNdGwxRij
|
||||
captcha.use_ssl = False
|
||||
|
||||
se_captcha = True
|
||||
use_captcha = True
|
||||
captcha.public_key = 6Le8OLwSAAAAAK-wkjNPBtHD4Iv50moNFANIalJL
|
||||
captcha.private_key = 6Le8OLwSAAAAAEKoqfc-DmoF4HNswD7RNdGwxRij
|
||||
captcha.use_ssl = False
|
||||
|
||||
storage = syncserver.storage.sql.SQLStorage
|
||||
storage.sqluri = mysql://sync:sync@localhost/sync
|
||||
storage.standard_collections = False
|
||||
storage.use_quota = True
|
||||
storage.quota_size = 5120
|
||||
storage.pool_size = 100
|
||||
storage.pool_recycle = 3600
|
||||
storage.reset_on_return = True
|
||||
|
||||
auth = syncserver.auth.sql.SQLAuth
|
||||
auth = synccore.auth.sql.SQLAuth
|
||||
auth.sqluri = mysql://sync:sync@localhost/sync
|
||||
|
||||
smtp.host = localhost
|
||||
|
|
|
@ -1,38 +0,0 @@
|
|||
[DEFAULT]
|
||||
debug = true
|
||||
|
||||
[server:main]
|
||||
use = egg:Paste#http
|
||||
host = 0.0.0.0
|
||||
port = 5000
|
||||
|
||||
[app:main]
|
||||
use = egg:SyncServer
|
||||
|
||||
provides_sync_apis = True
|
||||
provides_user_apis = True
|
||||
|
||||
use_captcha = True
|
||||
captcha.public_key = 6Le8OLwSAAAAAK-wkjNPBtHD4Iv50moNFANIalJL
|
||||
captcha.private_key = 6Le8OLwSAAAAAEKoqfc-DmoF4HNswD7RNdGwxRij
|
||||
captcha.use_ssl = False
|
||||
|
||||
storage = syncserver.storage.redisql.RediSQLStorage
|
||||
storage.sqluri = mysql://sync:sync@localhost/sync
|
||||
storage.standard_collections = False
|
||||
storage.use_quota = True
|
||||
storage.quota_size = 5120
|
||||
|
||||
auth = syncserver.auth.sql.SQLAuth
|
||||
auth.sqluri = mysql://sync:sync@localhost/sync
|
||||
|
||||
smtp.host = localhost
|
||||
smtp.port = 25
|
||||
smtp.sender = weave@mozilla.com
|
||||
|
||||
cef = True
|
||||
cef.file = syslog
|
||||
cef.vendor = mozilla
|
||||
cef.version = 0
|
||||
cef.device_version = 1.3
|
||||
cef.product = weave
|