initial commit of bespin python server that is separate from the bespin
client code
This commit is contained in:
Commit
3028cf0fce
@ -0,0 +1,4 @@
syntax: glob

*.pyc
@ -0,0 +1,3 @@
include setup.py
include pavement.py
recursive-include bespin *
@ -0,0 +1,42 @@
Bespin Python Server
====================

This program provides the server-side functionality for Bespin. Though there
is nothing Mac- or Unix-specific about the Bespin server, at the moment it
has only been tested on Unix-like platforms.

Understanding the Code
----------------------

The BespinServer is built entirely out of WSGI components (to see which
packages are used, check out requirements.txt).

In development, the data is stored in an SQLite database (devdata.db).
SQLAlchemy (http://www.sqlalchemy.org) manages the persistence of the
data.

bespin/model.py contains the model objects and the "manager" objects that
know how to store and retrieve them from the database for use by the web
layer. These manager objects are inserted into the WSGI environment.

There is a very trivial "web framework" in bespin/framework.py. This provides
a simple wrapper for:

1. Handling authentication as needed (which is most URLs)
2. Providing Request and Response objects that are simpler to use than the
   standard WSGI environ/start_response parameters. These are just small
   subclasses of WebOb's Request and Response.
3. Providing a decorator that expresses which URL a given function responds
   to (wrapping the behavior of urlrelay); see the sketch after this listing.

Authentication is handled via Paste's AuthTKTMiddleware, which puts
an authentication token into a cookie.

bespin/controllers.py contains the functions that respond to the URLs. It
also contains the make_app function, which knows how to construct the WSGI
application that will appear on the web.

bespin/config.py knows how to configure the system based on "profiles"
such as "test", "dev" and "prod".

bespin/tests contains the unit tests.
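For readers unfamiliar with the pattern, here is a minimal sketch of the decorator-based routing the README describes. It is illustrative only: the names (expose, hello) and the hand-rolled registry are assumptions standing in for bespin/framework.py and urlrelay, not Bespin's actual API.

    # Hypothetical sketch of decorator-based URL dispatch over WebOb.
    import re
    from webob import Request, Response

    _routes = []   # (compiled URL pattern, handler function) pairs

    def expose(pattern):
        """Register a handler for URLs matching pattern (cf. urlrelay)."""
        def register(handler):
            _routes.append((re.compile(pattern), handler))
            return handler
        return register

    @expose(r'^/hello$')
    def hello(request, response):
        response.body = "hello from a Bespin-style handler"
        return response

    def app(environ, start_response):
        """Dispatch a WSGI request to the first matching handler."""
        request = Request(environ)
        for pattern, handler in _routes:
            if pattern.match(request.path_info):
                response = handler(request, Response())
                return response(environ, start_response)
        return Response(status='404 Not Found')(environ, start_response)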
@ -0,0 +1,33 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

# BEGIN VERSION BLOCK
VERSION = 'tip'
VERSION_NAME = 'DEVELOPMENT MODE'
API_VERSION = 'dev'
# END VERSION BLOCK
@ -0,0 +1,65 @@
"""Failed login tracking.

Keep track of the number of failed attempts to log in per user over a given time
period. If there are too many failed login attempts during that period, the user
will be locked out.
"""

import time

class FailedLoginInfo(object):
    def __init__(self, username, can_log_in, failed_attempts):
        self.username = username
        self.can_log_in = can_log_in
        self.failed_attempts = failed_attempts

class DoNothingFailedLoginTracker(object):
    def can_log_in(self, username):
        """Returns FailedLoginInfo. Check the returned result.can_log_in to
        verify that the user is allowed to log in."""
        return FailedLoginInfo(username, True, 0)

    def login_failed(self, fli):
        """Pass in the FailedLoginInfo from can_log_in and a failed login
        attempt will be tracked."""
        pass

    def login_successful(self, fli):
        """Pass in the FailedLoginInfo from can_log_in and the successful
        login will be tracked."""
        pass

class MemoryFailedLoginTracker(object):
    """Stores the information in memory. This is really only for development/testing
    purposes. You would not use this in production. The failed logins are not
    automatically expired."""

    def __init__(self, number_of_attempts, lockout_period):
        self.number_of_attempts = number_of_attempts
        self.lockout_period = lockout_period
        self.store = {}

    def can_log_in(self, username):
        now = time.time()
        current = self.store.get(username, [0, now])
        if now > current[1]:
            # reset if we've passed the time out
            current = [0, 0]
            del self.store[username]

        if current[0] >= self.number_of_attempts:
            return FailedLoginInfo(username, False, current[0])
        return FailedLoginInfo(username, True, current[0])

    def login_failed(self, fli):
        current = self.store.setdefault(fli.username, [0, 0])
        current[0] += 1
        current[1] = time.time() + self.lockout_period

    def login_successful(self, fli):
        try:
            del self.store[fli.username]
        except KeyError:
            pass
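A quick sketch of how the tracker contract above is meant to be used (the username and limits are arbitrary):

    tracker = MemoryFailedLoginTracker(number_of_attempts=2, lockout_period=600)

    fli = tracker.can_log_in("mary")          # fresh user: allowed
    tracker.login_failed(fli)                 # first bad password
    tracker.login_failed(fli)                 # second bad password

    fli = tracker.can_log_in("mary")
    assert not fli.can_log_in                 # locked out after two failures

    tracker.login_successful(fli)             # a success clears the stored count
    assert tracker.can_log_in("mary").can_log_in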
@ -0,0 +1,335 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

import os
import logging
import logging.handlers
import ConfigParser
import sys

import pkg_resources
from path import path

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session

from bespin import stats, auth

class InvalidConfiguration(Exception):
    pass

class Bunch(dict):
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            raise AttributeError("%s not found" % attr)

    def __setattr__(self, attr, value):
        self[attr] = value

c = Bunch()
c.dburl = None
c.db_pool_size = 10
c.db_pool_overflow = 10
c.secret = "This is the phrase that is used for secret stuff."
c.pw_secret = "This phrase encrypts passwords."
c.static_dir = path(os.path.abspath("%s/../../../frontend" % os.path.dirname(__file__)))

c.template_file_dir = None

c.docs_dir = os.path.abspath("%s/../../../docs" % os.path.dirname(__file__))
c.log_file = os.path.abspath("%s/../devserver.log" % os.path.dirname(__file__))
c.default_quota = 15
c.secure_cookie = True
c.http_only_cookie = True
c.template_path = [path(__file__).dirname().abspath()]

c.base_url = "https://bespin.mozilla.com/"

# Settings for sending email
c.email_from = "invalid@ThisIsNotAValidEmailAddressUseSomethingElse.com"
c.email_host = "localhost"
c.email_port = 25

# additional mappings from top-level of URL to directory
# in the config file, this can be provided as
# static_map=foo=/path/to/files;bar=/path/to/other/files
c.static_map = {}

# additionally, a directory can be specified as the tree of
# "first resort". This directory will be checked for static
# files first, and then the default Bespin static files will
# be used. This is a simple way to override Bespin's static
# resources without altering Bespin's sources.
c.static_override = None

# turns on asynchronous running of long jobs (like vcs)
c.async_jobs = True

# beanstalkd host and port
c.queue_host = None
c.queue_port = None

# holds the actual queue object
c.queue = None

# timeout for VCS jobs. Default is 5 minutes, which seems plenty generous.
# expressed in seconds
c.vcs_timeout = 300

# stats type: none, memory, redis
# memory just holds the stats in a dictionary
# redis stores the stats in a redis server
# http://code.google.com/p/redis/
c.stats_type = "none"
c.redis_host = None
c.redis_port = None

# login failure tracking: none, memory, redis
# memory holds the login failure attempts in a dictionary and should
# not be used in production
# redis holds the login failures in redis (using the same redis
# as above for stats)
c.login_failure_tracking = "none"

# number of attempts before a user is locked out
c.login_attempts = 10

# how long a user is locked out (in seconds)
c.lockout_period = 600

# The options for mobwrite_implementation are defined in controllers.py.
# Currently: MobwriteInProcess, MobwriteTelnetProxy, or MobwriteHttpProxy
c.mobwrite_implementation = "MobwriteHttpProxy"
c.mobwrite_server_port = 3017
c.mobwrite_server_address = "127.0.0.1"

# if this is true, the user's UUID will be used as their
# user directory name. If it's false, their username will
# be used. Generally, you'll only want this to be false
# in development.
c.use_uuid_as_dir_identifier = True

c.fslevels = 3

c.max_import_file_size = 20000000

c.log_requests_to_stdout = False
c.log_to_stdout = False

# should Project and User names be restricted to a subset
# of characters
# (see bespin.model._check_identifiers)
c.restrict_identifiers = True

# The set of users that are allowed to view the system stats
# at /stats/. stats_type should either be
# memory or redis for this to make any sense
# this can either be a set in Python or
# a comma separated string
c.stats_users = set()

# a list of keys to display other than the base set
c.stats_display = set()

# Locations that should be added to Dojo's module path for loading
# client side code.
# See http://www.dojotoolkit.org/book/dojo-book-0-9/part-3-programmatic-dijit-and-dojo/modules-and-namespaces/creating-your-own-modul
c.dojo_module_path = {}

# Client side plugin modules that should be loaded automatically by the client.
# Should be a list of dotted names
c.javascript_plugins = []

# List of capabilities provided by the server. This is just a list of strings
# to be interpreted by the client. This will adjust the user interface to
# focus the user on the capabilities provided by this server.
c.capabilities = set(["vcs", "collab"])

# Set this variable to point to the location of a Thunderhead
# source directory and that will be used during development.
c.th_src = None

c.using_dojo_source = False

def set_profile(profile):
    if profile == "test":
        # this import will install the bespin_test store
        c.dburl = "sqlite://"
        c.fsroot = os.path.abspath("%s/../testfiles"
                                   % os.path.dirname(__file__))
        c.async_jobs = False
        c.mobwrite_implementation = "MobwriteInProcess"
        c.fslevels = 0
    elif profile == "dev":
        c.dburl = "sqlite:///%s" % (os.path.abspath("devdata.db"))
        c.fsroot = os.path.abspath("%s/../../../devfiles"
                                   % os.path.dirname(__file__))
        root_log = logging.getLogger()
        root_log.setLevel(logging.DEBUG)

        file_handler = logging.handlers.RotatingFileHandler(c.log_file)
        file_handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
        root_log.addHandler(file_handler)

        paste_log = logging.getLogger("paste.httpserver.ThreadPool")
        paste_log.setLevel(logging.ERROR)

        # turn off the secure cookie, because localhost connections
        # will be HTTP
        c.secure_cookie = False
        c.use_uuid_as_dir_identifier = False
        c.default_quota = 10000
        c.log_requests_to_stdout = True
        c.log_to_stdout = True
        c.mobwrite_implementation = "MobwriteInProcess"
        c.async_jobs = False
        c.fslevels = 0
        c.base_url = "http://localhost:8080/"
        c.email_host = None
        c.vcs_timeout = -1

def load_config(configfile):
    cp = ConfigParser.ConfigParser()
    cp.read(configfile)
    c.update(cp.items("config"))

def load_pyconfig(configfile):
    data = open(configfile).read()
    code = compile(data, configfile, "exec")
    exec(code)
    print(c.fsroot)

def activate_profile():
    for ep in pkg_resources.iter_entry_points("bespin_extensions"):
        ep.load()

    if c.th_src:
        # in development, assume a Th directory above the bespin root
        c.static_map['js/thsrc'] = c.th_src

    if isinstance(c.email_port, basestring):
        c.email_port = int(c.email_port)

    if isinstance(c.static_map, basestring):
        static_map = {}
        mappings = c.static_map.split(";")
        for mapping in mappings:
            name, directory = mapping.split("=")
            static_map[name] = directory
        # replace the string form with the parsed mapping
        c.static_map = static_map

    engine_options = dict()

    if not c.dburl.startswith("sqlite"):
        engine_options.update(pool_size=c.db_pool_size,
                              max_overflow=c.db_pool_overflow)

    # recycle connections for MySQL's benefit (by default, a MySQL
    # server will disconnect automatically after a time.)
    if c.dburl.startswith("mysql"):
        # set it to 4 hours. default MySQL drops connection after 8 hours.
        engine_options['pool_recycle'] = 14400

    c.dbengine = create_engine(c.dburl, **engine_options)
    c.session_factory = scoped_session(sessionmaker(bind=c.dbengine))
    c.fsroot = path(c.fsroot)

    c.static_dir = path(c.static_dir)

    if not c.template_file_dir:
        c.template_file_dir = c.static_dir / "templates"

    c.template_file_dir = path(c.template_file_dir)

    if not c.fsroot.exists():
        c.fsroot.makedirs()

    if c.async_jobs:
        if c.queue_port:
            c.queue_port = int(c.queue_port)

        from bespin import queue
        c.queue = queue.BeanstalkQueue(c.queue_host, c.queue_port)

    if c.redis_port:
        c.redis_port = int(c.redis_port)

    if c.stats_type == "redis" or c.login_failure_tracking == "redis":
        from bespin import redis
        redis_client = redis.Redis(c.redis_host, c.redis_port)
    else:
        redis_client = None

    if c.stats_type == "redis":
        if not redis_client:
            raise InvalidConfiguration("Stats is set to redis, but redis is not configured")
        c.stats = stats.RedisStats(redis_client)
    elif c.stats_type == "memory":
        c.stats = stats.MemoryStats()
    else:
        c.stats = stats.DoNothingStats()

    if isinstance(c.stats_users, basestring):
        c.stats_users = set(c.stats_users.split(','))
    if isinstance(c.stats_display, basestring):
        c.stats_display = set(c.stats_display.split(','))

    if c.login_attempts:
        c.login_attempts = int(c.login_attempts)

    if c.lockout_period:
        c.lockout_period = int(c.lockout_period)

    if c.login_failure_tracking == "redis":
        # (no redis-backed tracker exists in bespin.auth in this commit;
        # this branch only validates that redis itself is configured)
        if not redis_client:
            raise InvalidConfiguration("Login failure tracking is set to redis, but redis is not configured")
    elif c.login_failure_tracking == "memory":
        c.login_tracker = auth.MemoryFailedLoginTracker(c.login_attempts,
                                                        c.lockout_period)
    else:
        c.login_tracker = auth.DoNothingFailedLoginTracker()

    if c.log_to_stdout:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(logging.Formatter("%(relativeCreated)6d %(name)9s %(levelname)5s: %(message)s"))
        logging.getLogger().addHandler(stdout_handler)

def dev_spawning_factory(spawning_config):
    spawning_config['app_factory'] = spawning_config['args'][0]
    set_profile('dev')
    here = os.path.dirname(__file__)
    dbfile = os.path.abspath(os.path.join(here, "..", "devdata.db"))
    c.dburl = "sqlite:///%s" % (dbfile)
    activate_profile()
    return spawning_config

def dev_factory(config):
    from bespin.controllers import make_app
    return make_app()
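A short sketch of how this module is typically driven at startup: pick a profile, apply any overrides, then activate. The values below come straight from set_profile's "dev" branch; the override is illustrative.

    from bespin import config

    config.set_profile("dev")            # dev database, verbose logging, HTTP cookie
    config.c.default_quota = 50          # overrides go here, before activation
    config.activate_profile()            # builds engine, session factory, trackers

    print config.c.dburl                 # sqlite:///.../devdata.db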
The diff for this file was not shown because it is too large
Load diff
@ -0,0 +1,736 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

"""Data classes for working with files/projects/users."""
from datetime import datetime
import logging
from uuid import uuid4
import simplejson
from hashlib import sha256

from path import path as path_obj
from pathutils import LockError as PULockError, Lock, LockFile

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import (Column, PickleType, String, Integer,
                        Boolean, ForeignKey, Binary,
                        DateTime, Text)
from sqlalchemy.orm import relation
from sqlalchemy.exc import DBAPIError
from sqlalchemy.schema import UniqueConstraint

from bespin import config, filesystem
from bespin.utils import _check_identifiers, BadValue
from bespin.filesystem import get_project, Project, LockError

log = logging.getLogger("bespin.model")

class ConflictError(Exception):
    pass

def debug():
    for table in [ User, Group, GroupMembership ]:
        for found in _get_session().query(table).all():
            print found

def _get_session():
    return config.c.session_factory()

Base = declarative_base()

class Connection(Base):
    __tablename__ = "connections"

    followed_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'), primary_key=True)
    followed = relation('User', primaryjoin='User.id==Connection.followed_id')
    following_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'), primary_key=True)
    following = relation('User', primaryjoin='User.id==Connection.following_id')

    followed_viewable = Column(Boolean, default=False)

class Message(Base):
    __tablename__ = "messages"

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("users.id", ondelete="cascade"))
    when = Column(DateTime, default=datetime.now)
    message = Column(Text)

    def __str__(self):
        return "Message[id=%s, msg=%s]" % (self.id, self.message)

class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    uuid = Column(String(36), unique=True)
    username = Column(String(128), unique=True)
    email = Column(String(128))
    password = Column(String(64))
    settings = Column(PickleType())
    quota = Column(Integer, default=10)
    amount_used = Column(Integer, default=0)
    file_location = Column(String(200))
    everyone_viewable = Column(Boolean, default=False)
    messages = relation(Message, order_by=Message.when, backref="user")

    i_follow = relation(Connection,
                        primaryjoin=Connection.following_id==id,
                        secondary=Connection.__table__,
                        secondaryjoin=id==Connection.followed_id)

    following_me = relation(Connection,
                            primaryjoin=Connection.followed_id==id,
                            secondary=Connection.__table__,
                            secondaryjoin=id==Connection.following_id)

    @staticmethod
    def generate_password(password):
        password_hash = sha256()
        password_hash.update(config.c.pw_secret + password)
        return password_hash.hexdigest()

    @classmethod
    def create_user(cls, username, password, email, override_location=None):
        """Adds a new user with the given username and password.
        This raises a ConflictError if the user already
        exists."""
        _check_identifiers("Usernames", username)

        log.debug("Creating user %s", username)
        password = User.generate_password(password)

        user = cls(username, password, email)
        if override_location is not None:
            user.file_location = override_location
        _get_session().add(user)
        # flush to ensure that the user is unique
        try:
            _get_session().flush()
        except DBAPIError, e:
            raise ConflictError("Username %s is already in use" % username)

        project = get_project(user, user, "SampleProject", create=True)
        project.install_template()
        config.c.stats.incr("users")
        return user

    @classmethod
    def find_user(cls, username, password=None):
        """Looks up a user by username. If password is provided, the password
        will be verified. Returns None if the user is not
        found or the password does not match."""
        user = _get_session().query(cls).filter_by(username=username).first()
        if user and password is not None:
            digest = User.generate_password(password)
            if str(user.password) != digest:
                user = None
        return user

    @classmethod
    def find_by_email(cls, email):
        """Looks up a user by email address."""
        users = _get_session().query(cls).filter_by(email=email).all()
        return users

    def __init__(self, username, password, email):
        self.username = username
        self.email = email
        self.password = password
        self.settings = {}
        self.quota = config.c.default_quota
        self.uuid = str(uuid4())
        if config.c.use_uuid_as_dir_identifier:
            file_location = self.uuid
        else:
            file_location = username

        if config.c.fslevels:
            levels = config.c.fslevels
            file_location = "/".join(file_location[:levels]) + "/" + file_location

        self.file_location = file_location

    def __str__(self):
        return "User[%s id=%s]" % (self.username, self.id)

    def check_save(self, amount):
        """Confirms that the user can save this amount. Returns True
        if the user has enough available in their quota, False otherwise.
        """
        return (self.quota * filesystem.QUOTA_UNITS - self.amount_used - amount) > 0

    def quota_info(self):
        """Returns the tuple of quota and amount_used"""
        return (self.quota * filesystem.QUOTA_UNITS, self.amount_used)

    def get_location(self):
        file_loc = self.file_location
        if file_loc.startswith("/"):
            location = path_obj(file_loc)
        else:
            location = config.c.fsroot / file_loc
        if not location.exists():
            location.makedirs()
        return location

    @property
    def projects(self):
        location = self.get_location()
        result = [Project(self, name.basename(), location / name)
                  for name in location.dirs()
                  if not name.basename().startswith(".")]
        result = sorted(result, key=lambda item: item.name)
        return result

    def get_all_projects(self, include_shared=False):
        """Find all the projects that are accessible to the given user.
        See also user.projects; however, this method also takes into account
        projects that have been shared by this user's followees."""
        location = self.get_location()
        result = [Project(self, name.basename(), location / name)
                  for name in location.dirs()
                  if not name.basename().startswith(".")]
        result = sorted(result, key=lambda item: item.name)
        if include_shared:
            for followee_connection in self.users_i_follow():
                followee = followee_connection.followed
                for project in followee.projects:
                    if followee.is_project_shared(project, self):
                        result.append(project)
        return result

    @property
    def statusfile(self):
        return self.get_location() / ".bespin-status.json"

    def recompute_files(self):
        """Recomputes how much space the user has used."""
        total = 0
        # add up all of the directory contents
        # by only looking at directories, we skip
        # over our metadata files
        for proj in self.projects:
            additional = proj.scan_files()
            total += additional
        self.amount_used = total

    #def mark_opened(self, file_obj, mode):
    #    """Keeps track of this file as being currently open by the
    #    user with the mode provided."""
    #    statusfile = self.statusfile
    #    try:
    #        lock = Lock(statusfile)
    #        lock.lock()
    #        if statusfile.exists():
    #            statusinfo = statusfile.bytes()
    #            statusinfo = simplejson.loads(statusinfo)
    #        else:
    #            statusinfo = dict()
    #
    #        open_files = statusinfo.setdefault("open", {})
    #        project_files = open_files.setdefault(file_obj.project.name, {})
    #        project_files[file_obj.name] = {'mode' : mode}
    #        statusfile.write_bytes(simplejson.dumps(statusinfo))
    #        lock.unlock()
    #    except PULockError, e:
    #        raise LockError("Problem tracking open status for file %s: %s" %
    #                        (file_obj.name, str(e)))

    def close(self, file_obj):
        """Keeps track of this file as being currently closed by the
        user."""
        statusfile = self.statusfile
        if not statusfile.exists():
            return

        try:
            lock = Lock(statusfile)
            lock.lock()
            if statusfile.exists():
                statusinfo = statusfile.bytes()
                statusinfo = simplejson.loads(statusinfo)
            else:
                statusinfo = dict()

            open_files = statusinfo.setdefault("open", {})
            project_files = open_files.get(file_obj.project.name)
            if project_files is not None:
                try:
                    del project_files[file_obj.name]
                except KeyError:
                    pass

                if not project_files:
                    del open_files[file_obj.project.name]

            statusfile.write_bytes(simplejson.dumps(statusinfo))

            lock.unlock()
        except PULockError, e:
            raise LockError("Problem tracking open status for file %s: %s" %
                            (file_obj.name, str(e)))

    @property
    def files(self):
        """Returns a dictionary of the form::

            {'project' : {'path/to/file' : {'mode' : 'rw'}}}
        """
        if not self.statusfile.exists():
            return {}
        try:
            statusfile = LockFile(self.statusfile)
            statusinfo = statusfile.read()
            statusfile.close()
        except PULockError, e:
            raise LockError("Problem reading open file status: %s" % str(e))

        statusinfo = simplejson.loads(statusinfo)
        return statusinfo.get("open", {})

    def get_settings(self):
        """Load a user's settings from BespinSettings/settings.
        Returns a dictionary."""
        location = self.get_location()
        settings_file = location / "BespinSettings" / "settings"
        if not settings_file.exists():
            return {}
        settings = {}
        for line in settings_file.lines(retain=False):
            info = line.split(" ", 1)
            if len(info) != 2:
                continue
            settings[info[0]] = info[1]
        return settings

    def find_member(self, member):
        """When a user refers to X, is this a reference to a user or a group or
        even the everyone setting"""
        if isinstance(member, User):
            return member
        if isinstance(member, Group):
            return member
        if isinstance(member, str):
            if member == 'everyone':
                return member
            else:
                group = self.get_group(member)
                if group != None:
                    return group
                else:
                    user = User.find_user(member)
                    if user != None:
                        return user
        raise BadValue("No groups or users found called '%s'" % (member))

    def users_i_follow(self):
        """Retrieve a list of the users that this user follows."""
        return _get_session().query(Connection).filter_by(following=self).all()

    def users_following_me(self):
        """Retrieve a list of the users that are following this user."""
        return _get_session().query(Connection).filter_by(followed=self).all()

    def follow(self, followed_user):
        """Add a follow connection between 2 users"""
        if (followed_user == self):
            raise ConflictError("You can't follow yourself")

        following_user_name = self.username
        followed_user_name = followed_user.username
        _get_session().add(Connection(followed=followed_user, following=self))
        try:
            _get_session().flush()
        except DBAPIError:
            _get_session().rollback()
            raise ConflictError("%s is already following %s" % (following_user_name, followed_user_name))

    def unfollow(self, followed_user):
        """Remove a follow connection between 2 users"""
        following_user_name = self.username
        followed_user_name = followed_user.username
        rows = _get_session().query(Connection) \
                    .filter_by(followed=followed_user) \
                    .filter_by(following=self) \
                    .delete()
        if rows == 0:
            raise ConflictError("%s is not following %s" % (following_user_name, followed_user_name))

    def get_group(self, group_name, create_on_not_found=False, raise_on_not_found=False):
        """Check to see if the given member name represents a group"""
        match = _get_session().query(Group) \
                    .filter_by(owner_id=self.id) \
                    .filter_by(name=group_name) \
                    .first()

        if match != None:
            return match

        if create_on_not_found:
            return self.add_group(group_name)
        elif raise_on_not_found:
            raise ConflictError("%s does not have a group called '%s'" % (self.username, group_name))
        else:
            return None

    def add_group(self, group_name):
        """Create (and return) a new group for the given user, with the given name"""
        group = Group(self, group_name)
        _get_session().add(group)
        _get_session().flush()
        return group

    def get_groups(self, with_member=None):
        """Retrieve a list of the groups created by a given user."""
        query = _get_session().query(Group).filter_by(owner_id=self.id)
        if with_member != None:
            query = query.filter(GroupMembership.user_id==with_member.id) \
                         .filter(Group.id==GroupMembership.group_id)
        return query.all()

    def get_sharing(self, project=None, member=None):
        """Retrieve a list of the shares (at all levels) made by a given user,
        optionally filtered by project and by invited member"""
        if member == None:
            return self._get_user_sharing(project) + \
                   self._get_group_sharing(project) + \
                   self._get_everyone_sharing(project)
        else:
            if member == 'everyone':
                # The user and group shares are irrelevant if we're only looking
                # at everyone sharing
                return self._get_everyone_sharing(project)
            else:
                if isinstance(member, Group):
                    # The user shares are irrelevant if we're only looking at
                    # group level sharing
                    return self._get_group_sharing(project, member) + \
                           self._get_everyone_sharing(project)
                else:
                    return self._get_user_sharing(project, member) + \
                           self._get_group_sharing(project, member) + \
                           self._get_everyone_sharing(project)

    def _get_user_sharing(self, project=None, invited_user=None):
        """Retrieve a list of the user level shares made by a user, optionally
        filtered by project and by invited user"""
        query = _get_session().query(UserSharing).filter_by(owner_id=self.id)
        if project != None:
            query = query.filter_by(project_name=project.name)
        if invited_user != None:
            query = query.filter_by(invited_user_id=invited_user.id)
        return [self._create_share_record(self.username, 'user', sharing) for sharing in query.all()]

    def _get_group_sharing(self, project=None, invited_group=None):
        """Retrieve a list of the group level shares made by a user, optionally
        filtered by project and by invited group"""
        query = _get_session().query(GroupSharing).filter_by(owner_id=self.id)
        if project != None:
            query = query.filter_by(project_name=project.name)
        if invited_group != None:
            query = query.filter_by(invited_group_id=invited_group.id)
        return [self._create_share_record(self.username, 'group', sharing) for sharing in query.all()]

    def _get_everyone_sharing(self, project=None):
        """Retrieve a list of the public level shares made by a user, optionally
        filtered by project"""
        query = _get_session().query(EveryoneSharing).filter_by(owner_id=self.id)
        if project != None:
            query = query.filter_by(project_name=project.name)
        return [self._create_share_record(self.username, 'everyone', sharing) for sharing in query.all()]

    def _create_share_record(self, owner_name, type, sharing):
        """For internal use by the get_*_sharing methods"""
        return {
            'owner':owner_name,
            'project':sharing.project_name,
            'type':type,
            'recipient':sharing.invited_name,
            'edit':sharing.edit,
            'loadany':sharing.loadany
        }

    def is_project_shared(self, project, user, require_write=False):
        if self._is_project_everyone_shared(project, require_write):
            return True
        if self._is_project_user_shared(project, user, require_write):
            return True
        groups = self.get_groups(user)
        for group in groups:
            if self._is_project_group_shared(project, group, require_write):
                return True
        return False

    def _is_project_user_shared(self, project, user, require_write=False):
        if isinstance(project, Project):
            project = project.name
        query = _get_session().query(UserSharing)
        query = query.filter_by(owner_id=self.id)
        query = query.filter_by(project_name=project)
        query = query.filter_by(invited_user_id=user.id)
        if require_write:
            query = query.filter_by(edit=True)
        return query.first() != None

    def _is_project_group_shared(self, project, group, require_write=False):
        if isinstance(project, Project):
            project = project.name
        query = _get_session().query(GroupSharing)
        query = query.filter_by(owner_id=self.id)
        query = query.filter_by(project_name=project)
        query = query.filter_by(invited_group_id=group.id)
        if require_write:
            query = query.filter_by(edit=True)
        return query.first() != None

    def _is_project_everyone_shared(self, project, require_write=False):
        if isinstance(project, Project):
            project = project.name
        query = _get_session().query(EveryoneSharing)
        query = query.filter_by(owner_id=self.id)
        query = query.filter_by(project_name=project)
        if require_write:
            query = query.filter_by(edit=True)
        return query.first() != None

    def add_sharing(self, project, member, edit=False, loadany=False):
        if member == 'everyone':
            return self._add_everyone_sharing(project, edit, loadany)
        else:
            if isinstance(member, Group):
                return self._add_group_sharing(project, member, edit, loadany)
            else:
                return self._add_user_sharing(project, member, edit, loadany)

    def _add_user_sharing(self, project, invited_user, edit=False, loadany=False):
        sharing = UserSharing(self, project.name, invited_user, edit, loadany)
        _get_session().add(sharing)
        return sharing

    def _add_group_sharing(self, project, invited_group, edit=False, loadany=False):
        sharing = GroupSharing(self, project.name, invited_group, edit, loadany)
        _get_session().add(sharing)
        return sharing

    def _add_everyone_sharing(self, project, edit=False, loadany=False):
        sharing = EveryoneSharing(self, project.name, edit, loadany)
        _get_session().add(sharing)
        return sharing

    def remove_sharing(self, project, member=None):
        if member == None:
            rows = 0
            rows += self._remove_user_sharing(project)
            rows += self._remove_group_sharing(project)
            rows += self._remove_everyone_sharing(project)
            return rows
        else:
            if member == 'everyone':
                return self._remove_everyone_sharing(project)
            else:
                if isinstance(member, Group):
                    return self._remove_group_sharing(project, member)
                else:
                    return self._remove_user_sharing(project, member)

    def _remove_user_sharing(self, project, invited_user=None):
        user_query = _get_session().query(UserSharing).filter_by(owner_id=self.id)
        if project != None:
            user_query = user_query.filter_by(project_name=project.name)
        if invited_user != None:
            user_query = user_query.filter_by(invited_user_id=invited_user.id)
        return user_query.delete()

    def _remove_group_sharing(self, project, invited_group=None):
        group_query = _get_session().query(GroupSharing).filter_by(owner_id=self.id)
        if project != None:
            group_query = group_query.filter_by(project_name=project.name)
        if invited_group != None:
            group_query = group_query.filter_by(invited_group_id=invited_group.id)
        return group_query.delete()

    def _remove_everyone_sharing(self, project):
        everyone_query = _get_session().query(EveryoneSharing).filter_by(owner_id=self.id)
        if project != None:
            everyone_query = everyone_query.filter_by(project_name=project.name)
        return everyone_query.delete()

    def get_viewme(self, member=None):
        return [ "Not implemented", member ]

    def set_viewme(self, member, value):
        return [ "Not implemented", member, value ]

    def publish(self, message_obj):
        data = simplejson.dumps(message_obj)
        message = Message(user=self, message=data)
        self.messages.append(message)
        print(message)

    def pop_messages(self):
        messages = []
        for message in self.messages:
            messages.append(message.message)
            _get_session().delete(message)
        return messages

class Group(Base):
    __tablename__ = "groups"

    id = Column(Integer, primary_key=True)
    owner_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    name = Column(String(128))
    owner_viewable = Column(Boolean, default=False)

    __table_args__ = (UniqueConstraint("owner_id", "name"), {})

    def __init__(self, owner, name, owner_viewable=False):
        self.owner_id = owner.id
        self.name = name
        self.owner_viewable = owner_viewable

    def __str__(self):
        return "Group[%s id=%s owner_id=%s]" % (self.name, self.id, self.owner_id)

    def remove(self):
        """Remove a group (and all its members) from the owning user's profile"""
        return _get_session().query(Group). \
                    filter_by(id=self.id). \
                    delete()

    def get_members(self):
        """Retrieve a list of the members of this group"""
        return _get_session().query(GroupMembership) \
                    .filter_by(group_id=self.id) \
                    .all()

    def add_member(self, other_user):
        """Add a member to this group."""
        if self.owner_id == other_user.id:
            raise ConflictError("You can't be a member of your own group")
        membership = GroupMembership(self, other_user)
        _get_session().add(membership)
        return membership

    def remove_member(self, other_user):
        """Remove a member from this group."""
        return _get_session().query(GroupMembership) \
                    .filter_by(group_id=self.id) \
                    .filter_by(user_id=other_user.id) \
                    .delete()

    def remove_all_members(self):
        """Remove all the members of this group"""
        return _get_session().query(GroupMembership) \
                    .filter_by(group_id=self.id) \
                    .delete()

class GroupMembership(Base):
    __tablename__ = "group_memberships"

    group_id = Column(Integer, ForeignKey('groups.id', ondelete='cascade'), primary_key=True)
    group = relation(Group, primaryjoin=Group.id==group_id)
    user_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'), primary_key=True)
    user = relation(User, primaryjoin=User.id==user_id)

    def __init__(self, group, user):
        if group.id == None:
            raise BadValue("Null group.id for " + group.name)
        self.group_id = group.id
        self.user_id = user.id

    def __str__(self):
        return "GroupMembership[group_id=%s, user_id=%s]" % (self.group_id, self.user_id)

class UserSharing(Base):
    __tablename__ = "user_sharing"

    id = Column(Integer, primary_key=True)
    owner_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    project_name = Column(String(128))
    invited_user_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    invited = relation(User, primaryjoin=User.id==invited_user_id)
    edit = Column(Boolean, default=False)
    loadany = Column(Boolean, default=False)

    __table_args__ = (UniqueConstraint("owner_id", "project_name", "invited_user_id"), {})

    def __init__(self, owner, project_name, user, edit, loadany):
        self.owner_id = owner.id
        self.project_name = project_name
        self.invited_user_id = user.id
        #self.invited = user
        self.edit = edit
        self.loadany = loadany

    @property
    def invited_name(self):
        return self.invited.username

class GroupSharing(Base):
    __tablename__ = "group_sharing"

    id = Column(Integer, primary_key=True)
    owner_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    project_name = Column(String(128))
    invited_group_id = Column(Integer, ForeignKey('groups.id', ondelete='cascade'))
    invited = relation(Group, primaryjoin=Group.id==invited_group_id)
    edit = Column(Boolean, default=False)
    loadany = Column(Boolean, default=False)

    __table_args__ = (UniqueConstraint("owner_id", "project_name", "invited_group_id"), {})

    def __init__(self, owner, project_name, group, edit, loadany):
        self.owner_id = owner.id
        self.project_name = project_name
        self.invited_group_id = group.id
        #self.invited = group
        self.edit = edit
        self.loadany = loadany

    @property
    def invited_name(self):
        return self.invited.name

class EveryoneSharing(Base):
    __tablename__ = "everyone_sharing"

    id = Column(Integer, primary_key=True)
    owner_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    project_name = Column(String(128))
    edit = Column(Boolean, default=False)
    loadany = Column(Boolean, default=False)

    __table_args__ = (UniqueConstraint("owner_id", "project_name"), {})

    def __init__(self, owner, project_name, edit, loadany):
        self.owner_id = owner.id
        self.project_name = project_name
        self.edit = edit
        self.loadany = loadany

    @property
    def invited_name(self):
        return 'everyone'
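To make the relationships above concrete, here is a hedged sketch of the user/follow/sharing flow. It assumes config.activate_profile() has already run; the usernames and passwords are illustrative.

    alice = User.create_user("alice", "secret", "alice@example.com")
    bob = User.create_user("bob", "hunter2", "bob@example.com")

    project = get_project(alice, alice, "SampleProject")
    alice.add_sharing(project, bob, edit=True)   # user-level share, writable

    bob.follow(alice)                            # bob now follows alice
    shared = bob.get_all_projects(include_shared=True)
    assert any(p.name == "SampleProject" for p in shared)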
@ -0,0 +1,4 @@
This is a database migration repository.

More information at
http://code.google.com/p/sqlalchemy-migrate/
@ -0,0 +1,4 @@
#!/usr/bin/env python
from migrate.versioning.shell import main

main(repository='bespin/db_versions')
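The script above just hands control to sqlalchemy-migrate's command-line shell; a hedged usage sketch (the database URL is illustrative):

    python manage.py version_control sqlite:///devdata.db   # put the db under version control
    python manage.py db_version sqlite:///devdata.db        # show current schema version
    python manage.py upgrade sqlite:///devdata.db           # apply pending change scripts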
@ -0,0 +1,20 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=Bespin

# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version

# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]
@ -0,0 +1,71 @@
from sqlalchemy import *
from migrate import *
from migrate.changeset import *

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relation, deferred, mapper, backref

Base = declarative_base()
Base.metadata.bind = migrate_engine

class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    username = Column(String(20), unique=True)
    email = Column(String(128))
    password = Column(String(20))
    settings = Column(PickleType())
    private_project = Column(String(50))
    projects = relation('Project', backref='owner')

class Project(Base):
    __tablename__ = "projects"

    id = Column(Integer, primary_key=True)
    name = Column(String(60), unique=True)
    user_id = Column(Integer, ForeignKey('users.id'))

class File(Base):
    __tablename__ = "files"

    id = Column(Integer, primary_key=True)
    name = Column(String(700), unique=True)
    saved_size = Column(Integer)
    data = deferred(Column(Binary))
    edits = deferred(Column(PickleType))
    dir_id = Column(Integer, ForeignKey('directories.id'))
    dir = relation('Directory', backref="files")


quota = Column('quota', Integer, default=10)
amount_used = Column('amount_used', Integer, default=0)

user_table = User.__table__
utc = user_table.c
file_table = File.__table__
ftc = file_table.c
project_table = Project.__table__
ptc = project_table.c

def upgrade():
    # Upgrade operations go here. Don't create your own engine; use the engine
    # named 'migrate_engine' imported from migrate.
    quota.create(user_table)
    amount_used.create(user_table)
    user_table.update().execute(quota=15)
    query = select([utc.id, func.sum(ftc.saved_size)])
    query = query.where(ptc.user_id==utc.id)
    query = query.where("files.name like projects.name || '/%'")
    query = query.group_by(utc.id)
    result = query.execute()
    for row in list(result):
        query = user_table.update().where(utc.id==row[0])
        query.execute(amount_used=row[1])


def downgrade():
    # Operations to reverse the above upgrade go here.
    # sqlite doesn't support this operation, sorry!
    quota.drop(user_table)
    amount_used.drop(user_table)
@ -0,0 +1,217 @@
from datetime import datetime

from sqlalchemy import *
from migrate import *

from migrate.changeset import *

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relation, deferred, mapper, backref

Base = declarative_base()
Base.metadata.bind = migrate_engine
migrate_engine.echo = True

class File(Base):
    __tablename__ = "files"

    id = Column(Integer, primary_key=True)
    name = Column(String(700), nullable=False)
    created = Column(DateTime, default=datetime.now)
    modified = Column(DateTime, onupdate=datetime.now)
    saved_size = Column(Integer)
    data = deferred(Column(Binary))
    edits = deferred(Column(PickleType))
    dir_id = Column(Integer, ForeignKey('directories.id'))
    dir = relation('Directory', backref="files")

class Directory(Base):
    __tablename__ = "directories"

    id = Column(Integer, primary_key=True)
    name = Column(String(700), nullable=False)
    parent_id = Column(Integer, ForeignKey('directories.id'))
    subdirs = relation('Directory', backref=backref("parent",
                                        remote_side=[id]))

class Project(Base):
    __tablename__ = "projects"

    id = Column(Integer, primary_key=True)
    name = Column(String(60), nullable=False)
    user_id = Column(Integer, ForeignKey('users.id'), nullable=False)


file_table = File.__table__
directory_table = Directory.__table__
project_table = Project.__table__


def upgrade():
    # Upgrade operations go here. Don't create your own engine; use the engine
    # named 'migrate_engine' imported from migrate.
    if migrate_engine.name == "sqlite":
        import sys
        print "sqlite development databases are not supported by this migration"
        print "In other words, you need to recreate your development database."
        print "Sorry! To create your database, run"
        print "bin/paver create_db"
        sys.exit(1)

    file_project_id = Column('project_id', Integer, ForeignKey('projects.id', ondelete='cascade'))
    directory_project_id = Column('project_id', Integer, ForeignKey('projects.id', ondelete='cascade'))

    file_project_id.create(file_table)
    directory_project_id.create(directory_table)
    conn = migrate_engine.connect()
    conn.execute("ALTER TABLE files DROP INDEX name")
    conn.execute("ALTER TABLE directories DROP INDEX name")
    conn.execute("ALTER TABLE projects DROP INDEX name")
    conn.execute("ALTER TABLE files DROP FOREIGN KEY files_ibfk_1")
    conn.execute("""ALTER TABLE files
        ADD CONSTRAINT files_ibfk_1
        FOREIGN KEY (dir_id)
        REFERENCES directories(id)
        ON DELETE CASCADE
    """)
    conn.execute("ALTER TABLE directories DROP FOREIGN KEY directories_ibfk_1")
    conn.execute("""ALTER TABLE directories
        ADD CONSTRAINT directories_ibfk_1
        FOREIGN KEY (parent_id)
        REFERENCES directories(id)
        ON DELETE CASCADE
    """)
    conn.execute("ALTER TABLE filestatus DROP FOREIGN KEY filestatus_ibfk_1")
    conn.execute("""ALTER TABLE filestatus
        ADD CONSTRAINT filestatus_ibfk_1
        FOREIGN KEY (user_id)
        REFERENCES users(id)
        ON DELETE CASCADE
    """)
    conn.execute("ALTER TABLE filestatus DROP FOREIGN KEY filestatus_ibfk_2")
    conn.execute("""ALTER TABLE filestatus
        ADD CONSTRAINT filestatus_ibfk_2
        FOREIGN KEY (file_id)
        REFERENCES files(id)
        ON DELETE CASCADE
    """)
    conn.execute("ALTER TABLE members DROP FOREIGN KEY members_ibfk_1")
    conn.execute("""ALTER TABLE members
        ADD CONSTRAINT members_ibfk_1
        FOREIGN KEY (project_id)
        REFERENCES projects(id)
        ON DELETE CASCADE
    """)
    conn.execute("ALTER TABLE members DROP FOREIGN KEY members_ibfk_2")
    conn.execute("""ALTER TABLE members
        ADD CONSTRAINT members_ibfk_2
        FOREIGN KEY (user_id)
        REFERENCES users(id)
        ON DELETE CASCADE
    """)
    conn.execute("DELETE FROM projects WHERE user_id IS NULL OR user_id=0")

    migrate_engine.echo = False
    transaction = conn.begin()
    result = select([func.count(file_table.c.id)]).execute().fetchall()
    num_files = result[0][0]
    try:
        current_project_name = None
        current_project_id = None
        delete_these = set()
        counter = 0
        for file_obj in select([file_table.c.id,
                file_table.c.name]).order_by(file_table.c.name).execute():
            counter += 1
            if counter % 500 == 0:
                print "%s out of %s (%5.2f%%)" % (counter, num_files,
                    float(counter)*100/num_files)
            project_name, path = file_obj.name.split('/', 1)

            if project_name != current_project_name:
                current_project_name = project_name
                result = select([project_table.c.id]) \
                    .where(project_table.c.name==project_name).execute()
                try:
                    result = iter(result).next()
                    current_project_id = result.id
                except StopIteration:
                    print "Project %s is bogus" % (project_name)
                    current_project_id = None
            if current_project_id is None:
                delete_these.add(file_obj.id)
                continue
            query = file_table.update().where(file_table.c.id==file_obj.id) \
                .values(project_id=current_project_id, name=path).execute()

        print "Deleting %s bad files" % (len(delete_these))
        for id in delete_these:
            file_table.delete().where(file_table.c.id==id).execute()

        current_project_name = None
        current_project_id = None
        delete_these = set()
        result = select([func.count(directory_table.c.id)]).execute().fetchall()
        num_dirs = result[0][0]
        counter = 0
        for dir_obj in select([directory_table.c.id,
                directory_table.c.name]).order_by(directory_table.c.name).execute():
            counter += 1
            if counter % 500 == 0:
                print "%s out of %s (%5.2f%%)" % (counter, num_dirs,
                    float(counter)*100/num_dirs)
            project_name, dir_name = dir_obj.name.split('/', 1)
            if project_name != current_project_name:
                current_project_name = project_name
                result = select([project_table.c.id]) \
                    .where(project_table.c.name==project_name).execute()
                try:
                    result = iter(result).next()
                    current_project_id = result.id
                except StopIteration:
                    print "Project %s is bogus" % (project_name)
                    current_project_id = None
            if current_project_id is None:
                delete_these.add(dir_obj.id)
                continue
            query = directory_table.update().where(directory_table.c.id==dir_obj.id) \
                .values(project_id=current_project_id, name=dir_name).execute()

        print "Deleting %s bad directories" % (len(delete_these))
        for id in delete_these:
            directory_table.update().where(directory_table.c.parent_id==id) \
                .values(parent_id=None).execute()
            directory_table.delete().where(directory_table.c.id==id).execute()
        transaction.commit()
    except:
        transaction.rollback()
        raise

    migrate_engine.echo = True
    conn.execute("""ALTER TABLE projects DROP FOREIGN KEY projects_ibfk_1""")
    conn.execute("""ALTER TABLE projects
        ADD CONSTRAINT projects_ibfk_1
        FOREIGN KEY (user_id)
        REFERENCES users(id)
        ON DELETE CASCADE
    """)
    conn.execute("""ALTER TABLE projects ADD INDEX name (name)""")
    conn.execute("""UPDATE projects, users SET projects.name='BespinSettings'
        WHERE projects.name=users.private_project""")
    conn.execute("ALTER TABLE users DROP COLUMN private_project")
    conn.execute("""ALTER TABLE files
        CHANGE name name VARCHAR(700) NOT NULL,
        CHANGE project_id project_id INTEGER NOT NULL,
        ADD UNIQUE (project_id, name)""")
    conn.execute("""ALTER TABLE directories
        CHANGE name name VARCHAR(700) NOT NULL,
        CHANGE project_id project_id INTEGER NOT NULL,
        ADD UNIQUE (project_id, name)""")
    conn.execute("""ALTER TABLE projects
        CHANGE name name VARCHAR(60) NOT NULL,
        CHANGE user_id user_id INTEGER NOT NULL,
        ADD UNIQUE (user_id, name)""")


def downgrade():
    # Operations to reverse the above upgrade go here.
    print "Downgrade is not available for this one. Sorry!"
@ -0,0 +1,59 @@
import re
import sys

from sqlalchemy import *
from migrate import *

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relation, deferred, mapper, backref

Base = declarative_base()
Base.metadata.bind = migrate_engine

bad_characters = "<>| '\""
invalid_chars = re.compile(r'[%s]' % bad_characters)

class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    username = Column(String(128), unique=True)
    email = Column(String(128))
    password = Column(String(20))
    settings = Column(PickleType())
    projects = relation('Project', backref='owner')
    quota = Column(Integer, default=10)
    amount_used = Column(Integer, default=0)

usertable = User.__table__

changed_names = dict()

def check_name(new_name):
    result = select([func.count('*')]).where(usertable.c.username==new_name).execute()
    row = result.fetchone()
    return row[0]

def upgrade():
    # Upgrade operations go here. Don't create your own engine; use the engine
    # named 'migrate_engine' imported from migrate.
    for row in select([usertable.c.username]).execute():
        name = row.username
        if invalid_chars.search(name):
            changed_names[name] = invalid_chars.sub("", name)
    for old_name, new_name in changed_names.items():
        if check_name(new_name):
            print "%s is in use for %s" % (new_name, old_name)
            new_name = invalid_chars.sub("-", old_name)
            changed_names[old_name] = new_name
            if check_name(new_name):
                print "EVEN WORSE: %s is in use for %s also" % (new_name, old_name)
                print "Can't continue"
                sys.exit(1)
    for old_name, new_name in changed_names.items():
        update(usertable).where(usertable.c.username==old_name).execute(username=new_name)


def downgrade():
    for old_name, new_name in changed_names.items():
        update(usertable).where(usertable.c.username==new_name).execute(username=old_name)
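# Illustration of the renaming rules above (not part of the migration; the
# username is invented): first the bad characters are stripped, and only if
# that collides with an existing user are they replaced with dashes instead:
#
#   >>> invalid_chars.sub("", "bad <user>")
#   'baduser'
#   >>> invalid_chars.sub("-", "bad <user>")
#   'bad--user-'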
@ -0,0 +1,172 @@
import uuid
import re
from pprint import pprint

from sqlalchemy import *
from migrate import *
from path import path

from bespin.config import c

metadata = MetaData()
metadata.bind = migrate_engine
metadata.reflect()

files_table = metadata.tables['files']
users_table = metadata.tables['users']
projects_table = metadata.tables['projects']

class Directory(object):
    def __init__(self, project, name):
        if "../" in name:
            raise BadValue("Relative directories are not allowed")

        # chop off any leading slashes
        while name and name.startswith("/"):
            name = name[1:]

        if not name.endswith("/"):
            name += "/"
        self.name = name

        self.location = project.location / name

    @property
    def short_name(self):
        return self.name.parent.basename() + "/"

class File(object):
    def __init__(self, project, name):
        if "../" in name:
            raise BadValue("Relative directories are not allowed")

        # chop off any leading slashes
        while name and name.startswith("/"):
            name = name[1:]

        self.project = project
        self.name = name
        self.location = project.location / name
        self._info = None

    def save(self, contents):
        file_obj = self.location.write_bytes(contents)

class Project(object):
    """Provides access to the files in a project."""

    def __init__(self, name, location):
        self.name = name
        self.location = location

    def save_file(self, destpath, contents=None):
        """Saves the contents to the file path provided, creating
        directories as needed in between. If last_edit is not provided,
        the file must not be opened for editing. Otherwise, the
        last_edit parameter should include the last edit ID received by
        the user."""
        if "../" in destpath:
            raise BadValue("Relative directories are not allowed")

        # chop off any leading slashes
        while destpath and destpath.startswith("/"):
            destpath = destpath[1:]

        file_loc = self.location / destpath

        # this is the case where save_file is being used to
        # create a directory
        if contents is None:
            if destpath.endswith("/"):
                if file_loc.exists():
                    if file_loc.isfile():
                        raise FileConflict("Cannot create directory %s "
                                           "because there is already a file there."
                                           % destpath)
                else:
                    file_loc.makedirs()
                return
            else:
                raise FSException("Cannot create %s because no content "
                                  " was provided for the file" % destpath)

        if file_loc.isdir():
            raise FileConflict("Cannot save file at %s in project "
                               "%s, because there is already a directory with that name."
                               % (destpath, self.name))

        file_dir = file_loc.dirname()
        if not file_dir.exists():
            file_dir.makedirs()

        file = File(self, destpath)
        file.save(contents)
        return file

def upgrade():
    # Upgrade operations go here. Don't create your own engine; use the engine
    # named 'migrate_engine' imported from migrate.
    conn2 = migrate_engine.connect()
    conn2.execute("""ALTER TABLE users
        ADD COLUMN uuid VARCHAR(36) UNIQUE,
        ADD COLUMN file_location VARCHAR(200),
        ADD COLUMN everyone_viewable TINYINT(1) DEFAULT NULL
""")
    data = users_table.select().execute()
    conn3 = migrate_engine.connect()
    conn4 = migrate_engine.connect()
    total = 0
    invalid = 0
    invalids = dict()
    files_written = 0
    for user in data:
        total += 1
        username = user.username

        user_uuid = str(uuid.uuid4())
        user_location = user_uuid
        user_location = path("/".join(user_location[:4]) + "/" + user_location)
        # normally, we'd want to ensure that we use a prepared statement.
        # in this case, it's fine to just substitute in strings, because we
        # know exactly what these values look like
        conn2.execute("""UPDATE users SET uuid='%s', file_location='%s', username='%s' WHERE id=%s""" % (user_uuid, user_location, username, user.id))

        projects = projects_table.select().where(projects_table.c.user_id==user.id).execute(bind=conn2)
        for project in projects:
            projectname = project.name
            if projectname.startswith("SampleProjectFor:"):
                projectname = "SampleProject"
            else:
                projectname = projectname

            project_location = path(c.fsroot) / user_location / projectname
            project_obj = Project(projectname, project_location)

            files = files_table.select().where(files_table.c.project_id==project.id).execute(bind=conn3)
            for file in files:
                # filter out a weird case where there's a file that has
                # no name but has data
                content = str(file.data)
                if not file.name or (file.name.endswith('/') and len(content) > 0) \
                        or file.name.endswith('yourcommands.js/yourcommands.js') \
                        or file.name.endswith("README/README"):
                    continue
                project_obj.save_file(file.name, content)
                files_written += 1
                if not files_written % 500:
                    print "Files written:", files_written

    pprint(invalids)
    print "Users: Total:", total, " Invalid:", invalid

    data = select([func.count(files_table.c.id)]).execute()
    print "Number of files:", list(data)

def downgrade():
    # Operations to reverse the above upgrade go here.
    conn = migrate_engine.connect()
    conn.execute("""ALTER TABLE USERS
        DROP COLUMN uuid,
        DROP COLUMN file_location,
        DROP COLUMN everyone_viewable""")
@ -0,0 +1,121 @@
from datetime import datetime

from sqlalchemy import *
from migrate import *

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import (Column, PickleType, String, Integer,
                        Boolean, Binary, Table, ForeignKey,
                        DateTime, func, UniqueConstraint, Text)
from sqlalchemy.orm import relation, deferred, mapper, backref
from sqlalchemy.exc import DBAPIError
from sqlalchemy.orm.exc import NoResultFound

metadata = MetaData()
metadata.bind = migrate_engine
Base = declarative_base(metadata=metadata)

class Connection(Base):
    __tablename__ = "connections"

    followed_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'), primary_key=True)
    followed = relation('User', primaryjoin='User.id==Connection.followed_id')
    following_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'), primary_key=True)
    following = relation('User', primaryjoin='User.id==Connection.following_id')

    followed_viewable = Column(Boolean, default=False)

class Message(Base):
    __tablename__ = "messages"

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("users.id", ondelete="cascade"))
    when = Column(DateTime, default=datetime.now)
    message = Column(Text)

class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    uuid = Column(String(36), unique=True)
    username = Column(String(128), unique=True)
    email = Column(String(128))
    password = Column(String(20))
    settings = Column(PickleType())
    quota = Column(Integer, default=10)
    amount_used = Column(Integer, default=0)
    file_location = Column(String(200))
    everyone_viewable = Column(Boolean, default=False)
    messages = relation(Message, order_by=Message.when, backref="user")

    i_follow = relation(Connection,
                        primaryjoin=Connection.following_id==id,
                        secondary=Connection.__table__,
                        secondaryjoin=id==Connection.followed_id)

    following_me = relation(Connection,
                            primaryjoin=Connection.followed_id==id,
                            secondary=Connection.__table__,
                            secondaryjoin=id==Connection.following_id)
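    # A rough reading of the two relations above (an interpretation, not part
    # of the original comments): connections form a self-referential
    # association between users, so "i_follow" walks the rows where this user
    # is the follower, and "following_me" walks the rows where this user is
    # the one being followed.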
class Group(Base):
    __tablename__ = "groups"

    id = Column(Integer, primary_key=True)
    owner_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    name = Column(String(128))
    owner_viewable = Column(Boolean, default=False)

    __table_args__ = (UniqueConstraint("owner_id", "name"), {})

class GroupMembership(Base):
    __tablename__ = "group_memberships"

    group_id = Column(Integer, ForeignKey('groups.id', ondelete='cascade'), primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'), primary_key=True)

class UserSharing(Base):
    __tablename__ = "user_sharing"

    id = Column(Integer, primary_key=True)
    owner_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    project_name = Column(String(128))
    invited_user_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    invited_name = relation(User, primaryjoin=User.id==invited_user_id)
    edit = Column(Boolean, default=False)
    loadany = Column(Boolean, default=False)

    __table_args__ = (UniqueConstraint("owner_id", "project_name", "invited_user_id"), {})

class GroupSharing(Base):
    __tablename__ = "group_sharing"

    id = Column(Integer, primary_key=True)
    owner_id = Column(Integer, ForeignKey('users.id', ondelete='cascade'))
    project_name = Column(String(128))
    invited_group_id = Column(Integer, ForeignKey('groups.id', ondelete='cascade'))
    invited_name = relation(Group, primaryjoin=Group.id==invited_group_id)
    edit = Column(Boolean, default=False)
    loadany = Column(Boolean, default=False)

    __table_args__ = (UniqueConstraint("owner_id", "project_name", "invited_group_id"), {})


def upgrade():
    # Upgrade operations go here. Don't create your own engine; use the engine
    # named 'migrate_engine' imported from migrate.

    # create_all will check for table existence first
    metadata.create_all()


def downgrade():
    # Operations to reverse the above upgrade go here.

    Message.__table__.drop(bind=migrate_engine)
    Connection.__table__.drop(bind=migrate_engine)
    GroupMembership.__table__.drop(bind=migrate_engine)
    GroupSharing.__table__.drop(bind=migrate_engine)
    Group.__table__.drop(bind=migrate_engine)
    UserSharing.__table__.drop(bind=migrate_engine)
@ -0,0 +1,117 @@
from hashlib import sha256

from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from migrate import *

from bespin.config import c

metadata = MetaData()
metadata.bind = migrate_engine
Base = declarative_base(metadata=metadata)

class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    uuid = Column(String(36), unique=True)
    username = Column(String(128), unique=True)
    email = Column(String(128))
    password = Column(String(20))
    settings = Column(PickleType())
    quota = Column(Integer, default=10)
    amount_used = Column(Integer, default=0)
    file_location = Column(String(200))
    everyone_viewable = Column(Boolean, default=False)

pwinfo = dict()

def upgrade():
    # Upgrade operations go here. Don't create your own engine; use the engine
    # named 'migrate_engine' imported from migrate.
    user_table = User.__table__
    pwbackup = open("pwbackup", "w")
    for row in select([user_table.c.username, user_table.c.password]).execute():
        pwbackup.write("%s %s\n" % (row.username, row.password))
        pwinfo[row.username] = row.password
    pwbackup.close()

    conn2 = migrate_engine.connect()
    if migrate_engine.name == "sqlite":
        conn2.execute("""
CREATE TABLE users_temp (
        id INTEGER NOT NULL,
        uuid VARCHAR(36),
        username VARCHAR(128),
        email VARCHAR(128),
        password VARCHAR(64),
        settings BLOB,
        quota INTEGER,
        amount_used INTEGER,
        file_location VARCHAR(200),
        everyone_viewable BOOLEAN,
        PRIMARY KEY (id),
        UNIQUE (uuid),
        UNIQUE (username)
);
""")
        conn2.execute("""INSERT INTO users_temp (id, uuid, username,
            email, password, settings, quota, amount_used, file_location,
            everyone_viewable) SELECT id, uuid, username,
            email, password, settings, quota, amount_used, file_location,
            everyone_viewable FROM users;""")
        conn2.execute("""DROP TABLE USERS;""")
        conn2.execute("""ALTER TABLE users_temp RENAME TO users;""")
    else:
        conn2.execute("""ALTER TABLE users
            CHANGE password password VARCHAR(64)""")

    count = 0
    for username, password in pwinfo.items():
        password_hash = sha256()
        password_hash.update(c.pw_secret + password)

        update(user_table).where(user_table.c.username==username).execute(password=password_hash.hexdigest())
        count += 1
        if count % 500 == 0:
            print count
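    # For reference, the value written above is the hex digest of the site
    # secret concatenated with the stored password (password shown is
    # illustrative only):
    #
    #   sha256(c.pw_secret + "oldpassword").hexdigest()
    #
    # A sha256 hex digest is 64 characters, hence the new VARCHAR(64) column.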

def downgrade():
    # Operations to reverse the above upgrade go here.
    user_table = User.__table__
    conn2 = migrate_engine.connect()
    if migrate_engine.name == "sqlite":
        conn2.execute("""
CREATE TABLE users_temp (
        id INTEGER NOT NULL,
        uuid VARCHAR(36),
        username VARCHAR(128),
        email VARCHAR(128),
        password VARCHAR(32),
        settings BLOB,
        quota INTEGER,
        amount_used INTEGER,
        file_location VARCHAR(200),
        everyone_viewable BOOLEAN,
        PRIMARY KEY (id),
        UNIQUE (uuid),
        UNIQUE (username)
);
""")
        conn2.execute("""INSERT INTO users_temp (id, uuid, username,
            email, password, settings, quota, amount_used, file_location,
            everyone_viewable) SELECT id, uuid, username,
            email, password, settings, quota, amount_used, file_location,
            everyone_viewable FROM users;""")
        conn2.execute("""DROP TABLE USERS;""")
        conn2.execute("""ALTER TABLE users_temp RENAME TO users;""")
    else:
        conn2.execute("""ALTER TABLE users
            CHANGE password password VARCHAR(32)""")
    count = 0
    for username, password in pwinfo.items():
        update(user_table).where(user_table.c.username==username).execute(password=password)
        count += 1
        if count % 500 == 0:
            print count
@ -0,0 +1,325 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

import os
import time
from cStringIO import StringIO
from urllib import quote
from traceback import format_exc
import logging

from simplejson import dumps, loads
from omnisync.main import OmniSync
from omnisync.configuration import Configuration

from bespin.vcs import KeyChain, TempSSHKeyFile
from bespin import queue
from bespin.filesystem import get_project
from bespin.database import User, Message, _get_session


log = logging.getLogger("bespin.deploy")

# Ideally, we would have a better "remote server" abstraction than this!
class DeploymentKeyChain(KeyChain):
    """A keychain with deployment-specific information."""

    def set_ssh_for_project(self, project, username):
        """Saves the fact that SSH keys are being used for
        deployment of this project. Returns the public key."""
        kcdata = self.kcdata
        pubkey = self.get_ssh_key()
        deployments = kcdata.setdefault("deploy", {})
        deployments[project.full_name] = dict(type="ssh",
                                              username=username)

        self._save()

    def set_credentials_for_project(self, project, username,
                                    password):
        """Stores the username/password credentials for this project."""
        kcdata = self.kcdata
        deployments = kcdata.setdefault("deploy", {})
        deployments[project.full_name] = dict(type="password",
                                              username=username, password=password)

        self._save()

    def get_credentials_for_project(self, project):
        """Returns a dictionary with the user's information for
        the given project. The dictionary will have 'type'
        with values 'ssh', or 'password'. If the type is ssh,
        there will be an ssh_key entry. If the type is password,
        there will be username and password entries. If there
        are no credentials stored for the given project,
        None is returned."""
        kcdata = self.kcdata
        deployments = kcdata.setdefault("deploy", {})

        value = deployments.get(project.full_name)

        if value is not None:
            # we're going to make a copy of the data so that it
            # doesn't get mutated against our wishes
            value = dict(value)

            # for SSH, we need to change the SSH key name into the key itself.
            if value['type'] == "ssh":
                value['ssh_private_key'] = kcdata['ssh']['private']
                value['ssh_public_key'] = kcdata['ssh']['public']

        return value

    def delete_credentials_for_project(self, project):
        """Forget the authentication information provided
        for the given project. Note that this will not
        remove any SSH keys used by the project."""
        kcdata = self.kcdata
        deployments = kcdata.setdefault("deploy", {})
        try:
            del deployments[project.full_name]
        except KeyError:
            pass

        self._save()
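# A hedged usage sketch for the keychain above (credential values invented
# for illustration): the keychain is unlocked with the user's keychain
# password, then queried per project.
#
#   kc = DeploymentKeyChain(user, kcpass)
#   kc.set_credentials_for_project(project, "deploy", "s3cret")
#   creds = kc.get_credentials_for_project(project)
#   # creds -> {'type': 'password', 'username': 'deploy', 'password': 's3cret'}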
class ProjectDeploymentOptions(object):
    """Manages the deployment options for a project."""

    supported_types = set(['sftp'])

    @classmethod
    def get(cls, project):
        """Retrieve the deployment options for this project.
        Returns None if the options aren't set."""
        md = project.metadata
        info_json = md.get("deployment")
        if info_json is None:
            return None
        info_json = loads(info_json)
        # keyword argument names must be str objects, not unicode
        kw = dict((key.encode("ascii"), value)
                  for key, value in info_json.items())
        return cls(project, **kw)

    def __init__(self, project, remote_host, remote_directory, type):
        if type not in self.supported_types:
            raise InvalidConfiguration("Type must be one of %s" %
                                       (",".join(self.supported_types)))
        self.project = project
        self.remote_host = remote_host
        self.remote_directory = remote_directory
        self.type = type

    def save(self):
        """Save the options in the project metadata."""
        md = self.project.metadata
        info_dict = dict(remote_host = self.remote_host,
                         remote_directory = self.remote_directory,
                         type = self.type)
        md["deployment"] = dumps(info_dict)
# Deployment-specific Exceptions

class NotConfigured(Exception):
    pass

class InvalidConfiguration(Exception):
    pass

class OmniSyncExit(Exception):
    def __init__(self, return_code):
        super(OmniSyncExit, self).__init__()
        self.return_code = return_code

def deploy_error(qi, e):
    """Handles errors that come up during deployment."""
    log.debug("Handling deploy error: %s", e)
    s = _get_session()
    user = qi.message['user']
    # if the user hadn't already been looked up, go ahead and pull
    # them out of the database
    if isinstance(user, basestring):
        user = User.find_user(user)
    else:
        s.add(user)

    # if we didn't find the user in the database, there's not much
    # we can do.
    if user:
        # it looks like a programming error and we
        # want more information
        tb = format_exc()
        print "E:", tb
        message = dict(jobid=qi.id, output=dict(output=tb,
                                                error=True))
        message['asyncDone'] = True
        retval = Message(user_id=user.id, message=dumps(message))
        s.add(retval)

def run_deploy(user, project, kcpass, options):
    """Add the deployment request to the worker queue."""
    pdo = ProjectDeploymentOptions.get(project)
    if not pdo:
        raise NotConfigured("Deployment is not yet configured.")
    user = user.username
    project = project.name
    job_body = dict(user=user, project=project, kcpass=kcpass,
                    options=options)
    return queue.enqueue("vcs", job_body, execute="bespin.deploy:deploy_impl",
                         error_handler="bespin.deploy:deploy_error",
                         use_db=True)

def deploy_impl(qi):
    """Executed via the worker queue to actually deploy the
    project."""
    message = qi.message
    kcpass = message['kcpass']
    options = _OptionHolder(message['options'])

    s = _get_session()

    user = User.find_user(message['user'])
    project = get_project(user, user, message['project'])
    pdo = ProjectDeploymentOptions.get(project)
    keychain = DeploymentKeyChain(user, kcpass)
    credentials = keychain.get_credentials_for_project(project)
    cwd = os.getcwd()

    keyfile = None

    options.username = credentials['username']

    if credentials['type'] == 'ssh':
        keyfile = TempSSHKeyFile()
        keyfile.store(credentials['ssh_public_key'],
                      credentials['ssh_private_key'])
        options.sshkey = keyfile.filename
    else:
        options.password = credentials['password']

    desturl = "sftp://%s/%s" % (quote(pdo.remote_host, safe=""),
                                quote(pdo.remote_directory))

    try:
        os.chdir(project.location)
        log.debug("Computed destination URL: %s", desturl)
        log.debug("Running with options: %r", options)
        error, output = _launch_sync(qi.id, user.id, desturl, options)

        # there's an extra layer around the output that is
        # expected by the client
        result = dict(output=dict(output=output, error=error))

        result.update(dict(jobid=qi.id, asyncDone=True))
        retvalue = Message(user_id=user.id, message=dumps(result))
        s.add(retvalue)
    finally:
        if keyfile:
            keyfile.delete()
        os.chdir(cwd)


class _OptionHolder(object):
    """Mimics the command line options for OmniSync."""
    verbosity = 1
    delete = False
    attributes = []
    dry_run = False
    update = False
    recursive = True
    exclude_files = []
    include_files = []
    exclude_dirs = r"\.svn|\.hg|\.git|\.bzr"
    include_dirs = []

    # because Bespin is a shared environment, we cannot reasonably
    # set these values on the remote system
    exclude_attributes = set(["owner", "group", "perms"])

    def __init__(self, opts):
        for key, value in opts.items():
            setattr(self, key, value)

    def __repr__(self):
        # was "def repr(self)", which Python never calls implicitly; renamed
        # so that the "%r" logging above actually shows the options
        return repr(self.__dict__)

class BespinOmniSync(OmniSync):
    def __init__(self, qid, user_id, *args, **kw):
        super(BespinOmniSync, self).__init__(*args, **kw)
        self.qid = qid
        self.user_id = user_id
        self.output_stream = StringIO()
        self.handler = logging.StreamHandler(self.output_stream)
        self.handler.setLevel(logging.INFO)
        self.handler.setFormatter(logging.Formatter("%(message)s"))
        self.log = logging.getLogger("omnisync")
        self.log.setLevel(logging.INFO)
        self.log.addHandler(self.handler)
        self.last_display_time = 0

    def file_done(self):
        super(BespinOmniSync, self).file_done()
        if time.time() - self.last_display_time > 5:
            s = _get_session()
            message_body = dict(jobid=self.qid, asyncDone=False,
                                output="%s files and %s bytes copied" % (
                                    self.file_counter, self.bytes_total))
            message = Message(user_id = self.user_id,
                              message=dumps(message_body))
            s.add(message)
            s.commit()

            self.last_display_time = time.time()

    def get_output(self):
        return self.output_stream.getvalue()

    def cleanup(self):
        self.log.removeHandler(self.handler)
        self.output_stream = None

    def report_file_progress(self, prog, bytes_done):
        pass

    def exit(self, return_code):
        raise OmniSyncExit(return_code)

def _launch_sync(qid, user_id, desturl, options):
    omnisync = BespinOmniSync(qid, user_id)
    omnisync.config = Configuration(options)

    try:
        omnisync.sync(".", desturl)
        error = False
    except OmniSyncExit, e:
        if e.return_code:
            error = True
        else:
            error = False

    output = omnisync.get_output()
    omnisync.cleanup()
    return error, output
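# End-to-end flow, as a sketch of the code above: run_deploy() enqueues a
# "vcs" job pointing at deploy_impl(), which unlocks the keychain, builds an
# sftp:// URL from the project's deployment options, and drives OmniSync from
# the project directory; progress and the final result are delivered to the
# user as Message rows that the client polls for.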
(Diff for one file not shown because of its large size.)
@ -0,0 +1,163 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

from urlrelay import url
from webob import Request, Response
import logging

from bespin import filesystem, database, config
from bespin.__init__ import API_VERSION
from bespin.database import User

log = logging.getLogger("bespin.framework")

class BadRequest(Exception):
    pass

class BespinRequest(Request):
    """Custom request object for Bespin.

    Provides the user object and the username of the
    logged in user, among other features."""
    def __init__(self, environ):
        super(BespinRequest, self).__init__(environ)

        if 'bespin.user' in environ:
            self._user = environ['bespin.user']
        else:
            self._user = None
        self.username = environ.get('REMOTE_USER')
        self.kwargs = environ.get('wsgiorg.routing_args')[1]
        self.session_token = environ.get("HTTP_X_DOMAIN_TOKEN")

    @property
    def user(self):
        if self._user:
            return self._user
        if self.username:
            self._user = User.find_user(self.username)
            return self._user
        return None

class BespinResponse(Response):
    def __init__(self, environ, start_request, **kw):
        super(BespinResponse, self).__init__(**kw)
        self.environ = environ
        self.start_request = start_request

    def __call__(self):
        return super(BespinResponse, self).__call__(self.environ, self.start_request)

    def error(self, status, e):
        self.status = status
        self.body = str(e)
        self.environ['bespin.docommit'] = False

def _add_base_headers(response):
    response.headers['X-Bespin-API'] = API_VERSION
    response.headers['Cache-Control'] = "no-store, no-cache, must-revalidate, post-check=0, pre-check=0, private"
    response.headers['Pragma'] = "no-cache"

def expose(url_pattern, method=None, auth=True, skip_token_check=False, profile=False):
    """Expose this function to the world, matching the given URL pattern
    and, optionally, HTTP method. By default, the user is required to
    be authenticated. If auth is False, the user is not required to be
    authenticated."""
    def entangle(func):
        @url(url_pattern, method)
        def wrapped(environ, start_response):

            # reply and action are somewhat nasty but needed to allow the
            # profiler to run code by a "action()" string. Why?
            reply = []
            def action():
                if auth and 'REMOTE_USER' not in environ:
                    response = Response(status='401')
                    _add_base_headers(response)
                    reply.append(response(environ, start_response))
                    return

                config.c.stats.incr("requests_DATE")
                config.c.stats.incr("requests")

                request = BespinRequest(environ)
                response = BespinResponse(environ, start_response)
                skip_test = environ.get("BespinTestApp")

                if not skip_token_check and skip_test != "True":
                    cookie_token = request.cookies.get("Domain-Token")
                    header_token = environ.get("HTTP_X_DOMAIN_TOKEN")

                    if cookie_token is None or header_token != cookie_token:
                        log.error("request.url=%s" % request.url)
                        log.error("cookies[Domain-Token]=%s" % cookie_token)
                        log.error("headers[X-Domain-Token]=%s" % header_token)
                        log.error("WARNING: The anti CSRF attack trip wire just went off. This means an unprotected request has been made. This could be a hacking attempt, or incomplete protection. The request has NOT been halted")
                        config.c.stats.incr("csrf_fail_DATE")

                # Do we need to do this?
                user = request.user
                _add_base_headers(response)
                try:
                    reply.append(func(request, response))
                    return
                except filesystem.NotAuthorized, e:
                    response.error("401 Not Authorized", e)
                except filesystem.FileNotFound, e:
                    environ['bespin.good_url_but_not_found'] = True
                    response.error("404 Not Found", e)
                except filesystem.FileConflict, e:
                    response.error("409 Conflict", e)
                except database.ConflictError, e:
                    response.error("409 Conflict", e)
                except filesystem.OverQuota, e:
                    response.error("400 Bad Request", "Over quota")
                except filesystem.FSException, e:
                    response.error("400 Bad Request", e)
                except filesystem.BadValue, e:
                    response.error("400 Bad Request", e)
                except BadRequest, e:
                    response.error("400 Bad Request", e)
                reply.append(response())
                return

            if profile:
                # The output probably needs tuning for your needs
                import cProfile, pstats, StringIO
                prof = cProfile.Profile()
                prof = prof.runctx("action()", globals(), locals())
                stream = StringIO.StringIO()
                stats = pstats.Stats(prof, stream=stream)
                stats.sort_stats("time")  # Or cumulative
                stats.print_stats(80)  # 80 = how many to print
                # The rest is optional.
                stats.print_callees()
                stats.print_callers()
                log.info("Profile data:\n%s", stream.getvalue())
            else:
                action()

            return reply.pop()

    return entangle
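# A hedged example of wiring a handler with the decorator above (the URL and
# handler are invented for illustration; real handlers live in
# bespin/controllers.py):
#
#   @expose(r'^/hello/(?P<name>.+)$', 'GET', auth=False)
#   def hello(request, response):
#       response.body = "Hello, %s" % request.kwargs['name']
#       return response()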
@ -0,0 +1,5 @@
meta: {{}}

bespin.plugins.loader.moduleLoaded("{{script_name}}", function(require,exports){ {{script}}
return exports;
});
@ -0,0 +1,762 @@
#!/usr/bin/python -S
#
# Copyright (C) 2009 Andy Chu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# $Id$

"""Python implementation of json-template.

JSON Template is a minimal and powerful templating language for transforming a
JSON dictionary to arbitrary text.

To use this module, you will typically use the Template constructor, and catch
various exceptions thrown.  You may also want to use the FromFile/FromString
methods, which allow Template constructor options to be embedded in the
template string itself.

Other functions are exposed for tools which may want to process templates.
"""

__author__ = 'Andy Chu'


import cStringIO
import pprint
import re
import sys

# For formatters
import cgi  # cgi.escape
import urllib  # for urllib.encode


class Error(Exception):
  """Base class for all exceptions in this module.

  Thus you can "except jsontemplate.Error" to catch all exceptions thrown by
  this module.
  """

  def __str__(self):
    """This helps people debug their templates.

    If a variable isn't defined, then some context is shown in the traceback.
    TODO: Attach context for other errors.
    """
    if hasattr(self, 'near'):
      return '%s\n\nNear: %s' % (self.args[0], pprint.pformat(self.near))
    else:
      return self.args[0]


class CompilationError(Error):
  """Base class for errors that happen during the compilation stage."""


class EvaluationError(Error):
  """Base class for errors that happen when expanding the template.

  This class of errors generally involve the data dictionary or the execution
  of the formatters.
  """
  def __init__(self, msg, original_exception=None):
    Error.__init__(self, msg)
    self.original_exception = original_exception


class BadFormatter(CompilationError):
  """A bad formatter was specified, e.g. {variable|BAD}"""

class MissingFormatter(CompilationError):
  """
  Raised when formatters are required, and a variable is missing a formatter.
  """

class ConfigurationError(CompilationError):
  """
  Raised when the Template options are invalid and it can't even be compiled.
  """

class TemplateSyntaxError(CompilationError):
  """Syntax error in the template text."""

class UndefinedVariable(EvaluationError):
  """The template contains a variable not defined by the data dictionary."""


_SECTION_RE = re.compile(r'(repeated)?\s*(section)\s+(\S+)')


class _ProgramBuilder(object):
  """
  Receives method calls from the parser, and constructs a tree of _Section()
  instances.
  """

  def __init__(self, more_formatters):
    """
    Args:
      more_formatters: A function which returns a function to apply to the
          value, given a format string.  It can return None, in which case the
          _DEFAULT_FORMATTERS dictionary is consulted.
    """
    self.current_block = _Section()
    self.stack = [self.current_block]
    self.more_formatters = more_formatters

  def Append(self, statement):
    """
    Args:
      statement: Append a literal
    """
    self.current_block.Append(statement)

  def _GetFormatter(self, format_str):
    """
    The user's formatters are consulted first, then the default formatters.
    """
    formatter = (
        self.more_formatters(format_str) or _DEFAULT_FORMATTERS.get(format_str))

    if formatter:
      return formatter
    else:
      raise BadFormatter('%r is not a valid formatter' % format_str)

  def AppendSubstitution(self, name, formatters):
    formatters = [self._GetFormatter(f) for f in formatters]
    self.current_block.Append((_DoSubstitute, (name, formatters)))

  def NewSection(self, repeated, section_name):
    """For sections or repeated sections."""

    new_block = _Section(section_name)
    if repeated:
      func = _DoRepeatedSection
    else:
      func = _DoSection

    self.current_block.Append((func, new_block))
    self.stack.append(new_block)
    self.current_block = new_block

  def NewClause(self, name):
    # TODO: Raise errors if the clause isn't appropriate for the current block
    # (e.g. 'alternates with' in a non-repeated section)
    self.current_block.NewClause(name)

  def EndSection(self):
    self.stack.pop()
    self.current_block = self.stack[-1]

  def Root(self):
    # It's assumed that we call this at the end of the program
    return self.current_block


class _Section(object):
  """Represents a (repeated) section."""

  def __init__(self, section_name=None):
    """
    Args:
      section_name: name given as an argument to the section
    """
    self.section_name = section_name

    # Pairs of func, args, or a literal string
    self.current_clause = []
    self.statements = {'default': self.current_clause}

  def __repr__(self):
    return '<Block %s>' % self.section_name

  def Statements(self, clause='default'):
    return self.statements.get(clause, [])

  def NewClause(self, clause_name):
    new_clause = []
    self.statements[clause_name] = new_clause
    self.current_clause = new_clause

  def Append(self, statement):
    """Append a statement to this block."""
    self.current_clause.append(statement)


class _ScopedContext(object):
  """Allows scoped lookup of variables.

  If the variable isn't in the current context, then we search up the stack.
  """

  def __init__(self, context):
    self.stack = [context]

  def PushSection(self, name):
    new_context = self.stack[-1].get(name)
    self.stack.append(new_context)
    return new_context

  def Pop(self):
    self.stack.pop()

  def CursorValue(self):
    return self.stack[-1]

  def __iter__(self):
    """Assumes that the top of the stack is a list."""

    # The top of the stack is always the current context.
    self.stack.append(None)
    for item in self.stack[-2]:
      self.stack[-1] = item
      yield item
    self.stack.pop()

  def Lookup(self, name):
    """
    Get the value associated with a name in the current context.  The current
    context could be a dictionary in a list, or a dictionary outside a list.
    """
    i = len(self.stack) - 1
    while 1:
      context = self.stack[i]

      if not isinstance(context, dict):  # Can't look up names in a list or atom
        i -= 1
      else:
        value = context.get(name)
        if value is None:  # A key of None or a missing key are treated the same
          i -= 1
        else:
          return value

      if i <= -1:  # Couldn't find it anywhere
        raise UndefinedVariable('%r is not defined' % name)
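# Lookup sketch (data invented for illustration): names resolve against the
# innermost context first and then up the stack, so a template inside
# {section s} can still see names from the enclosing dictionary:
#
#   ctx = _ScopedContext({'a': 1, 's': {'b': 2}})
#   ctx.PushSection('s')
#   ctx.Lookup('b')  # -> 2
#   ctx.Lookup('a')  # -> 1 (found in the enclosing context)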

def _ToString(x):
  if type(x) in (str, unicode):
    return x
  else:
    return pprint.pformat(x)


def _HtmlAttrValue(x):
  return cgi.escape(x, quote=True)


# See http://google-ctemplate.googlecode.com/svn/trunk/doc/howto.html for more
# escape types.
#
# Also, we might want to take a look at Django filters.
#
# This is a *public* constant, so that callers can use it to construct their
# own formatter lookup dictionaries, and pass them in to Template.
_DEFAULT_FORMATTERS = {
    'html': cgi.escape,

    # The 'htmltag' name is deprecated.  The html-attr-value name is preferred
    # because it can be read with "as":
    #   {url|html-attr-value} means:
    #   "substitute 'url' as an HTML attribute value"
    'html-attr-value': _HtmlAttrValue,
    'htmltag': _HtmlAttrValue,

    'raw': lambda x: x,
    # Used for the length of a list.  Can be used for the size of a dictionary
    # too, though I haven't run into that use case.
    'size': lambda value: str(len(value)),

    # The argument is a dictionary, and we get an 'a=1&b=2' string back.
    'url-params': urllib.urlencode,

    # The argument is an atom, and it takes 'Search query?' -> 'Search+query%3F'
    'url-param-value': urllib.quote_plus,  # param is an atom

    # The default formatter, when no other default is specified.  For
    # debugging, this could be lambda x: json.dumps(x, indent=2), but here we
    # want to be compatible with Python 2.4.
    'str': _ToString,

    # Just show a plain URL on an HTML page (without anchor text).
    'plain-url': lambda x: '<a href="%s">%s</a>' % (
        cgi.escape(x, quote=True), cgi.escape(x)),

    # Placeholders for "standard names".  We're not including them by default
    # since they require additional dependencies.  We can provide a part of the
    # "lookup chain" in formatters.py for people who want the dependency.

    # 'json' formats arbitrary data dictionary nodes as JSON strings.  'json'
    # and 'js-string' are identical (since a JavaScript string *is* JSON).  The
    # latter is meant to serve as extra documentation when you want a string
    # argument only, which is a common case.
    'json': None,
    'js-string': None,
    }


def SplitMeta(meta):
  """Split and validate metacharacters.

  Example: '{}' -> ('{', '}')

  This is public so the syntax highlighter and other tools can use it.
  """
  n = len(meta)
  if n % 2 == 1:
    raise ConfigurationError(
        '%r has an odd number of metacharacters' % meta)
  return meta[:n/2], meta[n/2:]


_token_re_cache = {}

def MakeTokenRegex(meta_left, meta_right):
  """Return a (compiled) regular expression for tokenization.

  Args:
    meta_left, meta_right: e.g. '{' and '}'

  - The regular expressions are memoized.
  - This function is public so the syntax highlighter can use it.
  """
  key = meta_left, meta_right
  if key not in _token_re_cache:
    # Need () for re.split
    _token_re_cache[key] = re.compile(
        r'(' +
        re.escape(meta_left) +
        # For simplicity, we allow all characters except newlines inside
        # metacharacters ({} / [])
        r'.+?' +
        re.escape(meta_right) +
        # Some declarations also include the newline at the end -- that is, we
        # don't expand the newline in that case
        r'\n?)')
  return _token_re_cache[key]
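# Because the pattern above is wrapped in a capturing group, re.split() keeps
# the directives in the result; even indices are literal text and odd indices
# are {...} tokens (CompileTemplate below relies on this), e.g.:
#
#   >>> MakeTokenRegex('{', '}').split('a {b} c')
#   ['a ', '{b}', ' c']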

def CompileTemplate(
    template_str, builder=None, meta='{}', format_char='|',
    more_formatters=lambda x: None, default_formatter='str'):
  """Compile the template string, calling methods on the 'program builder'.

  Args:
    template_str: The template string.  It should not have any compilation
        options in the header -- those are parsed by FromString/FromFile
    builder: Something with the interface of _ProgramBuilder
    meta: The metacharacters to use
    more_formatters: A function which maps format strings to *other
        functions*.  The resulting functions should take a data dictionary
        value (a JSON atom, or a dictionary itself), and return a string to be
        shown on the page.  These are often used for HTML escaping, etc.
        There is a default set of formatters available if more_formatters is
        not passed.
    default_formatter: The formatter to use for substitutions that are missing
        a formatter.  The 'str' formatter is the "default default" -- it just
        tries to convert the context value to a string in some unspecified
        manner.

  Returns:
    The compiled program (obtained from the builder)

  Raises:
    The various subclasses of CompilationError.  For example, if
    default_formatter=None, and a variable is missing a formatter, then
    MissingFormatter is raised.

  This function is public so it can be used by other tools, e.g. a syntax
  checking tool run before submitting a template to source control.
  """
  builder = builder or _ProgramBuilder(more_formatters)
  meta_left, meta_right = SplitMeta(meta)

  # : is meant to look like Python 3000 formatting {foo:.3f}.  According to
  # PEP 3101, that's also what .NET uses.
  # | is more readable, but, more importantly, reminiscent of pipes, which is
  # useful for multiple formatters, e.g. {name|js-string|html}
  if format_char not in (':', '|'):
    raise ConfigurationError(
        'Only format characters : and | are accepted (got %r)' % format_char)

  # Need () for re.split
  token_re = MakeTokenRegex(meta_left, meta_right)
  tokens = token_re.split(template_str)

  # If we go to -1, then we got too many {end}.  If we end at 1, then we're
  # missing an {end}.
  balance_counter = 0

  for i, token in enumerate(tokens):

    # By the definition of re.split, even tokens are literal strings, and odd
    # tokens are directives.
    if i % 2 == 0:
      # A literal string
      if token:
        builder.Append(token)

    else:
      had_newline = False
      if token.endswith('\n'):
        token = token[:-1]
        had_newline = True

      assert token.startswith(meta_left), token
      assert token.endswith(meta_right), token

      token = token[len(meta_left) : -len(meta_right)]

      # It's a comment
      if token.startswith('#'):
        continue

      # It's a "keyword" directive
      if token.startswith('.'):
        token = token[1:]

      literal = {
          'meta-left': meta_left,
          'meta-right': meta_right,
          'space': ' ',
          }.get(token)

      if literal is not None:
        builder.Append(literal)
        continue

      match = _SECTION_RE.match(token)

      if match:
        repeated, _, section_name = match.groups()
        builder.NewSection(repeated, section_name)
        balance_counter += 1
        continue

      if token in ('or', 'alternates with'):
        builder.NewClause(token)
        continue

      if token == 'end':
        balance_counter -= 1
        if balance_counter < 0:
          # TODO: Show some context for errors
          raise TemplateSyntaxError(
              'Got too many %send%s statements.  You may have mistyped an '
              "earlier 'section' or 'repeated section' directive."
              % (meta_left, meta_right))
        builder.EndSection()
        continue

      # Now we know the directive is a substitution.
      parts = token.split(format_char)
      if len(parts) == 1:
        if default_formatter is None:
          raise MissingFormatter('This template requires explicit formatters.')
        # If no formatter is specified, the default is the 'str' formatter,
        # which the user can define however they desire.
        name = token
        formatters = [default_formatter]
      else:
        name = parts[0]
        formatters = parts[1:]

      builder.AppendSubstitution(name, formatters)
      if had_newline:
        builder.Append('\n')

  if balance_counter != 0:
    raise TemplateSyntaxError('Got too few %send%s statements' %
                              (meta_left, meta_right))

  return builder.Root()


_OPTION_RE = re.compile(r'^([a-zA-Z\-]+):\s*(.*)')
# TODO: whitespace mode, etc.
_OPTION_NAMES = ['meta', 'format-char', 'default-formatter']


def FromString(s, more_formatters=lambda x: None, _constructor=None):
  """Like FromFile, but takes a string."""

  f = cStringIO.StringIO(s)
  return FromFile(f, more_formatters=more_formatters, _constructor=_constructor)


def FromFile(f, more_formatters=lambda x: None, _constructor=None):
  """Parse a template from a file, using a simple file format.

  This is useful when you want to include template options in a data file,
  rather than in the source code.

  The format is similar to HTTP or E-mail headers.  The first lines of the
  file can specify template options, such as the metacharacters to use.  One
  blank line must separate the options from the template body.

  Example:

    default-formatter: none
    meta: {{}}
    format-char: :
    <blank line required>
    Template goes here: {{variable:html}}

  Args:
    f: A file handle to read from.  Caller is responsible for opening and
        closing it.
  """
  _constructor = _constructor or Template

  options = {}

  # Parse lines until the first one that doesn't look like an option
  while 1:
    line = f.readline()
    match = _OPTION_RE.match(line)
    if match:
      name, value = match.group(1), match.group(2)

      # Accept something like 'Default-Formatter: raw'.  This syntax is like
      # HTTP/E-mail headers.
      name = name.lower()

      if name in _OPTION_NAMES:
        name = name.replace('-', '_')
        value = value.strip()
        if name == 'default_formatter' and value.lower() == 'none':
          value = None
        options[name] = value
      else:
        break
    else:
      break

  if options:
    if line.strip():
      raise CompilationError(
          'Must be one blank line between template options and body (got %r)'
          % line)
    body = f.read()
  else:
    # There were no options, so no blank line is necessary.
    body = line + f.read()

  return _constructor(body, more_formatters=more_formatters, **options)


class Template(object):
  """Represents a compiled template.

  Like many template systems, the template string is compiled into a program,
  and then it can be expanded any number of times.  For example, in a web app,
  you can compile the templates once at server startup, and use the expand()
  method at request handling time.  expand() uses the compiled representation.

  There are various options for controlling parsing -- see CompileTemplate.
  Don't go crazy with metacharacters.  {}, [], {{}} or <> should cover nearly
  any circumstance, e.g. generating HTML, CSS, XML, JavaScript, C programs,
  text files, etc.
  """

  def __init__(self, template_str, builder=None, **compile_options):
    """
    Args:
      template_str: The template string.

    It also accepts all the compile options that CompileTemplate does.
    """
    self._program = CompileTemplate(
        template_str, builder=builder, **compile_options)

  #
  # Public API
  #

  def render(self, data_dict, callback):
    """Low level method to expand the template piece by piece.

    Args:
      data_dict: The JSON data dictionary.
      callback: A callback which should be called with each expanded token.

    Example: You can pass 'f.write' as the callback to write directly to a
    file handle.
    """
    _Execute(self._program.Statements(), _ScopedContext(data_dict), callback)

  def expand(self, *args, **kwargs):
    """Expands the template with the given data dictionary, returning a string.

    This is a small wrapper around render(), and is the most convenient
    interface.

    Args:
      The JSON data dictionary.  Like the builtin dict() constructor, it can
      take a single dictionary as a positional argument, or arbitrary keyword
      arguments.

    Returns:
      The return value could be a str() or unicode() instance, depending on
      the type of the template string passed in, and what types the strings
      in the dictionary are.
    """
    if args:
      if len(args) == 1:
        data_dict = args[0]
      else:
        raise TypeError(
            'expand() only takes 1 positional argument (got %s)' % args)
    else:
      data_dict = kwargs

    tokens = []
    self.render(data_dict, tokens.append)
    return ''.join(tokens)

  def tokenstream(self, data_dict):
    """Yields a list of tokens resulting from expansion.

    This may be useful for WSGI apps.  NOTE: In the current implementation,
    the entire expanded template must be stored in memory.

    NOTE: This is a generator, but JavaScript doesn't have generators.
    """
    tokens = []
    self.render(data_dict, tokens.append)
    for token in tokens:
      yield token
|
||||
|
||||
|
||||
def _DoRepeatedSection(args, context, callback):
|
||||
"""{repeated section foo}"""
|
||||
|
||||
block = args
|
||||
|
||||
if block.section_name == '@':
|
||||
# If the name is @, we stay in the enclosing context, but assume it's a
|
||||
# list, and repeat this block many times.
|
||||
items = context.CursorValue()
|
||||
if type(items) is not list:
|
||||
raise EvaluationError('Expected a list; got %s' % type(items))
|
||||
pushed = False
|
||||
else:
|
||||
items = context.PushSection(block.section_name)
|
||||
pushed = True
|
||||
|
||||
# TODO: what if items is a dictionary?
|
||||
|
||||
if items:
|
||||
last_index = len(items) - 1
|
||||
statements = block.Statements()
|
||||
alt_statements = block.Statements('alternates with')
|
||||
# NOTE: Iteration mutates the context!
|
||||
for i, _ in enumerate(context):
|
||||
# Execute the statements in the block for every item in the list. Execute
|
||||
# the alternate block on every iteration except the last.
|
||||
# Each item could be an atom (string, integer, etc.) or a dictionary.
|
||||
_Execute(statements, context, callback)
|
||||
if i != last_index:
|
||||
_Execute(alt_statements, context, callback)
|
||||
|
||||
else:
|
||||
_Execute(block.Statements('or'), context, callback)
|
||||
|
||||
if pushed:
|
||||
context.Pop()
|
||||
|
||||
|
||||
def _DoSection(args, context, callback):
|
||||
"""{section foo}"""
|
||||
|
||||
block = args
|
||||
# If a section isn't present in the dictionary, or is None, then don't show it
|
||||
# at all.
|
||||
if context.PushSection(block.section_name):
|
||||
_Execute(block.Statements(), context, callback)
|
||||
context.Pop()
|
||||
else: # Empty list, None, False, etc.
|
||||
context.Pop()
|
||||
_Execute(block.Statements('or'), context, callback)
|
||||
|
||||
|
||||
def _DoSubstitute(args, context, callback):
|
||||
"""Variable substitution, e.g. {foo}"""
|
||||
|
||||
name, formatters = args
|
||||
|
||||
# So we can have {.section is_new}new since {@}{.end}. Hopefully this idiom
|
||||
# is OK.
|
||||
if name == '@':
|
||||
value = context.CursorValue()
|
||||
else:
|
||||
try:
|
||||
value = context.Lookup(name)
|
||||
except TypeError, e:
|
||||
raise EvaluationError(
|
||||
'Error evaluating %r in context %r: %r' % (name, context, e))
|
||||
|
||||
for f in formatters:
|
||||
try:
|
||||
value = f(value)
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except Exception, e:
|
||||
raise EvaluationError(
|
||||
'Formatting value %r with formatter %s raised exception: %r' %
|
||||
(value, formatters, e), original_exception=e)
|
||||
|
||||
# TODO: Require a string/unicode instance here?
|
||||
if value is None:
|
||||
raise EvaluationError('Evaluating %r gave None value' % name)
|
||||
callback(value)
|
||||
|
||||
|
||||
def _Execute(statements, context, callback):
|
||||
"""Execute a bunch of template statements in a ScopedContext.
|
||||
|
||||
Args:
|
||||
callback: Strings are "written" to this callback function.
|
||||
|
||||
This is called in a mutually recursive fashion.
|
||||
"""
|
||||
|
||||
for i, statement in enumerate(statements):
|
||||
if isinstance(statement, basestring):
|
||||
callback(statement)
|
||||
else:
|
||||
# In the case of a substitution, args is a pair (name, formatter).
|
||||
# In the case of a section, it's a _Section instance.
|
||||
try:
|
||||
func, args = statement
|
||||
func(args, context, callback)
|
||||
except UndefinedVariable, e:
|
||||
# Show context for statements
|
||||
start = max(0, i-3)
|
||||
end = i+3
|
||||
e.near = statements[start:end]
|
||||
raise
|
||||
|
||||
|
||||
def expand(template_str, dictionary, **kwargs):
|
||||
"""Free function to expands a template string with a data dictionary.
|
||||
|
||||
This is useful for cases where you don't care about saving the result of
|
||||
compilation (similar to re.match('.*', s) vs DOT_STAR.match(s))
|
||||
"""
|
||||
t = Template(template_str, **kwargs)
|
||||
return t.expand(dictionary)
|
|
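
A minimal usage sketch for the template module above, assuming it is
importable as "jsontemplate" (an assumed module name; the import path is not
part of this diff). It exercises the options-header format parsed by
FromString/FromFile as well as the free expand() function:

    import jsontemplate

    # Options header, one blank line, then the body -- as FromFile expects.
    t = jsontemplate.FromString(
        "meta: {{}}\n"
        "format-char: :\n"
        "\n"
        "Hello {{name}}!\n")
    print t.expand({"name": "world"})   # -> Hello world!

    # One-shot expansion without keeping the compiled Template around.
    print jsontemplate.expand("Bye {name}", {"name": "world"})
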
@ -0,0 +1,10 @@
We've received a request to change the password for user
{username} for the Bespin project server at {base_url}.

If you didn't make this request, please ignore this message and no
change will take place.

To change your password, click the link below and you'll be given a chance
to enter a new password.

{change_url}
@ -0,0 +1,11 @@
We've received a request to retrieve your username for the Bespin project
server at {base_url}.

If you didn't request this, please ignore this message.

Your username is:
{.repeated section usernames}
{username}
{.end}

You can now go to {base_url} and login!
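
A sketch of how the username-reminder template above expands via the Template
class from this commit; the jsontemplate import name and the template file
name are assumptions for illustration:

    import jsontemplate

    body = open("username_reminder.txt").read()  # hypothetical file name
    t = jsontemplate.Template(body)
    print t.expand({
        "base_url": "https://bespin.example.com/",
        "usernames": [{"username": "alice"}, {"username": "bob"}],
    })
    # {.repeated section usernames} expands its block once per dict in the
    # 'usernames' list, so each username is printed on its own line.
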
@ -0,0 +1,28 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#
(Diff not shown because of the file's large size.)
@ -0,0 +1,107 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

from bespin.database import User, get_project

import logging

log = logging.getLogger("mobwrite.integrate")


def get_username_from_handle(handle):
    """The handle added by the user (in controllers.py) is of the form
    {User.username}:{ip address}, which is what is expected for reporting on
    collaborators, but error messages just want the username part"""
    (requester, sep, ipstr) = handle.partition(':')
    return requester


class Access:
    """Constants for use by Persister"""
    Denied = 1
    ReadOnly = 2
    ReadWrite = 3


class Persister:
    """A plug-in for mobwrite_daemon that diverts calls to Bespin"""

    def load(self, name):
        """Load a temporary file by extracting the project from the filename
        and calling project.get_temp_file"""
        try:
            (project, path) = self._split(name)
            log.debug("loading temp file for: %s/%s" % (project.name, path))
            bytes = project.get_temp_file(path)
            # mobwrite gets things into unicode by doing bytes.encode("utf-8")
            # which uses the 'strict' error handling technique, which raises
            # on failure. Since we're not tracking content-type on the server,
            # we could have anything at this point and we don't want to die,
            # so we fudge the issue by ignoring things that are not utf-8.
            return bytes.decode("utf-8", "ignore")
        except:
            log.exception("Error in Persister.load() for name=%s", name)
            return ""

    def save(self, name, contents):
        """Save a temporary file by extracting the project from the filename
        and calling project.save_temp_file"""
        try:
            (project, path) = self._split(name)
            log.debug("saving to temp file for: %s/%s" % (project.name, path))
            project.save_temp_file(path, contents)
        except:
            log.exception("Error in Persister.save() for name=%s", name)

    def check_access(self, name, handle):
        """Check to see what level of access user has over an owner's project.
        Returns one of: Access.Denied, Access.ReadOnly or Access.ReadWrite.
        Note that if user==owner then no check of project_name is performed,
        and Access.ReadWrite is returned straight away"""
        try:
            (user_name, project_name, path) = name.split("/", 2)

            requester = get_username_from_handle(handle)
            user = User.find_user(requester)
            owner = User.find_user(user_name)

            if user == owner:
                return Access.ReadWrite
            if owner.is_project_shared(project_name, user, require_write=True):
                return Access.ReadWrite
            if owner.is_project_shared(project_name, user, require_write=False):
                return Access.ReadOnly
            return Access.Denied
        except Exception:
            log.exception("Error in Persister.check_access() for name=%s, handle=%s",
                          name, handle)
            return Access.Denied

    def _split(self, name):
        """Cut a name into the username, projectname, path parts and look up
        a project under the given user"""
        (user_name, project_name, path) = name.split("/", 2)
        user = User.find_user(user_name)
        project = get_project(user, user, project_name)
        return (project, path)
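
An illustrative call into the Persister above (the names and handle are
made-up values; mobwrite passes document names of the form
"{owner}/{project}/{path}" and handles of the form "{username}:{ip}"):

    persister = Persister()

    # "alice" asks for bob's file; access depends on whether bob shared the
    # "sandbox" project with her, and whether write access was granted.
    level = persister.check_access("bob/sandbox/readme.txt", "alice:10.0.0.7")
    if level == Access.ReadWrite:
        persister.save("bob/sandbox/readme.txt", u"new contents")
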
@ -0,0 +1,300 @@
#!/usr/bin/python
"""MobWrite - Real-time Synchronization and Collaboration Service

Copyright 2009 Google Inc.
http://code.google.com/p/google-mobwrite/

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

"""Core functions for a MobWrite client/server in Python."""

__author__ = "fraser@google.com (Neil Fraser)"

import datetime
import diff_match_patch as dmp_module
import logging
import re
import simplejson

# Global Diff/Match/Patch object.
DMP = dmp_module.diff_match_patch()
DMP.Diff_Timeout = 0.1

# Demo usage should limit the maximum size of any text.
# Set to 0 to disable limit.
MAX_CHARS = 0

# Delete any view which hasn't been accessed in two minutes (upstream used
# half an hour).
# Mozilla: Keeping views for longer than necessary wastes memory, and makes
# the Collaborators view more likely to be out of date.
TIMEOUT_VIEW = datetime.timedelta(minutes=2)

# Delete any text which hasn't been accessed in a day.
# TIMEOUT_TEXT should be longer than TIMEOUT_VIEW.
TIMEOUT_TEXT = datetime.timedelta(days=1)

# Delete any buffer which hasn't been written to in a quarter of an hour.
TIMEOUT_BUFFER = datetime.timedelta(minutes=15)

LOG = logging.getLogger("mobwrite")
# Choose from: CRITICAL, ERROR, WARNING, INFO, DEBUG
LOG.setLevel(logging.INFO)


class TextObj:
  # An object which stores a text.

  # Object properties:
  # .name    - The unique name for this text, e.g. 'proposal'
  # .text    - The text itself.
  # .changed - Has the text changed since the last time it was saved.

  def __init__(self, *args, **kwargs):
    # Setup this object
    self.name = kwargs.get("name")
    self.text = None
    self.changed = False

  def setText(self, newtext):
    # Scrub the text before setting it.
    if newtext is not None:
      # Keep the text within the length limit.
      if MAX_CHARS != 0 and len(newtext) > MAX_CHARS:
        newtext = newtext[-MAX_CHARS:]
        LOG.warning("Truncated text to %d characters." % MAX_CHARS)
      # Normalize linebreaks to LF.
      newtext = re.sub(r"(\r\n|\r|\n)", "\n", newtext)
    if self.text != newtext:
      self.text = newtext
      self.changed = True


class ViewObj:
  # An object which contains one user's view of one text.

  # Object properties:
  # .username - The name for the user, e.g. 'fraser'
  # .filename - The name for the file, e.g. 'proposal'
  # .shadow - The last version of the text sent to client.
  # .backup_shadow - The previous version of the text sent to client.
  # .shadow_client_version - The client's version for the shadow (n).
  # .shadow_server_version - The server's version for the shadow (m).
  # .backup_shadow_server_version - The server's version for the backup
  #     shadow (m).

  def __init__(self, *args, **kwargs):
    # Setup this object
    self.username = kwargs["username"]
    self.filename = kwargs["filename"]
    self.shadow_client_version = kwargs.get("shadow_client_version", 0)
    self.shadow_server_version = kwargs.get("shadow_server_version", 0)
    self.backup_shadow_server_version = kwargs.get("backup_shadow_server_version", 0)
    self.shadow = kwargs.get("shadow", u"")
    self.backup_shadow = kwargs.get("backup_shadow", u"")


class MobWrite:

  def parseRequest(self, data):
    """Parse the raw MobWrite commands into a list of specific actions.
    See: http://code.google.com/p/google-mobwrite/wiki/Protocol

    Args:
      data: A multi-line string of MobWrite commands.

    Returns:
      A list of actions, each action is a dictionary. Typical action:
      {"username":"fred",
       "filename":"report",
       "mode":"delta",
       "data":"=10+Hello-7=2",
       "force":False,
       "server_version":3,
       "client_version":3,
       "echo_username":False
      }
    """
    # Passing a Unicode string is an easy way to cause numerous subtle bugs.
    if type(data) != str:
      LOG.critical("parseRequest data type is %s" % type(data))
      return []
    if not (data.endswith("\n\n") or data.endswith("\r\r") or
            data.endswith("\n\r\n\r") or data.endswith("\r\n\r\n")):
      # There must be a linefeed followed by a blank line.
      # Truncated data. Abort.
      LOG.warning("Truncated data: '%s'" % data)
      return []

    # Parse the lines
    actions = []
    username = None
    filename = None
    handle = None
    metadata = {}
    server_version = None
    echo_username = False
    echo_collaborators = False
    for line in data.splitlines():
      if not line:
        # Terminate on blank line.
        break
      if line.find(":") != 1:
        # Invalid line.
        continue
      (name, value) = (line[:1], line[2:])

      # Parse out a version number for file, delta or raw.
      version = None
      if "FfDdRr".find(name) != -1:
        div = value.find(":")
        if div > 0:
          try:
            version = int(value[:div])
          except ValueError:
            LOG.warning("Invalid version number: %s" % line)
            continue
          value = value[div + 1:]
        else:
          LOG.warning("Missing version number: %s" % line)
          continue

      if name == "b" or name == "B":
        # Decode and store this entry into a buffer.
        try:
          (name, size, index, text) = value.split(" ", 3)
          size = int(size)
          index = int(index)
        except ValueError:
          LOG.warning("Invalid buffer format: %s" % value)
          continue
        # Store this buffer fragment.
        text = self.feedBuffer(name, size, index, text)
        # Check to see if the buffer is complete. If so, execute it.
        if text:
          LOG.info("Executing buffer: %s_%d" % (name, size))
          # Duplicate last character. Should be a line break.
          # Note that buffers are not intended to be mixed with other commands.
          return self.parseRequest(text + text[-1])

      elif name == "u" or name == "U":
        # Remember the username.
        username = value
        # Client may request explicit usernames in response.
        echo_username = (name == "U")

      elif name == "h" or name == "H":
        # Remember the handle.
        handle = value
        # Client may request explicit collaborator handles in response.
        echo_collaborators = (name == "H")

      elif name == "m":
        try:
          metadata = simplejson.loads(value)
        except:
          LOG.info("Error deserializing clientData. Ignoring.")
          # TODO: We should run a check that the passed object really is an
          # object and not an array, string, number, etc.
          metadata = {}

      elif name == "f" or name == "F":
        # Remember the filename and version.
        filename = value
        server_version = version

      elif name == "n" or name == "N":
        # Nullify this file.
        filename = value
        if username and filename:
          action = {}
          action["username"] = username
          action["filename"] = filename
          action["mode"] = "null"
          actions.append(action)

      else:
        # A delta or raw action.
        action = {}
        if name == "d" or name == "D":
          action["mode"] = "delta"
        elif name == "r" or name == "R":
          action["mode"] = "raw"
        elif name == "x":
          action["mode"] = "close"
          action["to_close"] = value
        else:
          action["mode"] = None
        if name.isupper():
          action["force"] = True
        else:
          action["force"] = False
        action["server_version"] = server_version
        action["client_version"] = version
        action["data"] = value
        action["handle"] = handle
        action["metadata"] = metadata
        action["echo_username"] = echo_username
        action["echo_collaborators"] = echo_collaborators
        if username and action["mode"]:
          action["username"] = username
          action["filename"] = filename
          actions.append(action)
        else:
          LOG.warning("Skipping " + str(action) + ": username=" + str(username) + ", filename=" + str(filename) + ", action[mode]=" + str(action["mode"]))

    return actions

  def applyPatches(self, viewobj, diffs, action):
    """Apply a set of patches onto the view and text objects. This function
    must be enclosed in a lock or transaction since the text object is shared.

    Args:
      viewobj: The user's view to be updated (viewobj.textobj, the shared
          server text, is updated as well).
      diffs: List of diffs to apply to both the view and the server.
      action: Parameters for how forcefully to make the patch; may be modified.
    """
    # Expand the fragile diffs into a full set of patches.
    patches = DMP.patch_make(viewobj.shadow, diffs)

    # First, update the client's shadow.
    viewobj.shadow = DMP.diff_text2(diffs)
    viewobj.backup_shadow = viewobj.shadow
    viewobj.backup_shadow_server_version = viewobj.shadow_server_version

    # Second, deal with the server's text.
    textobj = viewobj.textobj
    if textobj.text is None:
      # A view is sending a valid delta on a file we've never heard of.
      textobj.setText(viewobj.shadow)
      action["force"] = False
      LOG.debug("Set content: '%s@%s'" %
                (viewobj.username, viewobj.filename))
    else:
      if action["force"]:
        # Clobber the server's text if a change was received.
        if patches:
          mastertext = viewobj.shadow
          LOG.debug("Overwrote content: '%s@%s'" %
                    (viewobj.username, viewobj.filename))
        else:
          mastertext = textobj.text
      else:
        (mastertext, results) = DMP.patch_apply(patches, textobj.text)
        LOG.debug("Patched (%s): '%s@%s'" %
                  (",".join(["%s" % (x) for x in results]),
                   viewobj.username, viewobj.filename))
      textobj.setText(mastertext)
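
A quick illustration of the wire format parseRequest() above expects (the
user and file names are invented; note the trailing blank line that
terminates a request):

    request = (
        "u:fred\n"
        "f:3:report\n"
        "d:3:=10+Hello-7=2\n"
        "\n")
    actions = MobWrite().parseRequest(request)
    # -> [{"username": "fred", "filename": "report", "mode": "delta",
    #      "data": "=10+Hello-7=2", "force": False,
    #      "server_version": 3, "client_version": 3, ...}]
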
@ -0,0 +1,923 @@
#!/usr/bin/python
"""MobWrite - Real-time Synchronization and Collaboration Service

Copyright 2006 Google Inc.
http://code.google.com/p/google-mobwrite/

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

"""This file is the server-side daemon.

Runs in the background listening to a port, accepting synchronization
sessions from clients.
"""

__author__ = "fraser@google.com (Neil Fraser)"

import datetime
import glob
import os
import socket
import SocketServer
import sys
import time
import thread
import urllib
import simplejson

import mobwrite_core
from bespin.mobwrite.integrate import Persister, Access, get_username_from_handle

# Demo usage should limit the maximum number of connected views.
# Set to 0 to disable limit.
MAX_VIEWS = 10000

# How should data be stored.
MEMORY = 0
FILE = 1
BDB = 2
PERSISTER = 3
STORAGE_MODE = PERSISTER

# Directory for text storage in FILE and BDB modes (upstream mobwrite
# default; the constant is referenced below but was missing here).
DATA_DIR = "./data"

# Port to listen on.
LOCAL_PORT = 3017

# If the Telnet connection stalls for more than 2 seconds, give up.
TIMEOUT_TELNET = 2.0

# Restrict all Telnet connections to come from this location.
# Set to "" to allow connections from anywhere.
CONNECTION_ORIGIN = "127.0.0.1"

# Dictionary of all text objects.
texts = {}

# Berkeley Databases
texts_db = None
lasttime_db = None

# Lock to prevent simultaneous changes to the texts dictionary.
lock_texts = thread.allocate_lock()

# A special mode to save on every change, which should reduce the impact of
# server crashes and restarts at the expense of server load.
PARANOID_SAVE = True
class TextObj(mobwrite_core.TextObj):
  # A persistent object which stores a text.

  # Object properties:
  # .lock - Access control for writing to the text on this object.
  # .views - Views currently connected to this text.
  # .lasttime - The last time that this text was modified.

  # Inherited properties:
  # .name    - The unique name for this text, e.g. 'proposal'.
  # .text    - The text itself.
  # .changed - Has the text changed since the last time it was saved.

  def __init__(self, *args, **kwargs):
    # Setup this object
    mobwrite_core.TextObj.__init__(self, *args, **kwargs)
    self.persister = kwargs.get("persister")
    self.views = []
    self.lasttime = datetime.datetime.now()
    self.lock = thread.allocate_lock()
    self.load()

    # lock_texts must be acquired by the caller to prevent simultaneous
    # creations of the same text.
    assert lock_texts.locked(), "Can't create TextObj unless locked."
    global texts
    texts[self.name] = self

  def __str__(self):
    return ("TextObj[len(text)=" + str(len(self.text or "")) +
            ", chgd=" + str(self.changed) +
            ", len(views)=" + str(len(self.views)) + "]")

  def setText(self, newText, justLoaded=False):
    mobwrite_core.TextObj.setText(self, newText)
    self.lasttime = datetime.datetime.now()
    if self.changed and PARANOID_SAVE and not justLoaded:
      if self.lock.locked():
        self.save()
      else:
        self.lock.acquire()
        try:
          self.save()
        finally:
          self.lock.release()

  def cleanup(self):
    # General cleanup task.
    if len(self.views) > 0:
      return
    terminate = False
    # Lock must be acquired to prevent simultaneous deletions.
    mobwrite_core.LOG.debug("text.lock.acquire on %s", self.name)
    self.lock.acquire()
    try:
      if STORAGE_MODE == MEMORY:
        if self.lasttime < datetime.datetime.now() - mobwrite_core.TIMEOUT_TEXT:
          mobwrite_core.LOG.info("Expired text: '%s'" % self.name)
          terminate = True
      else:
        # Delete myself from memory if there are no attached views.
        mobwrite_core.LOG.info("Unloading text: '%s'" % self.name)
        terminate = True

      if terminate:
        # Save to disk/database.
        self.save()
        # Terminate in-memory copy.
        global texts
        mobwrite_core.LOG.debug("lock_texts.acquire")
        lock_texts.acquire()
        try:
          del texts[self.name]
        except KeyError:
          mobwrite_core.LOG.error("Text object not in text list: '%s'" % self.name)
        finally:
          mobwrite_core.LOG.debug("lock_texts.release")
          lock_texts.release()
      else:
        if self.changed:
          self.save()
    finally:
      mobwrite_core.LOG.debug("text.lock.release on %s", self.name)
      self.lock.release()

  def load(self):
    # Load the text object from non-volatile storage.
    if STORAGE_MODE == PERSISTER:
      contents = self.persister.load(self.name)
      self.setText(contents, justLoaded=True)
      self.changed = False

    if STORAGE_MODE == FILE:
      # Load the text (if present) from disk.
      filename = "%s/%s.txt" % (DATA_DIR, urllib.quote(self.name, ""))
      if os.path.exists(filename):
        try:
          infile = open(filename, "r")
          self.setText(infile.read().decode("utf-8"))
          infile.close()
          self.changed = False
          mobwrite_core.LOG.info("Loaded file: '%s'" % filename)
        except:
          mobwrite_core.LOG.critical("Can't read file: %s" % filename)
      else:
        self.setText(None)
        self.changed = False

    if STORAGE_MODE == BDB:
      # Load the text (if present) from database.
      if texts_db.has_key(self.name):
        self.setText(texts_db[self.name].decode("utf-8"))
        mobwrite_core.LOG.info("Loaded from DB: '%s'" % self.name)
      else:
        self.setText(None)
      self.changed = False

  def save(self):
    # Save the text object to non-volatile storage.
    # Lock must be acquired by the caller to prevent simultaneous saves.
    assert self.lock.locked(), "Can't save unless locked."

    if STORAGE_MODE == PERSISTER:
      self.persister.save(self.name, self.text)
      self.changed = False

    if STORAGE_MODE == FILE:
      # Save the text to disk.
      filename = "%s/%s.txt" % (DATA_DIR, urllib.quote(self.name, ''))
      if self.text is None:
        # Nullified text equates to no file.
        if os.path.exists(filename):
          try:
            os.remove(filename)
            mobwrite_core.LOG.info("Nullified file: '%s'" % filename)
          except:
            mobwrite_core.LOG.critical("Can't nullify file: %s" % filename)
      else:
        try:
          outfile = open(filename, "w")
          outfile.write(self.text.encode("utf-8"))
          outfile.close()
          self.changed = False
          mobwrite_core.LOG.info("Saved file: '%s'" % filename)
        except:
          mobwrite_core.LOG.critical("Can't save file: %s" % filename)

    if STORAGE_MODE == BDB:
      # Save the text to database.
      if self.text is None:
        if lasttime_db.has_key(self.name):
          del lasttime_db[self.name]
        if texts_db.has_key(self.name):
          del texts_db[self.name]
        mobwrite_core.LOG.info("Nullified from DB: '%s'" % self.name)
      else:
        mobwrite_core.LOG.info("Saved to DB: '%s'" % self.name)
        texts_db[self.name] = self.text.encode("utf-8")
        lasttime_db[self.name] = str(int(time.time()))
      self.changed = False
def fetch_textobj(name, view, persister):
  # Retrieve the named text object. Create it if it doesn't exist.
  # Add the given view into the text object's list of connected views.
  # Don't let two simultaneous creations happen, or a deletion during a
  # retrieval.
  mobwrite_core.LOG.debug("lock_texts.acquire")
  lock_texts.acquire()
  try:
    if texts.has_key(name):
      textobj = texts[name]
      mobwrite_core.LOG.debug("Accepted text: '%s'" % name)
    else:
      textobj = TextObj(name=name, persister=persister)
      mobwrite_core.LOG.debug("Creating text: '%s'" % name)
    textobj.views.append(view)
  finally:
    mobwrite_core.LOG.debug("lock_texts.release")
    lock_texts.release()
  return textobj


# Dictionary of all view objects.
views = {}

# Lock to prevent simultaneous changes to the views dictionary.
lock_views = thread.allocate_lock()


class ViewObj(mobwrite_core.ViewObj):
  # A persistent object which contains one user's view of one text.

  # Object properties:
  # .edit_stack - List of unacknowledged edits sent to the client.
  # .lasttime - The last time that a web connection serviced this object.
  # .lock - Access control for writing to the text on this object.
  # .textobj - The shared text object being worked on.

  # Inherited properties:
  # .username - The name for the user, e.g. 'fraser'
  # .filename - The name for the file, e.g. 'proposal'
  # .shadow - The last version of the text sent to client.
  # .backup_shadow - The previous version of the text sent to client.
  # .shadow_client_version - The client's version for the shadow (n).
  # .shadow_server_version - The server's version for the shadow (m).
  # .backup_shadow_server_version - The server's version for the backup
  #     shadow (m).

  def __init__(self, *args, **kwargs):
    # Setup this object
    mobwrite_core.ViewObj.__init__(self, *args, **kwargs)
    self.handle = kwargs.get("handle")
    self.metadata = kwargs.get("metadata")
    self.edit_stack = []
    self.lasttime = datetime.datetime.now()
    self.lock = thread.allocate_lock()
    self.textobj = fetch_textobj(self.filename, self, kwargs.get("persister"))

    # lock_views must be acquired by the caller to prevent simultaneous
    # creations of the same view.
    assert lock_views.locked(), "Can't create ViewObj unless locked."
    global views
    views[(self.username, self.filename)] = self

  def __str__(self):
    return ("ViewObj[scv=" + str(self.shadow_client_version) +
            ", ssv=" + str(self.shadow_server_version) +
            ", handle=" + str(self.handle) +
            ", textobj.name=" + self.textobj.name + "]")

  def cleanup(self):
    # General cleanup task.
    # Delete myself if I've been idle too long.
    # Don't delete during a retrieval.
    mobwrite_core.LOG.debug("lock_views.acquire")
    lock_views.acquire()
    try:
      if self.lasttime < datetime.datetime.now() - mobwrite_core.TIMEOUT_VIEW:
        mobwrite_core.LOG.info("Idle out: '%s@%s'" % (self.username, self.filename))
        global views
        try:
          del views[(self.username, self.filename)]
        except KeyError:
          mobwrite_core.LOG.error("View object not in view list: '%s %s'" % (self.username, self.filename))
        try:
          self.textobj.views.remove(self)
        except ValueError:
          mobwrite_core.LOG.error("self not in views list: '%s %s'" % (self.username, self.filename))
    finally:
      mobwrite_core.LOG.debug("lock_views.release")
      lock_views.release()

  def nullify(self):
    self.lasttime = datetime.datetime.min
    self.cleanup()


def fetch_viewobj(username, filename, handle=None, metadata=None, persister=None):
  # Retrieve the named view object. Create it if it doesn't exist.
  # Don't let two simultaneous creations happen, or a deletion during a
  # retrieval.
  mobwrite_core.LOG.debug("lock_views.acquire")
  lock_views.acquire()
  try:
    key = (username, filename)
    if views.has_key(key):
      viewobj = views[key]
      viewobj.lasttime = datetime.datetime.now()
      viewobj.metadata = metadata
      mobwrite_core.LOG.debug("Accepting view: '%s@%s'" % key)
    else:
      if MAX_VIEWS != 0 and len(views) > MAX_VIEWS:
        viewobj = None
        mobwrite_core.LOG.critical("Overflow: Can't create new view.")
      else:
        viewobj = ViewObj(username=username, filename=filename, handle=handle,
                          metadata=metadata, persister=persister)
        mobwrite_core.LOG.debug("Creating view: '%s@%s'" % key)
  finally:
    mobwrite_core.LOG.debug("lock_views.release")
    lock_views.release()
  return viewobj
# Dictionary of all buffer objects.
buffers = {}

# Lock to prevent simultaneous changes to the buffers dictionary.
lock_buffers = thread.allocate_lock()


class BufferObj:
  # A persistent object which assembles large commands from fragments.

  # Object properties:
  # .name - The name (and size) of the buffer, e.g. 'alpha_12'
  # .lasttime - The last time that a web connection wrote to this object.
  # .data - The contents of the buffer.
  # .lock - Access control for writing to the text on this object.

  def __init__(self, name, size):
    # Setup this object
    self.name = name
    self.lasttime = datetime.datetime.now()
    self.lock = thread.allocate_lock()

    # Initialize the buffer with a set number of slots.
    # Null characters form dividers between each slot.
    array = []
    for x in xrange(size - 1):
      array.append("\0")
    self.data = "".join(array)

    # lock_buffers must be acquired by the caller to prevent simultaneous
    # creations of the same buffer.
    assert lock_buffers.locked(), "Can't create BufferObj unless locked."
    global buffers
    buffers[name] = self
    mobwrite_core.LOG.debug("Buffer initialized to %d slots: %s" % (size, name))

  def __str__(self):
    return "BufferObj[name=" + self.name + ", len(data)=" + str(len(self.data)) + "]"

  def set(self, n, text):
    # Set the nth slot of this buffer with text.
    assert self.lock.locked(), "Can't edit BufferObj unless locked."
    # n is 1-based.
    n -= 1
    array = self.data.split("\0")
    assert 0 <= n < len(array), "Invalid buffer insertion"
    array[n] = text
    self.data = "\0".join(array)
    mobwrite_core.LOG.debug("Inserted into slot %d of a %d slot buffer: %s" %
                            (n + 1, len(array), self.name))

  def get(self):
    # Fetch the completed text from the buffer.
    if ("\0" + self.data + "\0").find("\0\0") == -1:
      text = self.data.replace("\0", "")
      # Mark this buffer for deletion by the next cleanup() pass. (Calling
      # self.cleanup() here would deadlock: our caller, feedBuffer, already
      # holds lock_buffers, and thread locks are not reentrant.)
      self.lasttime = datetime.datetime.min
      return text
    # Not complete yet.
    return None

  def cleanup(self):
    # General cleanup task.
    # Delete myself if I've been idle too long.
    # Don't delete during a retrieval.
    mobwrite_core.LOG.debug("lock_buffers.acquire")
    lock_buffers.acquire()
    try:
      if self.lasttime < datetime.datetime.now() - mobwrite_core.TIMEOUT_BUFFER:
        mobwrite_core.LOG.info("Expired buffer: '%s'" % self.name)
        global buffers
        del buffers[self.name]
    finally:
      mobwrite_core.LOG.debug("lock_buffers.release")
      lock_buffers.release()
class DaemonMobWrite(mobwrite_core.MobWrite):
  def __init__(self):
    self.persister = Persister()

  def handleRequest(self, text):
    try:
      mobwrite_core.LOG.debug("Incoming: " + text)
      actions = self.parseRequest(text)
      reply = self.doActions(actions)
      mobwrite_core.LOG.debug("Reply: " + reply)
      return reply
    except:
      mobwrite_core.LOG.exception("Error handling request: " + text)
      return "E:all:Processing error"

  def doActions(self, actions):
    output = []
    last_username = None
    last_filename = None

    # Log the whole batch up front for debugging.
    for action_index in xrange(len(actions)):
      action = actions[action_index]
      mobwrite_core.LOG.debug("action %s = %s", action_index, action)

    for action_index in xrange(len(actions)):
      # Use an indexed loop in order to peek ahead one step to detect
      # username/filename boundaries.
      action = actions[action_index]

      # Close mode doesn't need a filename or handle for the 'close all'
      # case. If killing a specific view, then the id is in the 'data'.
      if action["mode"] == "close":
        to_close = action.get("data")
        if to_close == "all":
          kill_views_for_user(action["username"])
        elif to_close is not None:
          kill_view(action["username"], to_close)
        continue

      viewobj = fetch_viewobj(action["username"], action["filename"],
                              handle=action["handle"],
                              metadata=action["metadata"],
                              persister=self.persister)
      if viewobj is None:
        # Too many views connected at once.
        # Send back nothing. Pretend the return packet was lost.
        return ""

      delta_ok = True
      mobwrite_core.LOG.debug("view.lock.acquire on %s@%s", viewobj.username, viewobj.filename)
      viewobj.lock.acquire()
      textobj = viewobj.textobj

      try:
        access = self.persister.check_access(action["filename"], action["handle"])
        if access == Access.Denied:
          name = get_username_from_handle(action["handle"])
          message = "%s does not have access to %s" % (name, action["filename"])
          mobwrite_core.LOG.warning(message)
          output.append("E:" + action["filename"] + ":" + message + "\n")
          continue

        if action["mode"] == "null":
          if access == Access.ReadOnly:
            output.append("O:" + action["filename"] + "\n")
          else:
            # Nullify the text.
            mobwrite_core.LOG.debug("Nullifying: '%s@%s'" %
                (viewobj.username, viewobj.filename))
            mobwrite_core.LOG.debug("text.lock.acquire on %s", textobj.name)
            textobj.lock.acquire()
            try:
              textobj.setText(None)
            finally:
              mobwrite_core.LOG.debug("text.lock.release on %s", textobj.name)
              textobj.lock.release()
            viewobj.nullify()
          continue

        if (action["server_version"] != viewobj.shadow_server_version and
            action["server_version"] == viewobj.backup_shadow_server_version):
          # Client did not receive the last response. Roll back the shadow.
          mobwrite_core.LOG.warning("Rollback from shadow %d to backup shadow %d" %
              (viewobj.shadow_server_version, viewobj.backup_shadow_server_version))
          viewobj.shadow = viewobj.backup_shadow
          viewobj.shadow_server_version = viewobj.backup_shadow_server_version
          viewobj.edit_stack = []

        # Remove any elements from the edit stack with low version numbers
        # which have been acked by the client.
        x = 0
        while x < len(viewobj.edit_stack):
          if viewobj.edit_stack[x][0] <= action["server_version"]:
            del viewobj.edit_stack[x]
          else:
            x += 1

        if action["mode"] == "raw":
          # It's a raw text dump.
          data = urllib.unquote(action["data"]).decode("utf-8")
          mobwrite_core.LOG.info("Got %db raw text: '%s@%s'" %
              (len(data), viewobj.username, viewobj.filename))
          delta_ok = True
          # First, update the client's shadow.
          viewobj.shadow = data
          viewobj.shadow_client_version = action["client_version"]
          viewobj.shadow_server_version = action["server_version"]
          viewobj.backup_shadow = viewobj.shadow
          viewobj.backup_shadow_server_version = viewobj.shadow_server_version
          viewobj.edit_stack = []
          if access == Access.ReadOnly:
            output.append("O:" + action["filename"] + "\n")
          elif action["force"] or textobj.text is None:
            # Clobber the server's text.
            mobwrite_core.LOG.debug("text.lock.acquire on %s", textobj.name)
            textobj.lock.acquire()
            try:
              if textobj.text != data:
                textobj.setText(data)
                mobwrite_core.LOG.debug("Overwrote content: '%s@%s'" %
                    (viewobj.username, viewobj.filename))
            finally:
              mobwrite_core.LOG.debug("text.lock.release on %s", textobj.name)
              textobj.lock.release()

        elif action["mode"] == "delta":
          # It's a delta.
          mobwrite_core.LOG.debug("Got delta: %s@%s",
              viewobj.username, viewobj.filename)
          # mobwrite_core.LOG.debug("Got '%s' delta: '%s@%s'" %
          #     (action["data"], viewobj.username, viewobj.filename))
          if action["server_version"] != viewobj.shadow_server_version:
            # Can't apply a delta on a mismatched shadow version.
            delta_ok = False
            mobwrite_core.LOG.warning("Shadow version mismatch: %d != %d" %
                (action["server_version"], viewobj.shadow_server_version))
          elif action["client_version"] > viewobj.shadow_client_version:
            # Client has a version in the future?
            delta_ok = False
            mobwrite_core.LOG.warning("Future delta: %d > %d" %
                (action["client_version"], viewobj.shadow_client_version))
          elif action["client_version"] < viewobj.shadow_client_version:
            # We've already seen this diff.
            mobwrite_core.LOG.warning("Repeated delta: %d < %d" %
                (action["client_version"], viewobj.shadow_client_version))
          else:
            # Expand the delta into a diff using the client shadow.
            try:
              diffs = mobwrite_core.DMP.diff_fromDelta(viewobj.shadow, action["data"])
            except ValueError:
              diffs = None
              delta_ok = False
              mobwrite_core.LOG.warning("Delta failure, expected %d length: '%s@%s'" %
                  (len(viewobj.shadow), viewobj.username, viewobj.filename))
            viewobj.shadow_client_version += 1
            if diffs is not None:
              if access == Access.ReadOnly:
                output.append("O:" + action["filename"] + "\n")
              else:
                # Textobj lock required for read/patch/write cycle.
                mobwrite_core.LOG.debug("text.lock.acquire on %s", textobj.name)
                textobj.lock.acquire()
                try:
                  self.applyPatches(viewobj, diffs, action)
                finally:
                  mobwrite_core.LOG.debug("text.lock.release on %s", textobj.name)
                  textobj.lock.release()

        # Generate output if this is the last action or the username/filename
        # will change in the next iteration.
        if ((action_index + 1 == len(actions)) or
            actions[action_index + 1]["username"] != viewobj.username or
            actions[action_index + 1]["filename"] != viewobj.filename):
          echo_collaborators = "echo_collaborators" in action
          output.append(self.generateDiffs(viewobj,
                                           last_username, last_filename,
                                           action["echo_username"],
                                           action["force"],
                                           delta_ok, echo_collaborators))
          last_username = viewobj.username
          last_filename = viewobj.filename

      finally:
        mobwrite_core.LOG.debug("view.lock.release on %s@%s", viewobj.username, viewobj.filename)
        viewobj.lock.release()

    answer = "".join(output)

    return answer

  def generateDiffs(self, viewobj, last_username, last_filename,
                    echo_username, force, delta_ok, echo_collaborators):
    output = []
    if echo_username and last_username != viewobj.username:
      output.append("u:%s\n" % viewobj.username)
    if last_filename != viewobj.filename or last_username != viewobj.username:
      output.append("F:%d:%s\n" %
          (viewobj.shadow_client_version, viewobj.filename))

    textobj = viewobj.textobj
    mastertext = textobj.text

    if delta_ok:
      if mastertext is None:
        mastertext = ""
      # Create the diff between the view's text and the master text.
      diffs = mobwrite_core.DMP.diff_main(viewobj.shadow, mastertext)
      mobwrite_core.DMP.diff_cleanupEfficiency(diffs)
      text = mobwrite_core.DMP.diff_toDelta(diffs)
      if force:
        # Client sending 'D' means number, no error.
        # Client sending 'R' means number, client error.
        # Both cases involve numbers, so send back an overwrite delta.
        viewobj.edit_stack.append((viewobj.shadow_server_version,
            "D:%d:%s\n" % (viewobj.shadow_server_version, text)))
      else:
        # Client sending 'd' means text, no error.
        # Client sending 'r' means text, client error.
        # Both cases involve text, so send back a merge delta.
        viewobj.edit_stack.append((viewobj.shadow_server_version,
            "d:%d:%s\n" % (viewobj.shadow_server_version, text)))
      viewobj.shadow_server_version += 1
      mobwrite_core.LOG.debug("Sent delta for %s@%s",
          viewobj.username, viewobj.filename)
      # mobwrite_core.LOG.debug("Sent '%s' delta: '%s@%s'" %
      #     (text, viewobj.username, viewobj.filename))
    else:
      # Error; server could not parse client's delta.
      # Send a raw dump of the text.
      viewobj.shadow_client_version += 1
      if mastertext is None:
        mastertext = ""
        viewobj.edit_stack.append((viewobj.shadow_server_version,
            "r:%d:\n" % viewobj.shadow_server_version))
        mobwrite_core.LOG.info("Sent empty raw text: '%s@%s'" %
            (viewobj.username, viewobj.filename))
      else:
        # Force overwrite of client.
        text = mastertext
        text = text.encode("utf-8")
        text = urllib.quote(text, "!~*'();/?:@&=+$,# ")
        viewobj.edit_stack.append((viewobj.shadow_server_version,
            "R:%d:%s\n" % (viewobj.shadow_server_version, text)))
        mobwrite_core.LOG.info("Sent %db raw text: '%s@%s'" %
            (len(text), viewobj.username, viewobj.filename))

    viewobj.shadow = mastertext

    for edit in viewobj.edit_stack:
      output.append(edit[1])

    # Mozilla: We're passing on the first 4 chars of the username here, but
    # it's worth checking if there is still value in doing that.
    if echo_collaborators:
      for view in viewobj.textobj.views:
        view.metadata["id"] = view.username[0:4]
        line = "C:" + view.handle + ":" + simplejson.dumps(view.metadata) + "\n"
        output.append(line)

    return "".join(output)
class StreamRequestHandlerDaemonMobWrite(SocketServer.StreamRequestHandler, DaemonMobWrite):
  def __init__(self, a, b, c):
    DaemonMobWrite.__init__(self)
    SocketServer.StreamRequestHandler.__init__(self, a, b, c)

  def feedBuffer(self, name, size, index, datum):
    """Add one block of text to the buffer and return the whole text if the
    buffer is complete.

    Args:
      name: Unique name of buffer object.
      size: Total number of slots in the buffer.
      index: Which slot to insert this text (note that index is 1-based).
      datum: The text to insert.

    Returns:
      String with all the text blocks merged in the correct order. Or if the
      buffer is not yet complete, returns the empty string.
    """
    # Note that 'index' is 1-based.
    if not 0 < index <= size:
      mobwrite_core.LOG.error("Invalid buffer: '%s %d %d'" % (name, size, index))
      text = ""
    elif size == 1 and index == 1:
      # A buffer with one slot? Pointless.
      text = datum
      mobwrite_core.LOG.debug("Buffer with only one slot: '%s'" % name)
    else:
      # Retrieve the named buffer object. Create it if it doesn't exist.
      name += "_%d" % size
      # Don't let two simultaneous creations happen, or a deletion during a
      # retrieval.
      mobwrite_core.LOG.debug("lock_buffers.acquire")
      lock_buffers.acquire()
      try:
        if buffers.has_key(name):
          bufferobj = buffers[name]
          bufferobj.lasttime = datetime.datetime.now()
          mobwrite_core.LOG.debug("Found buffer: '%s'" % name)
        else:
          bufferobj = BufferObj(name, size)
          mobwrite_core.LOG.debug("Creating buffer: '%s'" % name)
        mobwrite_core.LOG.debug("buffer.lock.acquire on %s" % name)
        bufferobj.lock.acquire()
        try:
          bufferobj.set(index, datum)
          # Check if the buffer is complete.
          text = bufferobj.get()
        finally:
          mobwrite_core.LOG.debug("buffer.lock.release on %s" % name)
          bufferobj.lock.release()
      finally:
        # Mozilla: This unlock used to come straight after the call to
        # bufferobj.lock.acquire() above.
        # We believe that the order lock-a, lock-b, unlock-a, unlock-b is
        # prone to deadlocks, so we've moved 'unlock-a' to here.
        mobwrite_core.LOG.debug("lock_buffers.release")
        lock_buffers.release()
    if text is None:
      text = ""
    return urllib.unquote(text)

  def handle(self):
    self.connection.settimeout(TIMEOUT_TELNET)
    if CONNECTION_ORIGIN and self.client_address[0] != CONNECTION_ORIGIN:
      raise IOError("Connection refused from " + self.client_address[0])
    # mobwrite_core.LOG.info("Connection accepted from " + self.client_address[0])

    data = []
    # Read in all the lines.
    while 1:
      try:
        line = self.rfile.readline()
      except:
        # Timeout.
        mobwrite_core.LOG.warning("Timeout on connection")
        break
      data.append(line)
      if not line.rstrip("\r\n"):
        # Terminate and execute on blank line.
        question = "".join(data)
        answer = self.handleRequest(question)
        self.wfile.write(answer)
        break

    # Goodbye
    mobwrite_core.LOG.debug("Disconnecting.")


def kill_views_for_user(username):
  for view in views.values():
    if view.username == username:
      mobwrite_core.LOG.info("kill_views_for_user on %s, %s" % (username, view.filename))
      view.nullify()


def kill_view(username, filename):
  view = fetch_viewobj(username, filename)
  if view is not None:
    mobwrite_core.LOG.info("kill_view on " + username + ", " + filename)
    view.nullify()


def cleanup_thread():
  # Run the cleanup task every minute.
  if STORAGE_MODE == BDB:
    import bsddb

  while True:
    cleanup()
    time.sleep(60)


def debugServer():
  mobwrite_core.LOG.info("Views: (count=" + str(len(views)) + ")")
  for key, view in views.items():
    mobwrite_core.LOG.info("- " + str(key) + ": " + str(view))

  mobwrite_core.LOG.info("Texts: (count=" + str(len(texts)) + ")")
  for name, text in texts.items():
    mobwrite_core.LOG.info("- " + name + ": " + str(text))

  mobwrite_core.LOG.info("Buffers: (count=" + str(len(buffers)) + ")")
  for name, buffer in buffers.items():
    mobwrite_core.LOG.info("- " + name + ": " + str(buffer))
# Left at double initial indent to help diff
|
||||
def cleanup():
|
||||
mobwrite_core.LOG.info("Running cleanup task.")
|
||||
for view in views.values():
|
||||
view.cleanup()
|
||||
for text in texts.values():
|
||||
text.cleanup()
|
||||
for buffer in buffers.values():
|
||||
buffer.cleanup()
|
||||
|
||||
# Persist the remaining texts
|
||||
for text in texts.values():
|
||||
mobwrite_core.LOG.debug("text.lock.acquire on %s", text.name)
|
||||
text.lock.acquire()
|
||||
try:
|
||||
text.save()
|
||||
finally:
|
||||
mobwrite_core.LOG.debug("text.lock.release on %s", text.name)
|
||||
text.lock.release()
|
||||
|
||||
timeout = datetime.datetime.now() - mobwrite_core.TIMEOUT_TEXT
|
||||
if STORAGE_MODE == FILE:
|
||||
# Delete old files.
|
||||
files = glob.glob("%s/*.txt" % DATA_DIR)
|
||||
for filename in files:
|
||||
if datetime.datetime.fromtimestamp(os.path.getmtime(filename)) < timeout:
|
||||
os.unlink(filename)
|
||||
mobwrite_core.LOG.info("Deleted file: '%s'" % filename)
|
||||
|
||||
if STORAGE_MODE == BDB:
|
||||
# Delete old DB records.
|
||||
# Can't delete an entry in a hash while iterating or else order is lost.
|
||||
expired = []
|
||||
for k, v in lasttime_db.iteritems():
|
||||
if datetime.datetime.fromtimestamp(int(v)) < timeout:
|
||||
expired.append(k)
|
||||
for k in expired:
|
||||
if texts_db.has_key(k):
|
||||
del texts_db[k]
|
||||
if lasttime_db.has_key(k):
|
||||
del lasttime_db[k]
|
||||
mobwrite_core.LOG.info("Deleted from DB: '%s'" % k)
|
||||
|
||||
last_cleanup = time.time()
|
||||
|
||||
def maybe_cleanup():
|
||||
if PARANOID_SAVE:
|
||||
return
|
||||
global last_cleanup
|
||||
now = time.time()
|
||||
if now > last_cleanup + 10:
|
||||
cleanup()
|
||||
last_cleanup = now
|
||||
|
||||
|
||||
def main():
|
||||
if STORAGE_MODE == BDB:
|
||||
import bsddb
|
||||
global texts_db, lasttime_db
|
||||
texts_db = bsddb.hashopen(DATA_DIR + "/texts.db")
|
||||
lasttime_db = bsddb.hashopen(DATA_DIR + "/lasttime.db")
|
||||
|
||||
# Start up a thread that does timeouts and cleanup
|
||||
thread.start_new_thread(cleanup_thread, ())
|
||||
|
||||
mobwrite_core.LOG.info("Listening on port %d..." % LOCAL_PORT)
|
||||
s = SocketServer.ThreadingTCPServer(("", LOCAL_PORT), StreamRequestHandlerDaemonMobWrite)
|
||||
try:
|
||||
s.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
mobwrite_core.LOG.info("Shutting down.")
|
||||
s.socket.close()
|
||||
if STORAGE_MODE == BDB:
|
||||
texts_db.close()
|
||||
lasttime_db.close()
|
||||
|
||||
|
||||
from bespin import config
|
||||
|
||||
def process_mobwrite(args=None):
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
|
||||
if args:
|
||||
mode = args.pop(0)
|
||||
else:
|
||||
mode = "dev"
|
||||
|
||||
print("Bespin mobwrite worker (mode=" + mode + ")")
|
||||
config.set_profile(mode)
|
||||
|
||||
if args:
|
||||
config.load_pyconfig(args.pop(0))
|
||||
|
||||
config.activate_profile()
|
||||
|
||||
mobwrite_core.logging.basicConfig()
|
||||
main()
|
||||
mobwrite_core.logging.shutdown()
|
|
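
# For reference, the daemon's wire protocol is line-based: a client sends a
# block of colon-delimited "name:value" lines ended by one blank line, and
# then reads the reply until a blank line. A minimal sketch of a test client
# (this helper is illustrative only and is not part of mobwrite; the payload
# fields in the usage note below are hypothetical):
def _example_send_request(payload, host="localhost", port=LOCAL_PORT):
    import socket
    sock = socket.create_connection((host, port))
    # The trailing blank line tells the daemon to terminate and execute.
    sock.sendall(payload + "\n")
    answer = []
    rfile = sock.makefile()
    for line in rfile:
        if not line.rstrip("\r\n"):
            break  # a blank line ends the daemon's answer
        answer.append(line)
    sock.close()
    return "".join(answer)

# Usage sketch: _example_send_request("u:joe\nF:1:joe/project/file.txt\nd:1:=0\n")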
@ -0,0 +1,50 @@
import sys
import logging

from paste.httpserver import serve
from webob import Request, Response

from bespin.mobwrite.mobwrite_daemon import DaemonMobWrite
from bespin import config
from bespin.controllers import db_middleware

log = logging.getLogger("mw_web")

class WSGIMobWrite(DaemonMobWrite):
    def __call__(self, environ, start_response):
        request = Request(environ)
        response = Response()
        try:
            answer = self.handleRequest(request.body)
            response.body = answer
            response.content_type = "application/mobwrite"
        except Exception, e:
            log.exception("error in request handling")
            response.status = "500 Internal Server Error"
            response.body = str(e)
        return response(environ, start_response)

def start_server(args=None):
    if args is None:
        args = sys.argv[1:]

    if args:
        mode = args.pop(0)
    else:
        mode = "dev"

    print("Bespin mobwrite worker (mode=" + mode + ")")
    config.set_profile(mode)

    if args:
        config.load_pyconfig(args.pop(0))

    if mode == "dev":
        config.load_pyconfig("devconfig.py")

    config.activate_profile()

    app = WSGIMobWrite()
    app = db_middleware(app)

    serve(app, config.c.mobwrite_server_address, config.c.mobwrite_server_port, use_threadpool=True)
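
# To poke the endpoint once start_server() is running, something like the
# following works (a sketch; the port depends on the active profile's
# mobwrite_server_port setting, and the payload fields are hypothetical):
#
#     import urllib2
#     body = "u:joe\nF:1:joe/project/file.txt\nd:1:=0\n\n"
#     print urllib2.urlopen("http://localhost:8081/", body).read()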
@ -0,0 +1,458 @@
|
|||
|
||||
We are currently on mobwrite r70 with changes. The changes brought across from
mobwrite-r41-with-bespin are documented below.
|
||||
|
||||
|
||||
== Client diff.js ==
|
||||
|
||||
$ cp mobwrite/diff_match_patch_uncompressed.js frontend/js/bespin/mobwrite/diff.js
|
||||
- Add "dojo.provide("bespin.mobwrite.diff");" to the start
|
||||
|
||||
|
||||
== Client core.js ==
|
||||
|
||||
$ cp mobwrite/mobwrite_core.js frontend/js/bespin/mobwrite/core.js
|
||||
- Apply the following patch
|
||||
|
||||
diff -u --strip-trailing-cr frontend/js/bespin/mobwrite_70/core.js frontend/js/bespin/mobwrite/mobwrite_core.js
|
||||
--- frontend/js/bespin/mobwrite_fork_from_41/core.js 2009-07-08 10:05:31.000000000 +0100
|
||||
+++ frontend/js/bespin/mobwrite_41/mobwrite_core.js 2009-07-08 11:47:03.000000000 +0100
|
||||
@@ -1,7 +1,7 @@
|
||||
/**
|
||||
* MobWrite - Real-time Synchronization and Collaboration Service
|
||||
*
|
||||
- * Copyright 2006 Neil Fraser
|
||||
+ * Copyright 2006 Google Inc.
|
||||
* http://code.google.com/p/google-mobwrite/
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -22,7 +22,6 @@
|
||||
* @author fraser@google.com (Neil Fraser)
|
||||
*/
|
||||
|
||||
-dojo.provide("bespin.mobwrite.core");
|
||||
|
||||
/**
|
||||
* Singleton class containing all MobWrite code.
|
||||
@@ -34,7 +33,7 @@
|
||||
* URL of Ajax gateway.
|
||||
* @type {string}
|
||||
*/
|
||||
-mobwrite.syncGateway = '/mobwrite/';
|
||||
+mobwrite.syncGateway = '/scripts/q.py';
|
||||
|
||||
|
||||
/**
|
||||
@@ -48,7 +47,7 @@
|
||||
* Print diagnostic messages to the browser's console.
|
||||
* @type {boolean}
|
||||
*/
|
||||
-mobwrite.debug = false;
|
||||
+mobwrite.debug = true;
|
||||
|
||||
|
||||
// Debug mode requires a compatible console.
|
||||
@@ -114,7 +113,7 @@
|
||||
* Shortest interval (in milliseconds) between connections.
|
||||
* @type {number}
|
||||
*/
|
||||
-mobwrite.minSyncInterval = 500;
|
||||
+mobwrite.minSyncInterval = 1000;
|
||||
|
||||
|
||||
/**
|
||||
@@ -129,7 +128,7 @@
|
||||
* This value is modified later as traffic rates are established.
|
||||
* @type {number}
|
||||
*/
|
||||
-mobwrite.syncInterval = 1000;
|
||||
+mobwrite.syncInterval = 2000;
|
||||
|
||||
|
||||
/**
|
||||
@@ -380,7 +379,9 @@
|
||||
}
|
||||
|
||||
var remote = (mobwrite.syncGateway.indexOf('://') != -1);
|
||||
- if (mobwrite.debug) {
|
||||
+ if (mobwrite.debug && typeof console == 'object') {
|
||||
+ // Extra check here for the existance of 'console' because
|
||||
+ // the console disappears on page unload before the code does.
|
||||
console.info('TO server:\n' + data.join(''));
|
||||
}
|
||||
// Add terminating blank line.
|
||||
@@ -608,9 +609,6 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
- } else if (name == 'C' || name == 'c') {
|
||||
- var users = value.split(",");
|
||||
- file._editSession.reportCollaborators(users);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -619,7 +617,7 @@
|
||||
mobwrite.syncInterval /= 2;
|
||||
} else {
|
||||
// Let the ping interval creep up.
|
||||
- mobwrite.syncInterval += 500;
|
||||
+ mobwrite.syncInterval += 1000;
|
||||
}
|
||||
// Keep the syncs constrained between 1 and 10 seconds.
|
||||
mobwrite.syncInterval =
|
||||
@@ -682,7 +680,7 @@
|
||||
try {
|
||||
req = new ActiveXObject('Microsoft.XMLHTTP');
|
||||
} catch(e) {
|
||||
- req = null;
|
||||
+ req = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -690,10 +688,6 @@
|
||||
req.onreadystatechange = callback;
|
||||
req.open('POST', url, true);
|
||||
req.setRequestHeader('Content-Type','application/x-www-form-urlencoded');
|
||||
-
|
||||
- // CSRF protection as defined by Bespin
|
||||
- bespin.get("server").protectXhrAgainstCsrf(req);
|
||||
-
|
||||
req.send(post);
|
||||
}
|
||||
return req;
|
||||
@@ -772,4 +766,3 @@
|
||||
}
|
||||
}
|
||||
};
|
||||
-
|
||||
|
||||
== Other client files ==
|
||||
|
||||
- integrate.js is a replacement for form.js
  form.js is not needed by Bespin; integrate.js is Mozilla code
|
||||
|
||||
|
||||
== Server mobwrite_daemon.py ==
|
||||
|
||||
> cp daemon/mobwrite_daemon.py backend/python/bespin/mobwrite/mobwrite_daemon.py
|
||||
- Apply the following patch:
|
||||
|
||||
--- ../../mobwrite/trunk/daemon/mobwrite_daemon.py 2009-07-08 12:02:59.000000000 +0100
|
||||
+++ backend/python/bespin/mobwrite/mobwrite_daemon.py 2009-07-09 15:10:04.000000000 +0100
|
||||
@@ -47,10 +47,8 @@
|
||||
MEMORY = 0
|
||||
FILE = 1
|
||||
BDB = 2
|
||||
-STORAGE_MODE = MEMORY
|
||||
-
|
||||
-# Relative location of the data directory.
|
||||
-DATA_DIR = "./data"
|
||||
+PERSISTER = 3
|
||||
+STORAGE_MODE = PERSISTER
|
||||
|
||||
# Port to listen on.
|
||||
LOCAL_PORT = 3017
|
||||
@@ -78,7 +76,7 @@
|
||||
|
||||
# Object properties:
|
||||
# .lock - Access control for writing to the text on this object.
|
||||
- # .views - Count of views currently connected to this text.
|
||||
+ # .views - Views currently connected to this text.
|
||||
# .lasttime - The last time that this text was modified.
|
||||
|
||||
# Inerhited properties:
|
||||
@@ -89,7 +87,8 @@
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Setup this object
|
||||
mobwrite_core.TextObj.__init__(self, *args, **kwargs)
|
||||
- self.views = 0
|
||||
+ self.persister = kwargs.get("persister")
|
||||
+ self.views = []
|
||||
self.lasttime = datetime.datetime.now()
|
||||
self.lock = thread.allocate_lock()
|
||||
self.load()
|
||||
@@ -106,7 +105,7 @@
|
||||
|
||||
def cleanup(self):
|
||||
# General cleanup task.
|
||||
- if self.views > 0:
|
||||
+ if len(self.views) > 0:
|
||||
return
|
||||
terminate = False
|
||||
# Lock must be acquired to prevent simultaneous deletions.
|
||||
@@ -164,6 +163,10 @@
|
||||
self.setText(None)
|
||||
self.changed = False
|
||||
|
||||
+ if STORAGE_MODE == PERSISTER:
|
||||
+ contents = self.persister.load(self.name);
|
||||
+ self.setText(contents)
|
||||
+ self.changed = False
|
||||
|
||||
def save(self):
|
||||
# Save the text object to non-volatile storage.
|
||||
@@ -205,8 +208,12 @@
|
||||
lasttime_db[self.name] = str(int(time.time()))
|
||||
self.changed = False
|
||||
|
||||
+ if STORAGE_MODE == PERSISTER:
|
||||
+ self.persister.save(self.name, self.text)
|
||||
+ self.changed = False
|
||||
+
|
||||
|
||||
-def fetch_textobj(name, view):
|
||||
+def fetch_textobj(name, view, persister):
|
||||
# Retrieve the named text object. Create it if it doesn't exist.
|
||||
# Add the given view into the text object's list of connected views.
|
||||
# Don't let two simultaneous creations happen, or a deletion during a
|
||||
@@ -216,9 +223,9 @@
|
||||
textobj = texts[name]
|
||||
mobwrite_core.LOG.debug("Accepted text: '%s'" % name)
|
||||
else:
|
||||
- textobj = TextObj(name=name)
|
||||
+ textobj = TextObj(name=name, persister=persister)
|
||||
mobwrite_core.LOG.debug("Creating text: '%s'" % name)
|
||||
- textobj.views += 1
|
||||
+ textobj.views.append(view)
|
||||
lock_texts.release()
|
||||
return textobj
|
||||
|
||||
@@ -251,10 +258,11 @@
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Setup this object
|
||||
mobwrite_core.ViewObj.__init__(self, *args, **kwargs)
|
||||
+ self.handle = kwargs.get("handle")
|
||||
self.edit_stack = []
|
||||
self.lasttime = datetime.datetime.now()
|
||||
self.lock = thread.allocate_lock()
|
||||
- self.textobj = fetch_textobj(self.filename, self)
|
||||
+ self.textobj = fetch_textobj(self.filename, self, kwargs.get("persister"))
|
||||
|
||||
# lock_views must be acquired by the caller to prevent simultaneous
|
||||
# creations of the same view.
|
||||
@@ -274,7 +282,7 @@
|
||||
del views[(self.username, self.filename)]
|
||||
except KeyError:
|
||||
mobwrite_core.LOG.error("View object not in view list: '%s %s'" % (self.username, self.filename))
|
||||
- self.textobj.views -= 1
|
||||
+ self.textobj.views.remove(self)
|
||||
lock_views.release()
|
||||
|
||||
def nullify(self):
|
||||
@@ -282,7 +290,7 @@
|
||||
self.cleanup()
|
||||
|
||||
|
||||
-def fetch_viewobj(username, filename):
|
||||
+def fetch_viewobj(username, filename, handle, persister):
|
||||
# Retrieve the named view object. Create it if it doesn't exist.
|
||||
# Don't let two simultaneous creations happen, or a deletion during a
|
||||
# retrieval.
|
||||
@@ -297,7 +305,7 @@
|
||||
viewobj = None
|
||||
mobwrite_core.LOG.critical("Overflow: Can't create new view.")
|
||||
else:
|
||||
- viewobj = ViewObj(username=username, filename=filename)
|
||||
+ viewobj = ViewObj(username=username, filename=filename, handle=handle, persister=persister)
|
||||
mobwrite_core.LOG.debug("Creating view: '%s@%s'" % key)
|
||||
lock_views.release()
|
||||
return viewobj
|
||||
@@ -375,6 +383,9 @@
|
||||
|
||||
class DaemonMobWrite(SocketServer.StreamRequestHandler, mobwrite_core.MobWrite):
|
||||
|
||||
+ def __init__(self, persister):
|
||||
+ self.persister = persister
|
||||
+
|
||||
def feedBuffer(self, name, size, index, datum):
|
||||
"""Add one block of text to the buffer and return the whole text if the
|
||||
buffer is complete.
|
||||
@@ -439,7 +450,9 @@
|
||||
data.append(line)
|
||||
if not line.rstrip("\r\n"):
|
||||
# Terminate and execute on blank line.
|
||||
- self.wfile.write(self.handleRequest("".join(data)))
|
||||
+ question = "".join(data)
|
||||
+ answer = self.handleRequest(question)
|
||||
+ self.wfile.write(answer)
|
||||
break
|
||||
|
||||
# Goodbye
|
||||
@@ -463,7 +476,7 @@
|
||||
|
||||
# Fetch the requested view object.
|
||||
if not viewobj:
|
||||
- viewobj = fetch_viewobj(action["username"], action["filename"])
|
||||
+ viewobj = fetch_viewobj(action["username"], action["filename"], action["handle"], self.persister)
|
||||
if viewobj == None:
|
||||
# Too many views connected at once.
|
||||
# Send back nothing. Pretend the return packet was lost.
|
||||
@@ -574,7 +587,15 @@
|
||||
viewobj.lock.release()
|
||||
viewobj = None
|
||||
|
||||
- return "".join(output)
|
||||
+ if action["echo_collaborators"]:
|
||||
+ collaborators = set([view.handle for view in texts[action["filename"]].views])
|
||||
+ #collaborators -= actions["handle"]
|
||||
+ line = "C:" + (",".join(collaborators))
|
||||
+ output.append(line)
|
||||
+
|
||||
+ answer = "".join(output)
|
||||
+
|
||||
+ return answer
|
||||
|
||||
|
||||
def generateDiffs(self, viewobj, last_username, last_filename,
|
||||
@@ -644,6 +665,11 @@
|
||||
import bsddb
|
||||
|
||||
while True:
|
||||
+ cleanup()
|
||||
+ time.sleep(60)
|
||||
+
|
||||
+# Left at double initial indent to help diff
|
||||
+def cleanup():
|
||||
mobwrite_core.LOG.info("Running cleanup task.")
|
||||
for v in views.values():
|
||||
v.cleanup()
|
||||
@@ -675,7 +701,42 @@
|
||||
del lasttime_db[k]
|
||||
mobwrite_core.LOG.info("Deleted from DB: '%s'" % k)
|
||||
|
||||
- time.sleep(60)
|
||||
+last_cleanup = time.time()
|
||||
+
|
||||
+def maybe_cleanup():
|
||||
+ global last_cleanup
|
||||
+ now = time.time()
|
||||
+ if now > last_cleanup + 60:
|
||||
+ cleanup()
|
||||
+ last_cleanup = now
|
||||
+
|
||||
+class Persister:
|
||||
+
|
||||
+ def load(self, name):
|
||||
+ project, path = self.__decomposeName(name)
|
||||
+ print "loading from: %s/%s" % (project.name, path)
|
||||
+ return project.get_file(path)
|
||||
+
|
||||
+ def save(self, name, contents):
|
||||
+ project, path = self.__decomposeName(name)
|
||||
+ print "saving to: %s/%s" % (project.name, path)
|
||||
+ project.save_file(path, contents)
|
||||
+
|
||||
+ def __decomposeName(self, name):
|
||||
+ from bespin.database import User, get_project
|
||||
+ (user_name, project_name, path) = name.split("/", 2)
|
||||
+
|
||||
+ user = User.find_user(user_name)
|
||||
+
|
||||
+ parts = project_name.partition('+')
|
||||
+ if parts[1] == '':
|
||||
+ owner = user
|
||||
+ else:
|
||||
+ owner = User.find_user(parts[0])
|
||||
+ project_name = parts[2]
|
||||
+
|
||||
+ project = get_project(user, owner, project_name)
|
||||
+ return (project, path)
|
||||
|
||||
|
||||
def main():
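
For reference, the names that Persister receives are "user/project/path"
strings, where a shared project can be written as "owner+project". A
standalone sketch of just the decomposition step (illustrative; the real
code also resolves User and project objects via bespin.database):

    def decompose_name(name):
        (user_name, project_name, path) = name.split("/", 2)
        owner, sep, rest = project_name.partition('+')
        if sep == '':
            # No '+': the requesting user owns the project.
            return (user_name, user_name, project_name, path)
        return (user_name, owner, rest, path)

    # decompose_name("joe/myproj/dir/file.txt")  -> ("joe", "joe", "myproj", "dir/file.txt")
    # decompose_name("joe/mary+shared/file.txt") -> ("joe", "mary", "shared", "file.txt")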
== Server mobwrite_core.py ==
|
||||
|
||||
> cp lib/mobwrite_core.py backend/python/bespin/mobwrite/mobwrite_core.py
|
||||
- Apply the following patch:
|
||||
|
||||
--- ../../mobwrite/trunk/lib/mobwrite_core.py 2009-07-08 12:02:59.000000000 +0100
|
||||
+++ backend/python/bespin/mobwrite/mobwrite_core.py 2009-07-09 13:38:36.000000000 +0100
|
||||
@@ -33,7 +33,7 @@
|
||||
|
||||
# Demo usage should limit the maximum size of any text.
|
||||
# Set to 0 to disable limit.
|
||||
-MAX_CHARS = 20000
|
||||
+MAX_CHARS = 0
|
||||
|
||||
# Delete any view which hasn't been accessed in half an hour.
|
||||
TIMEOUT_VIEW = datetime.timedelta(minutes=30)
|
||||
@@ -138,8 +138,10 @@
|
||||
actions = []
|
||||
username = None
|
||||
filename = None
|
||||
+ handle = None
|
||||
server_version = None
|
||||
echo_username = False
|
||||
+ echo_collaborators = False
|
||||
for line in data.splitlines():
|
||||
if not line:
|
||||
# Terminate on blank line.
|
||||
@@ -188,6 +190,12 @@
|
||||
# Client may request explicit usernames in response.
|
||||
echo_username = (name == "U")
|
||||
|
||||
+ elif name == "h" or name == "H":
|
||||
+ # Remember the username.
|
||||
+ handle = value
|
||||
+ # Client may request explicit collaborator handles in response.
|
||||
+ echo_collaborators = (name == "H")
|
||||
+
|
||||
elif name == "f" or name == "F":
|
||||
# Remember the filename and version.
|
||||
filename = value
|
||||
@@ -219,12 +227,16 @@
|
||||
action["server_version"] = server_version
|
||||
action["client_version"] = version
|
||||
action["data"] = value
|
||||
+ action["handle"] = handle
|
||||
action["echo_username"] = echo_username
|
||||
+ action["echo_collaborators"] = echo_collaborators
|
||||
if username and filename and action["mode"]:
|
||||
action["username"] = username
|
||||
action["filename"] = filename
|
||||
actions.append(action)
|
||||
-
|
||||
+ else:
|
||||
+ LOG.warning("Skipping " + str(action) + ": username=" + str(username) + ", filename=" + str(filename) + ", action[mode]=" + str(action["mode"]))
|
||||
+
|
||||
return actions
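
With the h/H field in place, a client that sends "H:<handle>" gets back a
"C:" line listing the handles of every view attached to the file. An
illustrative exchange (all field values here are made up):

    TO server:
        u:af29Xq
        H:joe
        F:23:joe/project/file.txt
        d:23:=120
        <blank line terminates the request>

    FROM server:
        f:23:joe/project/file.txt
        d:23:=120
        C:joe,mary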
- Also the following patch from mobwrite-r41-with-bespin to mobwrite-r41 was not
  copied across when we merged with mobwrite-r70. We might need to re-instate
  this later:
|
||||
|
||||
--- backend/python/bespin/mobwrite-41/mobwrite_daemon.py 2009-07-08 11:47:03.000000000 +0100
|
||||
+++ backend/python/bespin/mobwrite_fork_from_41/mobwrite_daemon.py 2009-06-02 09:30:57.000000000 +0100
|
||||
@@ -98,9 +107,9 @@
|
||||
# Keep the text within the length limit.
|
||||
if MAX_CHARS != 0 and len(text) > MAX_CHARS:
|
||||
text = text[-MAX_CHARS:]
|
||||
- logging.warning("Truncated text to %d characters." % MAX_CHARS)
|
||||
- # Normalize linebreaks to LF.
|
||||
- text = re.sub(r"(\r\n|\r|\n)", "\n", text)
|
||||
+ log.warning("Truncated text to %d characters." % MAX_CHARS)
|
||||
+ # Normalize linebreaks to CRLF.
|
||||
+ text = re.sub(r"(\r\n|\r|\n)", "\r\n", text)
|
||||
if (self.text != text):
|
||||
self.text = text
|
||||
self.changed = True
|
||||
|
||||
|
||||
== Server diff_match_patch.py ==
|
||||
|
||||
$ cp mobwrite/lib/diff_match_patch.py bespin/backend/python/bespin/mobwrite/diff_match_patch.py
|
||||
- No changes except:
|
||||
- We originally set self.Match_MaxBits to 0 rather than 32. When we updated to
  r70, we left it at 32. I'm not sure there was much detailed logic behind the
  original change, but if it produces poor patches, then this might be a place
  to look.
|
||||
|
||||
|
||||
|
||||
== Other server files ==
|
||||
|
||||
- __init__.py is from Bespin, not from mobwrite
|
||||
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the License.
|
||||
#
|
||||
# The Original Code is Bespin.
|
||||
#
|
||||
# The Initial Developer of the Original Code is Mozilla.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2009
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
#
|
||||
|
||||
"""Data classes for working with files/projects/users."""
|
||||
|
||||
from bespin import database, filesystem
|
|
@ -0,0 +1,654 @@
|
|||
# 2007/11/08
|
||||
# Version 0.2.6
|
||||
# pathutils.py
|
||||
# Functions useful for working with files and paths.
|
||||
# http://www.voidspace.org.uk/python/recipebook.shtml#utils
|
||||
|
||||
# Copyright Michael Foord 2004
|
||||
# Released subject to the BSD License
|
||||
# Please see http://www.voidspace.org.uk/python/license.shtml
|
||||
|
||||
# For information about bugfixes, updates and support, please join the Pythonutils mailing list.
|
||||
# http://groups.google.com/group/pythonutils/
|
||||
# Comments, suggestions and bug reports welcome.
|
||||
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
|
||||
# E-mail fuzzyman@voidspace.org.uk
|
||||
|
||||
"""
|
||||
This module contains convenience functions for working with files and paths.
|
||||
"""
|
||||
|
||||
from __future__ import generators
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
__version__ = '0.2.6'
|
||||
|
||||
__all__ = (
|
||||
'readlines',
|
||||
'writelines',
|
||||
'readbinary',
|
||||
'writebinary',
|
||||
'readfile',
|
||||
'writefile',
|
||||
'tslash',
|
||||
'relpath',
|
||||
'splitall',
|
||||
'walkfiles',
|
||||
'walkdirs',
|
||||
'walkemptydirs',
|
||||
'formatbytes',
|
||||
'fullcopy',
|
||||
'import_path',
|
||||
'onerror',
|
||||
'get_main_dir',
|
||||
'main_is_frozen',
|
||||
'Lock',
|
||||
'LockError',
|
||||
'LockFile',
|
||||
'__version__',
|
||||
)
|
||||
|
||||
######################################
|
||||
# Functions to read and write files in text and binary mode.
|
||||
|
||||
def readlines(filename):
|
||||
"""Passed a filename, it reads it, and returns a list of lines. (Read in text mode)"""
|
||||
filehandle = open(filename, 'r')
|
||||
outfile = filehandle.readlines()
|
||||
filehandle.close()
|
||||
return outfile
|
||||
|
||||
def writelines(filename, infile, newline=False):
|
||||
"""
|
||||
Given a filename and a list of lines it writes the file. (In text mode)
|
||||
|
||||
If ``newline`` is ``True`` (default is ``False``) it adds a newline to each
|
||||
line.
|
||||
"""
|
||||
filehandle = open(filename, 'w')
|
||||
if newline:
|
||||
infile = [line + '\n' for line in infile]
|
||||
filehandle.writelines(infile)
|
||||
filehandle.close()
|
||||
|
||||
def readbinary(filename):
|
||||
"""Given a filename, read a file in binary mode. It returns a single string."""
|
||||
filehandle = open(filename, 'rb')
|
||||
thisfile = filehandle.read()
|
||||
filehandle.close()
|
||||
return thisfile
|
||||
|
||||
def writebinary(filename, infile):
|
||||
"""Given a filename and a string, write the file in binary mode. """
|
||||
filehandle = open(filename, 'wb')
|
||||
filehandle.write(infile)
|
||||
filehandle.close()
|
||||
|
||||
def readfile(filename):
|
||||
"""Given a filename, read a file in text mode. It returns a single string."""
|
||||
filehandle = open(filename, 'r')
|
||||
outfile = filehandle.read()
|
||||
filehandle.close()
|
||||
return outfile
|
||||
|
||||
def writefile(filename, infile):
|
||||
"""Given a filename and a string, write the file in text mode."""
|
||||
filehandle = open(filename, 'w')
|
||||
filehandle.write(infile)
|
||||
filehandle.close()
|
||||
|
||||
####################################################################
|
||||
# Some functions for dealing with paths
|
||||
|
||||
def tslash(apath):
|
||||
"""
|
||||
Add a trailing slash (``/``) to a path if it lacks one.
|
||||
|
||||
It doesn't use ``os.sep`` because you end up in trouble on windoze, when you
|
||||
want separators for URLs.
|
||||
"""
|
||||
if apath and apath != '.' and not apath.endswith('/') and not apath.endswith('\\'):
|
||||
return apath + '/'
|
||||
else:
|
||||
return apath
|
||||
|
||||
def relpath(origin, dest):
|
||||
"""
|
||||
Return the relative path between origin and dest.
|
||||
|
||||
If it's not possible return dest.
|
||||
|
||||
|
||||
If they are identical return ``os.curdir``
|
||||
|
||||
Adapted from `path.py <http://www.jorendorff.com/articles/python/path/>`_ by Jason Orendorff.
|
||||
"""
|
||||
origin = os.path.abspath(origin).replace('\\', '/')
|
||||
dest = os.path.abspath(dest).replace('\\', '/')
|
||||
#
|
||||
orig_list = splitall(os.path.normcase(origin))
|
||||
# Don't normcase dest! We want to preserve the case.
|
||||
dest_list = splitall(dest)
|
||||
#
|
||||
if orig_list[0] != os.path.normcase(dest_list[0]):
|
||||
# Can't get here from there.
|
||||
return dest
|
||||
#
|
||||
# Find the location where the two paths start to differ.
|
||||
i = 0
|
||||
for start_seg, dest_seg in zip(orig_list, dest_list):
|
||||
if start_seg != os.path.normcase(dest_seg):
|
||||
break
|
||||
i += 1
|
||||
#
|
||||
# Now i is the point where the two paths diverge.
|
||||
# Need a certain number of "os.pardir"s to work up
|
||||
# from the origin to the point of divergence.
|
||||
segments = [os.pardir] * (len(orig_list) - i)
|
||||
# Need to add the diverging part of dest_list.
|
||||
segments += dest_list[i:]
|
||||
if len(segments) == 0:
|
||||
# If they happen to be identical, use os.curdir.
|
||||
return os.curdir
|
||||
else:
|
||||
return os.path.join(*segments).replace('\\', '/')
|
||||
|
||||
def splitall(loc):
|
||||
"""
|
||||
Return a list of the path components in loc. (Used by relpath_).
|
||||
|
||||
The first item in the list will be either ``os.curdir``, ``os.pardir``, empty,
|
||||
or the root directory of loc (for example, ``/`` or ``C:\\``).
|
||||
|
||||
The other items in the list will be strings.
|
||||
|
||||
Adapted from *path.py* by Jason Orendorff.
|
||||
"""
|
||||
parts = []
|
||||
while loc != os.curdir and loc != os.pardir:
|
||||
prev = loc
|
||||
loc, child = os.path.split(prev)
|
||||
if loc == prev:
|
||||
break
|
||||
parts.append(child)
|
||||
parts.append(loc)
|
||||
parts.reverse()
|
||||
return parts
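
# For example (a sketch; paths are illustrative):
#     relpath('/usr/local/lib', '/usr/share')  ->  '../../share'
#     splitall('/usr/local/lib')               ->  ['/', 'usr', 'local', 'lib']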
|
||||
|
||||
#######################################################################
|
||||
# a pre 2.3 walkfiles function - adapted from the path module by Jason Orendorff
|
||||
|
||||
join = os.path.join
|
||||
isdir = os.path.isdir
|
||||
isfile = os.path.isfile
|
||||
|
||||
def walkfiles(thisdir):
|
||||
"""
|
||||
walkfiles(D) -> iterator over files in D, recursively. Yields full file paths.
|
||||
|
||||
Adapted from path.py by Jason Orendorff.
|
||||
"""
|
||||
for child in os.listdir(thisdir):
|
||||
thischild = join(thisdir, child)
|
||||
if isfile(thischild):
|
||||
yield thischild
|
||||
elif isdir(thischild):
|
||||
for f in walkfiles(thischild):
|
||||
yield f
|
||||
|
||||
def walkdirs(thisdir):
|
||||
"""
|
||||
Walk through all the subdirectories in a tree. Recursively yields directory
|
||||
names (full paths).
|
||||
"""
|
||||
for child in os.listdir(thisdir):
|
||||
thischild = join(thisdir, child)
|
||||
if isfile(thischild):
|
||||
continue
|
||||
elif isdir(thischild):
|
||||
for f in walkdirs(thischild):
|
||||
yield f
|
||||
yield thischild
|
||||
|
||||
def walkemptydirs(thisdir):
|
||||
"""
|
||||
Recursively yield names of *empty* directories.
|
||||
|
||||
These are the only paths omitted when using ``walkfiles``.
|
||||
"""
|
||||
if not os.listdir(thisdir):
|
||||
# if the directory is empty.. then yield it
|
||||
yield thisdir
|
||||
for child in os.listdir(thisdir):
|
||||
thischild = join(thisdir, child)
|
||||
if isdir(thischild):
|
||||
for emptydir in walkemptydirs(thischild):
|
||||
yield emptydir
|
||||
|
||||
###############################################################
|
||||
# formatbytes takes a filesize (as returned by os.getsize() )
|
||||
# and formats it for display in one of two ways !!
|
||||
|
||||
def formatbytes(sizeint, configdict=None, **configs):
|
||||
"""
|
||||
Given a file size as an integer, return a nicely formatted string that
|
||||
represents the size. Has various options to control its output.
|
||||
|
||||
You can pass in a dictionary of arguments or keyword arguments. Keyword
|
||||
arguments override the dictionary and there are sensible defaults for options
|
||||
you don't set.
|
||||
|
||||
Options and defaults are as follows :
|
||||
|
||||
* ``forcekb = False`` - If set this forces the output to be in terms
|
||||
of kilobytes and bytes only.
|
||||
|
||||
* ``largestonly = True`` - If set, instead of outputting
|
||||
``1 Mbytes, 307 Kbytes, 478 bytes`` it outputs using only the largest
|
||||
denominator - e.g. ``1.3 Mbytes`` or ``17.2 Kbytes``
|
||||
|
||||
* ``kiloname = 'Kbytes'`` - The string to use for kilobytes
|
||||
|
||||
* ``meganame = 'Mbytes'`` - The string to use for Megabytes
|
||||
|
||||
* ``bytename = 'bytes'`` - The string to use for bytes
|
||||
|
||||
* ``nospace = True`` - If set it outputs ``1Mbytes, 307Kbytes``,
|
||||
notice there is no space.
|
||||
|
||||
Example outputs : ::
|
||||
|
||||
19Mbytes, 75Kbytes, 255bytes
|
||||
2Kbytes, 0bytes
|
||||
23.8Mbytes
|
||||
|
||||
.. note::
|
||||
|
||||
It currently uses the plural form even for singular.
|
||||
"""
|
||||
defaultconfigs = { 'forcekb' : False,
|
||||
'largestonly' : True,
|
||||
'kiloname' : 'Kbytes',
|
||||
'meganame' : 'Mbytes',
|
||||
'bytename' : 'bytes',
|
||||
'nospace' : True}
|
||||
if configdict is None:
|
||||
configdict = {}
|
||||
for entry in configs:
|
||||
# keyword parameters override the dictionary passed in
|
||||
configdict[entry] = configs[entry]
|
||||
#
|
||||
for keyword in defaultconfigs:
|
||||
if not configdict.has_key(keyword):
|
||||
configdict[keyword] = defaultconfigs[keyword]
|
||||
#
|
||||
if configdict['nospace']:
|
||||
space = ''
|
||||
else:
|
||||
space = ' '
|
||||
#
|
||||
mb, kb, rb = bytedivider(sizeint)
|
||||
if configdict['largestonly']:
|
||||
if mb and not configdict['forcekb']:
|
||||
return stringround(mb, kb)+ space + configdict['meganame']
|
||||
elif kb or configdict['forcekb']:
|
||||
if mb and configdict['forcekb']:
|
||||
kb += 1024*mb
|
||||
return stringround(kb, rb) + space+ configdict['kiloname']
|
||||
else:
|
||||
return str(rb) + space + configdict['bytename']
|
||||
else:
|
||||
outstr = ''
|
||||
if mb and not configdict['forcekb']:
|
||||
outstr = str(mb) + space + configdict['meganame'] +', '
|
||||
if kb or configdict['forcekb'] or mb:
|
||||
if configdict['forcekb']:
|
||||
kb += 1024*mb
|
||||
outstr += str(kb) + space + configdict['kiloname'] +', '
|
||||
return outstr + str(rb) + space + configdict['bytename']
|
||||
|
||||
def stringround(main, rest):
|
||||
"""
|
||||
Given a file size in either (mb, kb) or (kb, bytes) - round it
|
||||
appropriately.
|
||||
"""
|
||||
# divide an int by a float... get a float
|
||||
value = main + rest/1024.0
|
||||
return str(round(value, 1))
|
||||
|
||||
def bytedivider(nbytes):
|
||||
"""
|
||||
Given an integer (probably a long integer returned by os.getsize() )
|
||||
it returns a tuple of (megabytes, kilobytes, bytes).
|
||||
|
||||
This can be more easily converted into a formatted string to display the
|
||||
size of the file.
|
||||
"""
|
||||
mb, remainder = divmod(nbytes, 1048576)
|
||||
kb, rb = divmod(remainder, 1024)
|
||||
return (mb, kb, rb)
|
||||
|
||||
########################################
|
||||
|
||||
def fullcopy(src, dst):
|
||||
"""
|
||||
Copy file from src to dst.
|
||||
|
||||
If the dst directory doesn't exist, we will attempt to create it using makedirs.
|
||||
"""
|
||||
import shutil
|
||||
if not os.path.isdir(os.path.dirname(dst)):
|
||||
os.makedirs(os.path.dirname(dst))
|
||||
shutil.copy(src, dst)
|
||||
|
||||
#######################################
|
||||
|
||||
def import_path(fullpath, strict=True):
|
||||
"""
|
||||
Import a file from the full path. Allows you to import from anywhere,
|
||||
something ``__import__`` does not do.
|
||||
|
||||
If strict is ``True`` (the default), raise an ``ImportError`` if the module
|
||||
is found in the "wrong" directory.
|
||||
|
||||
Taken from firedrop2_ by `Hans Nowak`_
|
||||
|
||||
.. _firedrop2: http://www.voidspace.org.uk/python/firedrop2/
|
||||
.. _Hans Nowak: http://zephyrfalcon.org
|
||||
"""
|
||||
path, filename = os.path.split(fullpath)
|
||||
filename, ext = os.path.splitext(filename)
|
||||
sys.path.insert(0, path)
|
||||
try:
|
||||
module = __import__(filename)
|
||||
except ImportError:
|
||||
del sys.path[0]
|
||||
raise
|
||||
del sys.path[0]
|
||||
#
|
||||
if strict:
|
||||
path = os.path.split(module.__file__)[0]
|
||||
# FIXME: doesn't *startswith* allow room for errors ?
|
||||
if not fullpath.startswith(path):
|
||||
raise ImportError, "Module '%s' found, but not in '%s'" % (
|
||||
filename, fullpath)
|
||||
#
|
||||
return module
|
||||
|
||||
##############################################################################
|
||||
# These functions get us our directory name
|
||||
# Even if py2exe or another freeze tool has been used
|
||||
|
||||
def main_is_frozen():
|
||||
"""Return ``True`` if we're running from a frozen program."""
|
||||
import imp
|
||||
return (
|
||||
# new py2exe
|
||||
hasattr(sys, "frozen") or
|
||||
# tools/freeze
|
||||
imp.is_frozen("__main__"))
|
||||
|
||||
def get_main_dir():
|
||||
"""Return the script directory - whether we're frozen or not."""
|
||||
if main_is_frozen():
|
||||
return os.path.abspath(os.path.dirname(sys.executable))
|
||||
return os.path.abspath(os.path.dirname(sys.argv[0]))
|
||||
|
||||
##############################
|
||||
|
||||
def onerror(func, path, exc_info):
|
||||
"""
|
||||
Error handler for ``shutil.rmtree``.
|
||||
|
||||
If the error is due to an access error (read only file)
|
||||
it attempts to add write permission and then retries.
|
||||
|
||||
If the error is for another reason it re-raises the error.
|
||||
|
||||
Usage : ``shutil.rmtree(path, onerror=onerror)``
|
||||
"""
|
||||
import stat
|
||||
if not os.access(path, os.W_OK):
|
||||
# Is the error an access error ?
|
||||
os.chmod(path, stat.S_IWUSR)
|
||||
func(path)
|
||||
else:
|
||||
raise
|
||||
|
||||
##########################################################
|
||||
# A set of object for providing simple, cross-platform file locking
|
||||
|
||||
class LockError(IOError):
|
||||
"""The generic error for locking - it is a subclass of ``IOError``."""
|
||||
|
||||
class Lock(object):
|
||||
"""A simple file lock, compatible with windows and Unixes."""
|
||||
|
||||
def __init__(self, filename, timeout=5, step=0.1):
|
||||
"""
|
||||
Create a ``Lock`` object on file ``filename``
|
||||
|
||||
``timeout`` is the time in seconds to wait before timing out, when
|
||||
attempting to acquire the lock.
|
||||
|
||||
``step`` is the number of seconds to wait in between each attempt to
|
||||
acquire the lock.
|
||||
|
||||
"""
|
||||
self.timeout = timeout
|
||||
self.step = step
|
||||
self.filename = filename
|
||||
self.locked = False
|
||||
|
||||
def lock(self, force=True):
|
||||
"""
|
||||
Lock the file for access by creating a directory of the same name (plus
|
||||
a trailing underscore).
|
||||
|
||||
The file is only locked if you use this class to acquire the lock
|
||||
before accessing.
|
||||
|
||||
If ``force`` is ``True`` (the default), then on timeout we forcibly
|
||||
acquire the lock.
|
||||
|
||||
If ``force`` is ``False``, then on timeout a ``LockError`` is raised.
|
||||
"""
|
||||
if self.locked:
|
||||
raise LockError('%s is already locked' % self.filename)
|
||||
t = 0
|
||||
name = self._mungedname()
|
||||
while t < self.timeout:
|
||||
t += self.step
|
||||
try:
|
||||
if os.path.isdir(name):
|
||||
raise os.error
|
||||
else:
|
||||
os.mkdir(name)
|
||||
except os.error, err:
|
||||
time.sleep(self.step)
|
||||
else:
|
||||
self.locked = True
|
||||
return
|
||||
if force:
|
||||
self.locked = True
|
||||
else:
|
||||
raise LockError('Failed to acquire lock on %s' % self.filename)
|
||||
|
||||
def unlock(self, ignore=True):
|
||||
"""
|
||||
Release the lock.
|
||||
|
||||
If ``ignore`` is ``True`` and removing the lock directory fails, then
|
||||
the error is suppressed. (This may happen if the lock was acquired
|
||||
via a timeout.)
|
||||
"""
|
||||
if not self.locked:
|
||||
raise LockError('%s is not locked' % self.filename)
|
||||
self.locked = False
|
||||
try:
|
||||
os.rmdir(self._mungedname())
|
||||
except os.error, err:
|
||||
if not ignore:
|
||||
raise LockError('unlocking appeared to fail - %s' %
|
||||
self.filename)
|
||||
|
||||
def _mungedname(self):
|
||||
"""
|
||||
Override this in a subclass if you want to change the way ``Lock``
|
||||
creates the directory name.
|
||||
"""
|
||||
return self.filename + '_'
|
||||
|
||||
def __del__(self):
|
||||
"""Auto unlock when object is deleted."""
|
||||
if self.locked:
|
||||
self.unlock()
|
||||
|
||||
class LockFile(Lock):
|
||||
"""
|
||||
A file like object with an exclusive lock, whilst it is open.
|
||||
|
||||
The lock is provided by the ``Lock`` class, which creates a directory
|
||||
with the same name as the file (plus a trailing underscore), to indicate
|
||||
that the file is locked.
|
||||
|
||||
This is simple and cross platform, with some limitations :
|
||||
|
||||
* Unusual process termination could result in the directory
|
||||
being left.
|
||||
* The process acquiring the lock must have permission to create a
|
||||
directory in the same location as the file.
|
||||
* It only locks the file against other processes that attempt to
|
||||
acquire a lock using ``LockFile`` or ``Lock``.
|
||||
"""
|
||||
|
||||
def __init__(self, filename, mode='r', bufsize=-1, timeout=5, step=0.1,
|
||||
force=True):
|
||||
"""
|
||||
Create a file like object that is locked (using the ``Lock`` class)
|
||||
until it is closed.
|
||||
|
||||
The file is only locked against another process that attempts to
|
||||
acquire a lock using ``Lock`` (or ``LockFile``).
|
||||
|
||||
The lock is released automatically when the file is closed.
|
||||
|
||||
The filename, mode and bufsize arguments have the same meaning as for
|
||||
the built in function ``open``.
|
||||
|
||||
The timeout and step arguments have the same meaning as for a ``Lock``
|
||||
object.
|
||||
|
||||
The force argument has the same meaning as for the ``Lock.lock`` method.
|
||||
|
||||
A ``LockFile`` object has all the normal ``file`` methods and
|
||||
attributes.
|
||||
"""
|
||||
Lock.__init__(self, filename, timeout, step)
|
||||
# may raise an error if lock is ``False``
|
||||
self.lock(force)
|
||||
# may also raise an error
|
||||
self._file = open(filename, mode, bufsize)
|
||||
|
||||
def close(self, ignore=True):
|
||||
"""
|
||||
close the file and release the lock.
|
||||
|
||||
ignore has the same meaning as for ``Lock.unlock``
|
||||
"""
|
||||
self._file.close()
|
||||
self.unlock(ignore)
|
||||
|
||||
    def __getattr__(self, name):
        """Delegate appropriate method/attribute calls to the file."""
        if name not in self.__dict__:
            return getattr(self._file, name)
        else:
            return self.__dict__[name]
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
"""Only allow attribute setting that don't clash with the file."""
|
||||
if not '_file' in self.__dict__:
|
||||
Lock.__setattr__(self, name, value)
|
||||
elif hasattr(self._file, name):
|
||||
return setattr(self._file, name, value)
|
||||
else:
|
||||
Lock.__setattr__(self, name, value)
|
||||
|
||||
def __del__(self):
|
||||
"""Auto unlock (and close file) when object is deleted."""
|
||||
if self.locked:
|
||||
self.unlock()
|
||||
self._file.close()
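
# Typical usage of the locking helpers (a sketch; "settings.conf" is just an
# example filename):
#
#     f = LockFile("settings.conf", "w")   # excludes other Lock/LockFile users
#     try:
#         f.write("key = value\n")
#     finally:
#         f.close()                        # releases the lock directory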
|
||||
|
||||
"""
|
||||
|
||||
Changelog
|
||||
=========
|
||||
|
||||
2007/11/08 Version 0.2.6
|
||||
----------------------------
|
||||
|
||||
Bug fix in ``Lock`` corrected misspelling of ``os.path`` (thanks to Thomas Viner).
|
||||
|
||||
Added a workaround in ``Lock`` for operating systems that don't raise
|
||||
``os.error`` when attempting to create a directory that already exists.
|
||||
|
||||
|
||||
2006/07/22 Version 0.2.5
|
||||
-----------------------------
|
||||
|
||||
Bugfix for Python 2.5 compatibility.
|
||||
|
||||
|
||||
2005/12/06 Version 0.2.4
|
||||
-----------------------------
|
||||
|
||||
Fixed bug in ``onerror``. (Missing stat import)
|
||||
|
||||
|
||||
2005/11/26 Version 0.2.3
|
||||
-----------------------------
|
||||
|
||||
Added ``Lock``, ``LockError``, and ``LockFile``
|
||||
|
||||
Added ``__version__``
|
||||
|
||||
|
||||
2005/11/13 Version 0.2.2
|
||||
-----------------------------
|
||||
|
||||
Added the py2exe support functions.
|
||||
|
||||
Added ``onerror``.
|
||||
|
||||
|
||||
2005/08/28 Version 0.2.1
|
||||
-----------------------------
|
||||
|
||||
* Added ``import_path``
|
||||
* Added ``__all__``
|
||||
* Code cleanup
|
||||
|
||||
|
||||
2005/06/01 Version 0.2.0
|
||||
-----------------------------
|
||||
|
||||
Added ``walkdirs`` generator.
|
||||
|
||||
|
||||
2005/03/11 Version 0.1.1
|
||||
-----------------------------
|
||||
|
||||
Added rounding to ``formatbytes`` and improved ``bytedivider`` with ``divmod``.
|
||||
|
||||
Now explicit keyword parameters override the ``configdict`` in ``formatbytes``.
|
||||
|
||||
|
||||
2005/02/18 Version 0.1.0
|
||||
-----------------------------
|
||||
|
||||
The first numbered version.
|
||||
"""
|
|
@ -0,0 +1,188 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the License.
|
||||
#
|
||||
# The Original Code is Bespin.
|
||||
#
|
||||
# The Initial Developer of the Original Code is Mozilla.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2009
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
#
|
||||
|
||||
"""Functions for managing asynchronous operations."""
|
||||
import sqlite3
|
||||
import simplejson
|
||||
import time
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from bespin import config
|
||||
|
||||
try:
|
||||
import beanstalkc
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
log = logging.getLogger("bespin.queue")
|
||||
|
||||
class QueueItem(object):
|
||||
next_jobid = 0
|
||||
|
||||
def __init__(self, id, queue, message, execute, error_handler=None,
|
||||
job=None, use_db=True):
|
||||
if id is None:
|
||||
self.id = QueueItem.next_jobid
|
||||
QueueItem.next_jobid = QueueItem.next_jobid + 1
|
||||
else:
|
||||
self.id = id
|
||||
self.queue = queue
|
||||
self.message = message
|
||||
self.execute = execute
|
||||
self.error_handler = error_handler
|
||||
self.job = job
|
||||
self.use_db = use_db
|
||||
self.session = None
|
||||
|
||||
def run(self):
|
||||
execute = self.execute
|
||||
execute = _resolve_function(execute)
|
||||
|
||||
use_db = self.use_db
|
||||
if use_db:
|
||||
session = config.c.session_factory()
|
||||
self.session = session
|
||||
try:
|
||||
execute(self)
|
||||
if use_db:
|
||||
session.commit()
|
||||
except Exception, e:
|
||||
if use_db:
|
||||
session.rollback()
|
||||
session.close()
|
||||
|
||||
# get a fresh session for the error handler to use
|
||||
session = config.c.session_factory()
|
||||
self.session = session
|
||||
|
||||
try:
|
||||
self.error(e)
|
||||
if use_db:
|
||||
session.commit()
|
||||
except:
|
||||
if use_db:
|
||||
session.rollback()
|
||||
log.exception("Error in error handler for message %s. Original error was %s", self.message, e)
|
||||
finally:
|
||||
if use_db:
|
||||
session.close()
|
||||
return self.id
|
||||
|
||||
def error(self, e):
|
||||
error_handler = self.error_handler
|
||||
error_handler = _resolve_function(error_handler)
|
||||
error_handler(self, e)
|
||||
|
||||
def done(self):
|
||||
if self.job:
|
||||
self.job.delete()
|
||||
|
||||
class BeanstalkQueue(object):
|
||||
"""Manages Bespin jobs within a beanstalkd server.
|
||||
|
||||
http://xph.us/software/beanstalkd/
|
||||
|
||||
The client library used is beanstalkc:
|
||||
|
||||
http://github.com/earl/beanstalkc/tree/master
|
||||
"""
|
||||
|
||||
def __init__(self, host, port):
|
||||
if host is None or port is None:
|
||||
self.conn = beanstalkc.Connection()
|
||||
else:
|
||||
self.conn = beanstalkc.Connection(host=host, port=port)
|
||||
|
||||
def enqueue(self, name, message, execute, error_handler, use_db):
|
||||
message['__execute'] = execute
|
||||
message['__error_handler'] = error_handler
|
||||
message['__use_db'] = use_db
|
||||
c = self.conn
|
||||
c.use(name)
|
||||
id = c.put(simplejson.dumps(message))
|
||||
return id
|
||||
|
||||
def read_queue(self, name):
|
||||
c = self.conn
|
||||
log.debug("Starting to read %s on %s", name, c)
|
||||
c.watch(name)
|
||||
|
||||
while True:
|
||||
log.debug("Reserving next job")
|
||||
item = c.reserve()
|
||||
if item is not None:
|
||||
log.debug("Job received (%s)", item.jid)
|
||||
message = simplejson.loads(item.body)
|
||||
execute = message.pop('__execute')
|
||||
error_handler = message.pop('__error_handler')
|
||||
use_db = message.pop('__use_db')
|
||||
qi = QueueItem(item.jid, name, message,
|
||||
execute, error_handler=error_handler,
|
||||
job=item, use_db=use_db)
|
||||
yield qi
|
||||
|
||||
def close(self):
|
||||
self.conn.close()
|
||||
|
||||
def _resolve_function(namestring):
|
||||
modulename, funcname = namestring.split(":")
|
||||
module = __import__(modulename, fromlist=[funcname])
|
||||
return getattr(module, funcname)
|
||||
|
||||
def enqueue(queue_name, message, execute, error_handler=None, use_db=True):
|
||||
if config.c.queue:
|
||||
id = config.c.queue.enqueue(queue_name, message, execute,
|
||||
error_handler, use_db)
|
||||
log.debug("Running job asynchronously (%s)", id)
|
||||
return id
|
||||
else:
|
||||
qi = QueueItem(None, queue_name, message, execute,
|
||||
error_handler=error_handler, use_db=use_db)
|
||||
log.debug("Running job synchronously (%s)", qi.id)
|
||||
return qi.run()
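
# Example of dispatching a job through this layer (a sketch; the message keys
# and the "module:function" targets below are hypothetical):
#
#     from bespin import queue
#     jobid = queue.enqueue("vcs",
#                           {"project": "myproj", "command": "clone"},
#                           execute="bespin.vcs:run_command",
#                           error_handler="bespin.vcs:command_error")
#
# With config.c.queue unset the job runs synchronously in-process; with a
# BeanstalkQueue configured it is serialized to JSON and left for a worker.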
|
||||
|
||||
def process_queue(args=None):
|
||||
log.info("Bespin queue worker")
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
|
||||
if args:
|
||||
config.set_profile(args.pop(0))
|
||||
else:
|
||||
config.set_profile("dev")
|
||||
config.c.async_jobs=True
|
||||
|
||||
if args:
|
||||
config.load_pyconfig(args.pop(0))
|
||||
|
||||
config.activate_profile()
|
||||
|
||||
bq = config.c.queue
|
||||
log.debug("Queue: %s", bq)
|
||||
for qi in bq.read_queue("vcs"):
|
||||
log.info("Processing job %s", qi.id)
|
||||
log.debug("Message: %s", qi.message)
|
||||
qi.run()
|
||||
qi.done()
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
from datetime import date
import sys

import beanstalkc
import redis

def command():
    if len(sys.argv) < 5:
        print "Usage: <beanstalk host> <beanstalk port> <redis host> <redis port>"
        sys.exit(1)
    bhost, bport, rhost, rport = sys.argv[1:5]
    bport = int(bport)
    rport = int(rport)
    beanstalk = beanstalkc.Connection(host=bhost, port=bport)
    redis_conn = redis.Redis(rhost, rport)
    try:
        queue_size = beanstalk.stats_tube('vcs')['current-jobs-ready']
    except beanstalkc.CommandFailed:
        queue_size = 0

    today = date.today().strftime("%Y%m%d")
    redis_conn.push("queue_" + today, queue_size, tail=False)
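
# Invocation sketch (the console-script name is defined elsewhere; hosts and
# ports here are hypothetical):
#
#     <command> localhost 11300 localhost 6379
#
# Each run appends the current depth of the "vcs" tube to a per-day Redis
# list named queue_YYYYMMDD, e.g. queue_20090715.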
@ -0,0 +1,955 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
""" redis.py - A client for the Redis daemon.
|
||||
|
||||
"""
|
||||
|
||||
__author__ = "Ludovico Magnocavallo <ludo\x40qix\x2eit>"
|
||||
__copyright__ = "Copyright 2009, Ludovico Magnocavallo"
|
||||
__license__ = "MIT"
|
||||
__version__ = "0.5"
|
||||
__revision__ = "$LastChangedRevision: 175 $"[22:-2]
|
||||
__date__ = "$LastChangedDate: 2009-03-17 16:15:55 +0100 (Mar, 17 Mar 2009) $"[18:-2]
|
||||
|
||||
|
||||
# TODO: Redis._get_multi_response
|
||||
|
||||
|
||||
import errno
import socket
|
||||
|
||||
|
||||
BUFSIZE = 4096
|
||||
|
||||
|
||||
class RedisError(Exception): pass
|
||||
class ConnectionError(RedisError): pass
|
||||
class ResponseError(RedisError): pass
|
||||
class InvalidResponse(RedisError): pass
|
||||
class InvalidData(RedisError): pass
|
||||
|
||||
|
||||
class Redis(object):
|
||||
"""The main Redis client.
|
||||
"""
|
||||
|
||||
def __init__(self, host=None, port=None, timeout=None, db=None):
|
||||
self.host = host or 'localhost'
|
||||
self.port = port or 6379
|
||||
if timeout:
|
||||
socket.setdefaulttimeout(timeout)
|
||||
self._sock = None
|
||||
self._fp = None
|
||||
self.db = db
|
||||
|
||||
def _write(self, s):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.connect()
|
||||
>>> r._sock.close()
|
||||
>>> try:
|
||||
... r._write('pippo')
|
||||
... except ConnectionError, e:
|
||||
... print e
|
||||
Error 9 while writing to socket. Bad file descriptor.
|
||||
>>>
|
||||
>>>
|
||||
"""
|
||||
try:
|
||||
self._sock.sendall(s)
|
||||
except socket.error, e:
|
||||
if e.args[0] == 32:
|
||||
# broken pipe
|
||||
self.disconnect()
|
||||
raise ConnectionError("Error %s while writing to socket. %s." % tuple(e.args))
|
||||
|
||||
    def _read(self):
        try:
            data = self._fp.readline()
        except socket.error, e:
            if e.args and e.args[0] == errno.EAGAIN:
                return
            self.disconnect()
            raise ConnectionError("Error %s while reading from socket. %s." % tuple(e.args))
        if not data:
            self.disconnect()
            raise ConnectionError("Socket connection closed when reading.")
        return data
|
||||
|
||||
def ping(self):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.ping()
|
||||
'PONG'
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('PING\r\n')
|
||||
return self.get_response()
|
||||
|
||||
def set(self, name, value, preserve=False):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.set('a', 'pippo')
|
||||
'OK'
|
||||
>>> try:
|
||||
... r.set('a', u'pippo \u3235')
|
||||
... except InvalidData, e:
|
||||
... print e
|
||||
Error encoding unicode value for key 'a': 'ascii' codec can't encode character u'\u3235' in position 15: ordinal not in range(128).
|
||||
>>> r.set('b', 105.2)
|
||||
'OK'
|
||||
>>> r.set('b', 'xxx', preserve=True)
|
||||
0
|
||||
>>> r.get('b')
|
||||
'105.2'
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
# the following will raise an error for unicode values that can't be encoded to ascii
|
||||
# we could probably add an 'encoding' arg to init, but then what do we do with get()?
|
||||
# convert back to unicode? and what about ints, or pickled values?
|
||||
try:
|
||||
value = value if isinstance(value, basestring) else str(value)
|
||||
self._write('%s %s %s\r\n%s\r\n' % (
|
||||
'SETNX' if preserve else 'SET', name, len(value), value
|
||||
))
|
||||
except UnicodeEncodeError, e:
|
||||
raise InvalidData("Error encoding unicode value for key '%s': %s." % (name, e))
|
||||
return self.get_response()
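
    # On the wire, the call above uses Redis's old inline protocol: the
    # command line carries the value length and the value follows on the
    # next line, e.g. "SET a 5\r\npippo\r\n". The server answers with a
    # status line such as "+OK", or an integer such as ":0" for a SETNX
    # that preserved an existing key.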
|
||||
|
||||
def get(self, name):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.set('a', 'pippo'), r.set('b', 15), r.set('c', ' \\r\\naaa\\nbbb\\r\\ncccc\\nddd\\r\\n '), r.set('d', '\\r\\n')
|
||||
('OK', 'OK', 'OK', 'OK')
|
||||
>>> r.get('a')
|
||||
'pippo'
|
||||
>>> r.get('b')
|
||||
'15'
|
||||
>>> r.get('d')
|
||||
'\\r\\n'
|
||||
>>> r.get('b')
|
||||
'15'
|
||||
>>> r.get('c')
|
||||
' \\r\\naaa\\nbbb\\r\\ncccc\\nddd\\r\\n '
|
||||
>>> r.get('c')
|
||||
' \\r\\naaa\\nbbb\\r\\ncccc\\nddd\\r\\n '
|
||||
>>> r.get('ajhsd')
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('GET %s\r\n' % name)
|
||||
return self.get_response()
|
||||
|
||||
def mget(self, *args):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.set('a', 'pippo'), r.set('b', 15), r.set('c', '\\r\\naaa\\nbbb\\r\\ncccc\\nddd\\r\\n'), r.set('d', '\\r\\n')
|
||||
('OK', 'OK', 'OK', 'OK')
|
||||
>>> r.mget('a', 'b', 'c', 'd')
|
||||
['pippo', '15', '\\r\\naaa\\nbbb\\r\\ncccc\\nddd\\r\\n', '\\r\\n']
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('MGET %s\r\n' % ' '.join(args))
|
||||
return self.get_response()
|
||||
|
||||
def incr(self, name, amount=1):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('a')
|
||||
1
|
||||
>>> r.incr('a')
|
||||
1
|
||||
>>> r.incr('a')
|
||||
2
|
||||
>>> r.incr('a', 2)
|
||||
4
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
if amount == 1:
|
||||
self._write('INCR %s\r\n' % name)
|
||||
else:
|
||||
self._write('INCRBY %s %s\r\n' % (name, amount))
|
||||
return self.get_response()
|
||||
|
||||
def decr(self, name, amount=1):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> if r.get('a'):
|
||||
... r.delete('a')
|
||||
... else:
|
||||
... print 1
|
||||
1
|
||||
>>> r.decr('a')
|
||||
-1
|
||||
>>> r.decr('a')
|
||||
-2
|
||||
>>> r.decr('a', 5)
|
||||
-7
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
if amount == 1:
|
||||
self._write('DECR %s\r\n' % name)
|
||||
else:
|
||||
self._write('DECRBY %s %s\r\n' % (name, amount))
|
||||
return self.get_response()
|
||||
|
||||
def exists(self, name):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.exists('dsjhfksjdhfkdsjfh')
|
||||
0
|
||||
>>> r.set('a', 'a')
|
||||
'OK'
|
||||
>>> r.exists('a')
|
||||
1
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('EXISTS %s\r\n' % name)
|
||||
return self.get_response()
|
||||
|
||||
def delete(self, name):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('dsjhfksjdhfkdsjfh')
|
||||
0
|
||||
>>> r.set('a', 'a')
|
||||
'OK'
|
||||
>>> r.delete('a')
|
||||
1
|
||||
>>> r.exists('a')
|
||||
0
|
||||
>>> r.delete('a')
|
||||
0
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('DEL %s\r\n' % name)
|
||||
return self.get_response()
|
||||
|
||||
def get_type(self, name):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.set('a', 3)
|
||||
'OK'
|
||||
>>> r.get_type('a')
|
||||
'string'
|
||||
>>> r.get_type('zzz')
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('TYPE %s\r\n' % name)
|
||||
res = self.get_response()
|
||||
return None if res == 'none' else res
|
||||
|
||||
def keys(self, pattern):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.flush()
|
||||
'OK'
|
||||
>>> r.set('a', 'a')
|
||||
'OK'
|
||||
>>> r.keys('a*')
|
||||
['a']
|
||||
>>> r.set('a2', 'a')
|
||||
'OK'
|
||||
>>> r.keys('a*')
|
||||
['a', 'a2']
|
||||
>>> r.delete('a2')
|
||||
1
|
||||
>>> r.keys('sjdfhskjh*')
|
||||
[]
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('KEYS %s\r\n' % pattern)
|
||||
return self.get_response().split()
|
||||
|
||||
def randomkey(self):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.set('a', 'a')
|
||||
'OK'
|
||||
>>> isinstance(r.randomkey(), str)
|
||||
True
|
||||
>>>
|
||||
"""
|
||||
#raise NotImplementedError("Implemented but buggy, do not use.")
|
||||
self.connect()
|
||||
self._write('RANDOMKEY\r\n')
|
||||
return self.get_response()
|
||||
|
||||
def rename(self, src, dst, preserve=False):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> try:
|
||||
... r.rename('a', 'a')
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
source and destination objects are the same
|
||||
>>> r.rename('a', 'b')
|
||||
'OK'
|
||||
>>> try:
|
||||
... r.rename('a', 'b')
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
no such key
|
||||
>>> r.set('a', 1)
|
||||
'OK'
|
||||
>>> r.rename('b', 'a', preserve=True)
|
||||
0
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
if preserve:
|
||||
self._write('RENAMENX %s %s\r\n' % (src, dst))
|
||||
return self.get_response()
|
||||
else:
|
||||
self._write('RENAME %s %s\r\n' % (src, dst))
|
||||
return self.get_response() #.strip()
|
||||
|
||||
def expire(self, name, time):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.set('a', 1)
|
||||
'OK'
|
||||
>>> r.expire('a', 1)
|
||||
1
|
||||
>>> r.expire('zzzzz', 1)
|
||||
0
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('EXPIRE %s %s\r\n' % (name, time))
|
||||
return self.get_response()
|
||||
|
||||
def push(self, name, value, tail=False):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> r.push('l', 'a')
|
||||
'OK'
|
||||
>>> r.set('a', 'a')
|
||||
'OK'
|
||||
>>> try:
|
||||
... r.push('a', 'a')
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
Operation against a key holding the wrong kind of value
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
# same considerations on unicode as in set() apply here
|
||||
try:
|
||||
value = value if isinstance(value, basestring) else str(value)
|
||||
self._write('%s %s %s\r\n%s\r\n' % (
|
||||
'LPUSH' if tail else 'RPUSH', name, len(value), value
|
||||
))
|
||||
except UnicodeEncodeError, e:
|
||||
raise InvalidData("Error encoding unicode value for element in list '%s': %s." % (name, e))
|
||||
return self.get_response()
|
||||
|
||||
def llen(self, name):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> r.push('l', 'a')
|
||||
'OK'
|
||||
>>> r.llen('l')
|
||||
1
|
||||
>>> r.push('l', 'a')
|
||||
'OK'
|
||||
>>> r.llen('l')
|
||||
2
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('LLEN %s\r\n' % name)
|
||||
return self.get_response()
|
||||
|
||||
def lrange(self, name, start, end):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> r.lrange('l', 0, 1)
|
||||
[]
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.lrange('l', 0, 1)
|
||||
['aaa']
|
||||
>>> r.push('l', 'bbb')
|
||||
'OK'
|
||||
>>> r.lrange('l', 0, 0)
|
||||
['aaa']
|
||||
>>> r.lrange('l', 0, 1)
|
||||
['aaa', 'bbb']
|
||||
>>> r.lrange('l', -1, 0)
|
||||
[]
|
||||
>>> r.lrange('l', -1, -1)
|
||||
['bbb']
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('LRANGE %s %s %s\r\n' % (name, start, end))
|
||||
return self.get_response()
|
||||
|
||||
def ltrim(self, name, start, end):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> try:
|
||||
... r.ltrim('l', 0, 1)
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
no such key
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.push('l', 'bbb')
|
||||
'OK'
|
||||
>>> r.push('l', 'ccc')
|
||||
'OK'
|
||||
>>> r.ltrim('l', 0, 1)
|
||||
'OK'
|
||||
>>> r.llen('l')
|
||||
2
|
||||
>>> r.ltrim('l', 99, 95)
|
||||
'OK'
|
||||
>>> r.llen('l')
|
||||
0
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('LTRIM %s %s %s\r\n' % (name, start, end))
|
||||
return self.get_response()
|
||||
|
||||
def lindex(self, name, index):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> res = r.delete('l')
|
||||
>>> r.lindex('l', 0)
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.lindex('l', 0)
|
||||
'aaa'
|
||||
>>> r.lindex('l', 2)
|
||||
>>> r.push('l', 'ccc')
|
||||
'OK'
|
||||
>>> r.lindex('l', 1)
|
||||
'ccc'
|
||||
>>> r.lindex('l', -1)
|
||||
'ccc'
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('LINDEX %s %s\r\n' % (name, index))
|
||||
return self.get_response()
|
||||
|
||||
def pop(self, name, tail=False):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> r.pop('l')
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.push('l', 'bbb')
|
||||
'OK'
|
||||
>>> r.pop('l')
|
||||
'aaa'
|
||||
>>> r.pop('l')
|
||||
'bbb'
|
||||
>>> r.pop('l')
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.push('l', 'bbb')
|
||||
'OK'
|
||||
>>> r.pop('l', tail=True)
|
||||
'bbb'
|
||||
>>> r.pop('l')
|
||||
'aaa'
|
||||
>>> r.pop('l')
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('%s %s\r\n' % ('RPOP' if tail else 'LPOP', name))
|
||||
return self.get_response()
|
||||
|
||||
def lset(self, name, index, value):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> try:
|
||||
... r.lset('l', 0, 'a')
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
no such key
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> try:
|
||||
... r.lset('l', 1, 'a')
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
index out of range
|
||||
>>> r.lset('l', 0, 'bbb')
|
||||
'OK'
|
||||
>>> r.lrange('l', 0, 1)
|
||||
['bbb']
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
try:
|
||||
value = value if isinstance(value, basestring) else str(value)
|
||||
self._write('LSET %s %s %s\r\n%s\r\n' % (
|
||||
name, index, len(value), value
|
||||
))
|
||||
except UnicodeEncodeError, e:
|
||||
raise InvalidData("Error encoding unicode value for element %s in list '%s': %s." % (index, name, e))
|
||||
return self.get_response()
|
||||
|
||||
def lrem(self, name, value, num=0):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.push('l', 'bbb')
|
||||
'OK'
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.lrem('l', 'aaa')
|
||||
2
|
||||
>>> r.lrange('l', 0, 10)
|
||||
['bbb']
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.lrem('l', 'aaa', 1)
|
||||
1
|
||||
>>> r.lrem('l', 'aaa', 1)
|
||||
1
|
||||
>>> r.lrem('l', 'aaa', 1)
|
||||
0
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
try:
|
||||
value = value if isinstance(value, basestring) else str(value)
|
||||
self._write('LREM %s %s %s\r\n%s\r\n' % (
|
||||
name, num, len(value), value
|
||||
))
|
||||
except UnicodeEncodeError, e:
|
||||
raise InvalidData("Error encoding unicode value for element %s in list '%s': %s." % (index, name, e))
|
||||
return self.get_response()
|
||||
|
||||
def sort(self, name, by=None, get=None, start=None, num=None, desc=False, alpha=False):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> r.push('l', 'ccc')
|
||||
'OK'
|
||||
>>> r.push('l', 'aaa')
|
||||
'OK'
|
||||
>>> r.push('l', 'ddd')
|
||||
'OK'
|
||||
>>> r.push('l', 'bbb')
|
||||
'OK'
|
||||
>>> r.sort('l', alpha=True)
|
||||
['aaa', 'bbb', 'ccc', 'ddd']
|
||||
>>> r.delete('l')
|
||||
1
|
||||
>>> for i in range(1, 5):
|
||||
... res = r.push('l', 1.0 / i)
|
||||
>>> r.sort('l')
|
||||
['0.25', '0.333333333333', '0.5', '1.0']
|
||||
>>> r.sort('l', desc=True)
|
||||
['1.0', '0.5', '0.333333333333', '0.25']
|
||||
>>> r.sort('l', desc=True, start=2, num=1)
|
||||
['0.333333333333']
|
||||
>>> r.set('weight_0.5', 10)
|
||||
'OK'
|
||||
>>> r.sort('l', desc=True, by='weight_*')
|
||||
['0.5', '1.0', '0.333333333333', '0.25']
|
||||
>>> for i in r.sort('l', desc=True):
|
||||
... res = r.set('test_%s' % i, 100 - float(i))
|
||||
>>> r.sort('l', desc=True, get='test_*')
|
||||
['99.0', '99.5', '99.6666666667', '99.75']
|
||||
>>> r.sort('l', desc=True, by='weight_*', get='test_*')
|
||||
['99.5', '99.0', '99.6666666667', '99.75']
|
||||
>>> r.sort('l', desc=True, by='weight_*', get='missing_*')
|
||||
[None, None, None, None]
|
||||
>>>
|
||||
"""
|
||||
stmt = ['SORT', name]
|
||||
if by:
|
||||
stmt.append("BY %s" % by)
|
||||
if start is not None and num is not None:  # allow a LIMIT offset of 0
|
||||
stmt.append("LIMIT %s %s" % (start, num))
|
||||
if get is None:
|
||||
pass
|
||||
elif isinstance(get, basestring):
|
||||
stmt.append("GET %s" % get)
|
||||
elif isinstance(get, list) or isinstance(get, tuple):
|
||||
for g in get:
|
||||
stmt.append("GET %s" % g)
|
||||
else:
|
||||
raise RedisError("Invalid parameter 'get' for Redis sort")
|
||||
if desc:
|
||||
stmt.append("DESC")
|
||||
if alpha:
|
||||
stmt.append("ALPHA")
|
||||
self.connect()
|
||||
self._write(' '.join(stmt + ["\r\n"]))
|
||||
return self.get_response()
|
||||
|
||||
def sadd(self, name, value):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> res = r.delete('s')
|
||||
>>> r.sadd('s', 'a')
|
||||
1
|
||||
>>> r.sadd('s', 'b')
|
||||
1
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
# same considerations on unicode as in set() apply here
|
||||
try:
|
||||
value = value if isinstance(value, basestring) else str(value)
|
||||
self._write('SADD %s %s\r\n%s\r\n' % (
|
||||
name, len(value), value
|
||||
))
|
||||
except UnicodeEncodeError, e:
|
||||
raise InvalidData("Error encoding unicode value for element in set '%s': %s." % (name, e))
|
||||
return self.get_response()
|
||||
|
||||
def srem(self, name, value):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('s')
|
||||
1
|
||||
>>> r.srem('s', 'aaa')
|
||||
0
|
||||
>>> r.sadd('s', 'b')
|
||||
1
|
||||
>>> r.srem('s', 'b')
|
||||
1
|
||||
>>> r.sismember('s', 'b')
|
||||
0
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
# same considerations on unicode as in set() apply here
|
||||
try:
|
||||
value = value if isinstance(value, basestring) else str(value)
|
||||
self._write('SREM %s %s\r\n%s\r\n' % (
|
||||
name, len(value), value
|
||||
))
|
||||
except UnicodeEncodeError, e:
|
||||
raise InvalidData("Error encoding unicode value for element in set '%s': %s." % (name, e))
|
||||
return self.get_response()
|
||||
|
||||
def sismember(self, name, value):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('s')
|
||||
1
|
||||
>>> r.sismember('s', 'b')
|
||||
0
|
||||
>>> r.sadd('s', 'a')
|
||||
1
|
||||
>>> r.sismember('s', 'b')
|
||||
0
|
||||
>>> r.sismember('s', 'a')
|
||||
1
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
# same considerations on unicode as in set() apply here
|
||||
try:
|
||||
value = value if isinstance(value, basestring) else str(value)
|
||||
self._write('SISMEMBER %s %s\r\n%s\r\n' % (
|
||||
name, len(value), value
|
||||
))
|
||||
except UnicodeEncodeError, e:
|
||||
raise InvalidData("Error encoding unicode value for element in set '%s': %s." % (name, e))
|
||||
return self.get_response()
|
||||
|
||||
def sinter(self, *args):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> res = r.delete('s1')
|
||||
>>> res = r.delete('s2')
|
||||
>>> res = r.delete('s3')
|
||||
>>> r.sadd('s1', 'a')
|
||||
1
|
||||
>>> r.sadd('s2', 'a')
|
||||
1
|
||||
>>> r.sadd('s3', 'b')
|
||||
1
|
||||
>>> try:
|
||||
... r.sinter()
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
wrong number of arguments
|
||||
>>> try:
|
||||
... r.sinter('l')
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
Operation against a key holding the wrong kind of value
|
||||
>>> r.sinter('s1', 's2', 's3')
|
||||
set([])
|
||||
>>> r.sinter('s1', 's2')
|
||||
set(['a'])
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('SINTER %s\r\n' % ' '.join(args))
|
||||
return set(self.get_response())
|
||||
|
||||
def sinterstore(self, dest, *args):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> res = r.delete('s1')
|
||||
>>> res = r.delete('s2')
|
||||
>>> res = r.delete('s3')
|
||||
>>> r.sadd('s1', 'a')
|
||||
1
|
||||
>>> r.sadd('s2', 'a')
|
||||
1
|
||||
>>> r.sadd('s3', 'b')
|
||||
1
|
||||
>>> r.sinterstore('s_s', 's1', 's2', 's3')
|
||||
'OK'
|
||||
>>> r.sinterstore('s_s', 's1', 's2')
|
||||
'OK'
|
||||
>>> r.smembers('s_s')
|
||||
set(['a'])
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('SINTERSTORE %s %s\r\n' % (dest, ' '.join(args)))
|
||||
return self.get_response()
|
||||
|
||||
def smembers(self, name):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('s')
|
||||
1
|
||||
>>> r.sadd('s', 'a')
|
||||
1
|
||||
>>> r.sadd('s', 'b')
|
||||
1
|
||||
>>> try:
|
||||
... r.smembers('l')
|
||||
... except ResponseError, e:
|
||||
... print e
|
||||
Operation against a key holding the wrong kind of value
|
||||
>>> r.smembers('s')
|
||||
set(['a', 'b'])
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('SMEMBERS %s\r\n' % name)
|
||||
return set(self.get_response())
|
||||
|
||||
def select(self, db):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.delete('a')
|
||||
1
|
||||
>>> r.select(10)
|
||||
'OK'
|
||||
>>> r.set('a', 1)
|
||||
'OK'
|
||||
>>> r.select(9)
|
||||
'OK'
|
||||
>>> r.get('a')
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('SELECT %s\r\n' % db)
|
||||
return self.get_response()
|
||||
|
||||
def move(self, name, db):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.set('a', 'a')
|
||||
'OK'
|
||||
>>> r.select(10)
|
||||
'OK'
|
||||
>>> if r.get('a'):
|
||||
... r.delete('a')
|
||||
... else:
|
||||
... print 1
|
||||
1
|
||||
>>> r.select(9)
|
||||
'OK'
|
||||
>>> r.move('a', 10)
|
||||
1
|
||||
>>> r.get('a')
|
||||
>>> r.select(10)
|
||||
'OK'
|
||||
>>> r.get('a')
|
||||
'a'
|
||||
>>> r.select(9)
|
||||
'OK'
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('MOVE %s %s\r\n' % (name, db))
|
||||
return self.get_response()
|
||||
|
||||
def save(self, background=False):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.save()
|
||||
'OK'
|
||||
>>> try:
|
||||
... resp = r.save(background=True)
|
||||
... except ResponseError, e:
|
||||
... assert str(e) == 'background save already in progress', str(e)
|
||||
... else:
|
||||
... assert resp == 'OK'
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
if background:
|
||||
self._write('BGSAVE\r\n')
|
||||
else:
|
||||
self._write('SAVE\r\n')
|
||||
return self.get_response()
|
||||
|
||||
def lastsave(self):
|
||||
"""
|
||||
>>> import time
|
||||
>>> r = Redis(db=9)
|
||||
>>> t = int(time.time())
|
||||
>>> r.save()
|
||||
'OK'
|
||||
>>> r.lastsave() >= t
|
||||
True
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('LASTSAVE\r\n')
|
||||
return self.get_response()
|
||||
|
||||
def flush(self, all_dbs=False):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.flush()
|
||||
'OK'
|
||||
>>> # r.flush(all_dbs=True)
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('%s\r\n' % ('FLUSHALL' if all_dbs else 'FLUSHDB'))
|
||||
return self.get_response()
|
||||
|
||||
def info(self):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> info = r.info()
|
||||
>>> info and isinstance(info, dict)
|
||||
True
|
||||
>>> isinstance(info.get('connected_clients'), int)
|
||||
True
|
||||
>>>
|
||||
"""
|
||||
self.connect()
|
||||
self._write('INFO\r\n')
|
||||
info = dict()
|
||||
for l in self.get_response().split('\r\n'):
|
||||
if not l:
|
||||
continue
|
||||
k, v = l.split(':', 1)
|
||||
info[k] = int(v) if v.isdigit() else v
|
||||
return info
|
||||
|
||||
def get_response(self):
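# Replies are typed by their first byte: '-' is an error, '+' a status
# line, '*' a multi-bulk count followed by bulk values; ':' (integer)
# and '$' (bulk) fall through to _get_value() below.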
|
||||
data = self._read().strip()
|
||||
c = data[0]
|
||||
if c == '-':
|
||||
raise ResponseError(data[5:] if data[:5] == '-ERR ' else data[1:])
|
||||
if c == '+':
|
||||
return data[1:]
|
||||
if c == '*':
|
||||
try:
|
||||
num = int(data[1:])
|
||||
except (TypeError, ValueError):
|
||||
raise InvalidResponse("Cannot convert multi-response header '%s' to integer" % data)
|
||||
result = list()
|
||||
for i in range(num):
|
||||
result.append(self._get_value())
|
||||
return result
|
||||
return self._get_value(data)
|
||||
|
||||
def _get_value(self, data=None):
|
||||
data = data or self._read().strip()
|
||||
if data == '$-1':
|
||||
return None
|
||||
try:
|
||||
c, i = data[0], (int(data[1:]) if data.find('.') == -1 else float(data[1:]))
|
||||
except ValueError:
|
||||
raise InvalidResponse("Cannot convert data '%s' to integer" % data)
|
||||
if c == ':':
|
||||
return i
|
||||
if c != '$':
|
||||
raise InvalidResponse("Unkown response prefix for '%s'" % data)
|
||||
buf = []
|
||||
while True:
|
||||
data = self._read()
|
||||
i -= len(data)
|
||||
buf.append(data)
|
||||
if i < 0:
|
||||
break
|
||||
return ''.join(buf)[:-2]
|
||||
|
||||
def disconnect(self):
|
||||
if isinstance(self._sock, socket.socket):
|
||||
try:
|
||||
self._sock.close()
|
||||
except socket.error:
|
||||
pass
|
||||
self._sock = None
|
||||
self._fp = None
|
||||
|
||||
def connect(self):
|
||||
"""
|
||||
>>> r = Redis(db=9)
|
||||
>>> r.connect()
|
||||
>>> isinstance(r._sock, socket.socket)
|
||||
True
|
||||
>>> r.disconnect()
|
||||
>>>
|
||||
"""
|
||||
if isinstance(self._sock, socket.socket):
|
||||
return
|
||||
try:
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
sock.connect((self.host, self.port))
|
||||
except socket.error, e:
|
||||
raise ConnectionError("Error %s connecting to %s:%s. %s." % (e.args[0], self.host, self.port, e.args[1]))
|
||||
else:
|
||||
self._sock = sock
|
||||
self._fp = self._sock.makefile('r')
|
||||
if self.db:
|
||||
self.select(self.db)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import doctest
|
||||
doctest.testmod()
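# A minimal usage sketch (assumes a reachable Redis server on the
# default host/port; db=9 mirrors the doctests above):
#
#   r = Redis(db=9)
#   r.set('greeting', 'hello')
#   print r.get('greeting')   # -> 'hello'
#   r.push('q', 'job-1')      # appends (RPUSH) by default
#   print r.pop('q')          # -> 'job-1' (LPOP)
#   r.disconnect()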
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
"""Methods for tracking statistics."""
|
||||
|
||||
from datetime import date
|
||||
import logging
|
||||
|
||||
log = logging.getLogger("bespin.stats")
|
||||
|
||||
class DoNothingStats(object):
|
||||
def incr(self, key, by=1):
|
||||
return 0
|
||||
|
||||
def decr(self, key, by=1):
|
||||
return 0
|
||||
|
||||
def multiget(self, keys):
|
||||
return dict()
|
||||
|
||||
def disconnect(self):
|
||||
pass
|
||||
|
||||
def _get_key(key):
|
||||
if "_DATE" in key:
|
||||
return key.replace("DATE", date.today().strftime("%Y%m%d"))
|
||||
return key
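# e.g. _get_key("users_DATE") -> "users_20090324" on a hypothetical run
# date of 2009-03-24; keys without "_DATE" pass through unchanged.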
|
||||
|
||||
class MemoryStats(object):
|
||||
def __init__(self):
|
||||
self.storage = {}
|
||||
|
||||
def incr(self, key, by=1):
|
||||
key = _get_key(key)
|
||||
current = self.storage.setdefault(key, 0)
|
||||
newval = current + by
|
||||
self.storage[key] = newval
|
||||
return newval
|
||||
|
||||
def decr(self, key, by=1):
|
||||
return self.incr(key, -1*by)
|
||||
|
||||
def multiget(self, keys):
|
||||
return dict((key, self.storage.get(key)) for key in keys)
|
||||
|
||||
def disconnect(self):
|
||||
pass
|
||||
|
||||
class RedisStats(object):
|
||||
def __init__(self, redis):
|
||||
self.redis = redis
|
||||
|
||||
def incr(self, key, by=1):
|
||||
key = _get_key(key)
|
||||
try:
|
||||
return self.redis.incr(key, by)
|
||||
except Exception:
|
||||
log.exception("Problem incrementing stat %s", key)
|
||||
|
||||
def decr(self, key, by=1):
|
||||
key = _get_key(key)
|
||||
try:
|
||||
return self.redis.decr(key, by)
|
||||
except Exception:
|
||||
log.exception("Problem decrementing stat %s", key)
|
||||
|
||||
def multiget(self, keys):
|
||||
return dict(zip(keys, self.redis.mget(*keys)))
|
||||
|
||||
def disconnect(self):
|
||||
self.redis.disconnect()
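# A minimal wiring sketch (which backend is used is decided elsewhere,
# presumably by the config profile; Redis(db=9) is an assumption
# borrowed from the doctests in the bundled client):
#
#   stats = MemoryStats()                 # or RedisStats(Redis(db=9))
#   stats.incr("active_users_DATE")       # per-day counter via _get_key
#   print stats.multiget(["active_users_20090324"])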
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
/*
|
||||
* This sample code
|
||||
* is really lame
|
||||
* but we're tired
|
||||
*/
|
||||
function welcome() {
|
||||
return "to Bespin";
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Bespin Editor Sample</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Welcome to Bespin!</h1>
|
||||
<p>You can edit and preview within this interface.</p>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,13 @@
|
|||
Welcome to Bespin!
|
||||
------------------
|
||||
|
||||
A few helpful tips:
|
||||
|
||||
* To jump between the command line and the editor, simply hit Ctrl-J
|
||||
|
||||
* To turn on "strictlines" mode, which means that you can't click anywhere in the editor, and instead are restricted to where content is, type: > set strictlines on
|
||||
|
||||
Check out:
|
||||
|
||||
* FAQ: https://wiki.mozilla.org/Labs/Bespin/FAQ
|
||||
* Our initial announcement: http://labs.mozilla.com/2009/02/introducing-bespin
|
Binary file not shown.
|
@ -0,0 +1,37 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License
|
||||
# Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance
|
||||
# with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS"
|
||||
# basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
|
||||
# License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Bespin.
|
||||
#
|
||||
# The Initial Developer of the Original Code is Mozilla.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2009
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
#
|
||||
|
||||
from webtest import TestApp
|
||||
|
||||
class BespinTestApp(TestApp):
|
||||
def _make_environ(self, extra_environ=None):
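# The matching header/cookie pair below suggests a double-submit
# anti-CSRF check in the server; every test request carries both
# values so it passes that check.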
|
||||
environ = super(BespinTestApp, self)._make_environ(extra_environ)
|
||||
environ["HTTP_DOMAIN_TOKEN"] = "anti-csrf"
|
||||
environ["HTTP_COOKIE"] = "Domain-Token=anti-csrf"
|
||||
environ["BespinTestApp"] = "True"
|
||||
return environ
|
|
@ -0,0 +1,248 @@
|
|||
# mock.py
|
||||
# Test tools for mocking and patching.
|
||||
# Copyright (C) 2007-2009 Michael Foord
|
||||
# E-mail: fuzzyman AT voidspace DOT org DOT uk
|
||||
|
||||
# mock 0.5.0
|
||||
# http://www.voidspace.org.uk/python/mock/
|
||||
|
||||
# Released subject to the BSD License
|
||||
# Please see http://www.voidspace.org.uk/python/license.shtml
|
||||
|
||||
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
|
||||
# Comments, suggestions and bug reports welcome.
|
||||
|
||||
|
||||
__all__ = (
|
||||
'Mock',
|
||||
'patch',
|
||||
'patch_object',
|
||||
'sentinel',
|
||||
'DEFAULT'
|
||||
)
|
||||
|
||||
__version__ = '0.5.0'
|
||||
|
||||
|
||||
class SentinelObject(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __repr__(self):
|
||||
return '<SentinelObject "%s">' % self.name
|
||||
|
||||
|
||||
class Sentinel(object):
|
||||
def __init__(self):
|
||||
self._sentinels = {}
|
||||
|
||||
def __getattr__(self, name):
|
||||
return self._sentinels.setdefault(name, SentinelObject(name))
|
||||
|
||||
|
||||
sentinel = Sentinel()
|
||||
|
||||
DEFAULT = sentinel.DEFAULT
|
||||
|
||||
class OldStyleClass:
|
||||
pass
|
||||
ClassType = type(OldStyleClass)
|
||||
|
||||
def _is_magic(name):
|
||||
return '__%s__' % name[2:-2] == name
|
||||
|
||||
def _copy(value):
|
||||
if type(value) in (dict, list, tuple, set):
|
||||
return type(value)(value)
|
||||
return value
|
||||
|
||||
|
||||
class Mock(object):
|
||||
|
||||
def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
|
||||
name=None, parent=None, wraps=None):
|
||||
self._parent = parent
|
||||
self._name = name
|
||||
if spec is not None and not isinstance(spec, list):
|
||||
spec = [member for member in dir(spec) if not _is_magic(member)]
|
||||
|
||||
self._methods = spec
|
||||
self._children = {}
|
||||
self._return_value = return_value
|
||||
self.side_effect = side_effect
|
||||
self._wraps = wraps
|
||||
|
||||
self.reset_mock()
|
||||
|
||||
|
||||
|
||||
def reset_mock(self):
|
||||
self.called = False
|
||||
self.call_args = None
|
||||
self.call_count = 0
|
||||
self.call_args_list = []
|
||||
self.method_calls = []
|
||||
for child in self._children.itervalues():
|
||||
child.reset_mock()
|
||||
if isinstance(self._return_value, Mock):
|
||||
self._return_value.reset_mock()
|
||||
|
||||
|
||||
def __get_return_value(self):
|
||||
if self._return_value is DEFAULT:
|
||||
self._return_value = Mock()
|
||||
return self._return_value
|
||||
|
||||
def __set_return_value(self, value):
|
||||
self._return_value = value
|
||||
|
||||
return_value = property(__get_return_value, __set_return_value)
|
||||
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
self.called = True
|
||||
self.call_count += 1
|
||||
self.call_args = (args, kwargs)
|
||||
self.call_args_list.append((args, kwargs))
|
||||
|
||||
parent = self._parent
|
||||
name = self._name
|
||||
while parent is not None:
|
||||
parent.method_calls.append((name, args, kwargs))
|
||||
if parent._parent is None:
|
||||
break
|
||||
name = parent._name + '.' + name
|
||||
parent = parent._parent
|
||||
|
||||
ret_val = self.return_value
|
||||
if self.side_effect is not None:
|
||||
ret_val = self.side_effect(*args, **kwargs)
|
||||
if ret_val is DEFAULT:
|
||||
ret_val = self.return_value
|
||||
|
||||
if self._wraps is not None:
|
||||
return self._wraps(*args, **kwargs)
|
||||
return ret_val
|
||||
|
||||
|
||||
def __getattr__(self, name):
|
||||
if self._methods is not None:
|
||||
if name not in self._methods:
|
||||
raise AttributeError("Mock object has no attribute '%s'" % name)
|
||||
elif _is_magic(name):
|
||||
raise AttributeError(name)
|
||||
|
||||
if name not in self._children:
|
||||
wraps = None
|
||||
if self._wraps is not None:
|
||||
wraps = getattr(self._wraps, name)
|
||||
self._children[name] = Mock(parent=self, name=name, wraps=wraps)
|
||||
|
||||
return self._children[name]
|
||||
|
||||
|
||||
def assert_called_with(self, *args, **kwargs):
|
||||
assert self.call_args == (args, kwargs), 'Expected: %s\nCalled with: %s' % ((args, kwargs), self.call_args)
|
||||
|
||||
|
||||
def _dot_lookup(thing, comp, import_path):
|
||||
try:
|
||||
return getattr(thing, comp)
|
||||
except AttributeError:
|
||||
__import__(import_path)
|
||||
return getattr(thing, comp)
|
||||
|
||||
|
||||
def _importer(target):
|
||||
components = target.split('.')
|
||||
import_path = components.pop(0)
|
||||
thing = __import__(import_path)
|
||||
|
||||
for comp in components:
|
||||
import_path += ".%s" % comp
|
||||
thing = _dot_lookup(thing, comp, import_path)
|
||||
return thing
|
||||
|
||||
|
||||
class _patch(object):
|
||||
def __init__(self, target, attribute, new, spec, create):
|
||||
self.target = target
|
||||
self.attribute = attribute
|
||||
self.new = new
|
||||
self.spec = spec
|
||||
self.create = create
|
||||
|
||||
|
||||
def __call__(self, func):
|
||||
if hasattr(func, 'patchings'):
|
||||
func.patchings.append(self)
|
||||
return func
|
||||
|
||||
def patched(*args, **keywargs):
|
||||
# don't use a with statement here (backwards compatibility with 2.5)
|
||||
extra_args = []
|
||||
for patching in patched.patchings:
|
||||
arg = patching.__enter__()
|
||||
if patching.new is DEFAULT:
|
||||
extra_args.append(arg)
|
||||
args += tuple(extra_args)
|
||||
try:
|
||||
return func(*args, **keywargs)
|
||||
finally:
|
||||
for patching in getattr(patched, 'patchings', []):
|
||||
patching.__exit__()
|
||||
|
||||
patched.patchings = [self]
|
||||
patched.__name__ = func.__name__
|
||||
patched.compat_co_firstlineno = getattr(func, "compat_co_firstlineno",
|
||||
func.func_code.co_firstlineno)
|
||||
return patched
|
||||
|
||||
|
||||
def get_original(self):
|
||||
try:
|
||||
return getattr(self.target, self.attribute)
|
||||
except AttributeError:
|
||||
if not self.create:
|
||||
raise
|
||||
return DEFAULT
|
||||
|
||||
|
||||
def __enter__(self):
|
||||
new, spec = self.new, self.spec
|
||||
original = self.get_original()
|
||||
if new is DEFAULT:
|
||||
inherit = False
|
||||
if spec is True:
|
||||
# set spec to the object we are replacing
|
||||
spec = original
|
||||
if isinstance(spec, (type, ClassType)):
|
||||
inherit = True
|
||||
new = Mock(spec=spec)
|
||||
if inherit:
|
||||
new.return_value = Mock(spec=spec)
|
||||
self.temp_original = original
|
||||
setattr(self.target, self.attribute, new)
|
||||
return new
|
||||
|
||||
|
||||
def __exit__(self, *_):
|
||||
if self.temp_original is not DEFAULT:
|
||||
setattr(self.target, self.attribute, self.temp_original)
|
||||
else:
|
||||
delattr(self.target, self.attribute)
|
||||
del self.temp_original
|
||||
|
||||
|
||||
def patch_object(target, attribute, new=DEFAULT, spec=None, create=False):
|
||||
return _patch(target, attribute, new, spec, create)
|
||||
|
||||
|
||||
def patch(target, new=DEFAULT, spec=None, create=False):
|
||||
try:
|
||||
target, attribute = target.rsplit('.', 1)
|
||||
except (TypeError, ValueError):
|
||||
raise TypeError("Need a valid target to patch. You supplied: %r" % (target,))
|
||||
target = _importer(target)
|
||||
return _patch(target, attribute, new, spec, create)
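# A minimal usage sketch (smtplib.SMTP is only an illustrative target;
# because new is left as DEFAULT, the replacement Mock is handed to the
# decorated test as an extra argument):
#
#   @patch('smtplib.SMTP')
#   def test_notify(MockSMTP):
#       send_alert()              # hypothetical code under test
#       assert MockSMTP.called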
|
||||
|
Binary file not shown.
|
@ -0,0 +1,28 @@
|
|||
import time
|
||||
|
||||
from bespin import auth
|
||||
|
||||
def test_login_failure():
|
||||
tracker = auth.MemoryFailedLoginTracker(1, 1)
|
||||
fli = tracker.can_log_in("foo")
|
||||
assert fli.can_log_in
|
||||
tracker.login_failed(fli)
|
||||
fli = tracker.can_log_in("foo")
|
||||
assert not fli.can_log_in
|
||||
time.sleep(1.5)
|
||||
fli = tracker.can_log_in("foo")
|
||||
assert fli.can_log_in
|
||||
|
||||
def test_login_success():
|
||||
tracker = auth.MemoryFailedLoginTracker(10, 600)
|
||||
fli = tracker.can_log_in("foo")
|
||||
assert fli.can_log_in
|
||||
tracker.login_failed(fli)
|
||||
fli = tracker.can_log_in("foo")
|
||||
assert fli.can_log_in
|
||||
assert fli.failed_attempts == 1
|
||||
tracker.login_successful(fli)
|
||||
fli = tracker.can_log_in("foo")
|
||||
assert fli.can_log_in
|
||||
assert fli.failed_attempts == 0
|
||||
|
|
@ -0,0 +1,582 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS" basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Bespin.
|
||||
#
|
||||
# The Initial Developer of the Original Code is Mozilla.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2009
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
#
|
||||
|
||||
|
||||
import simplejson
|
||||
from bespin import config, controllers
|
||||
from bespin.filesystem import get_project
|
||||
from bespin.database import User, Base, ConflictError
|
||||
|
||||
from nose.tools import assert_equals
|
||||
from __init__ import BespinTestApp
|
||||
|
||||
session = None
|
||||
mattb = None
|
||||
zuck = None
|
||||
tom = None
|
||||
ev = None
|
||||
joe = None
|
||||
app = None
|
||||
group = None
|
||||
|
||||
def setup_module(module):
|
||||
config.set_profile("test")
|
||||
config.activate_profile()
|
||||
_reset()
|
||||
|
||||
def _reset():
|
||||
Base.metadata.drop_all(bind=config.c.dbengine)
|
||||
Base.metadata.create_all(bind=config.c.dbengine)
|
||||
fsroot = config.c.fsroot
|
||||
if fsroot.exists() and fsroot.basename() == "testfiles":
|
||||
fsroot.rmtree()
|
||||
fsroot.makedirs()
|
||||
|
||||
global session
|
||||
session = config.c.session_factory()
|
||||
num_users = session.query(User).count()
|
||||
assert_equals(num_users, 0)
|
||||
session.commit()
|
||||
|
||||
global mattb, zuck, tom, ev, joe
|
||||
mattb = User.create_user("mattb", "mattb", "mattb")
|
||||
zuck = User.create_user("zuck", "zuck", "zuck")
|
||||
tom = User.create_user("tom", "tom", "tom")
|
||||
ev = User.create_user("ev", "ev", "ev")
|
||||
joe = User.create_user("joe", "joe", "joe")
|
||||
group = joe.add_group("group")
|
||||
group.add_member(mattb)
|
||||
group.add_member(zuck)
|
||||
group.add_member(tom)
|
||||
group.add_member(ev)
|
||||
|
||||
global app
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
app.post("/register/login/joe", dict(password="joe"))
|
||||
|
||||
def _followed_names(connections):
|
||||
return set([connection.followed.username for connection in connections])
|
||||
|
||||
def _following_names(connections):
|
||||
return set([connection.following.username for connection in connections])
|
||||
|
||||
def _group_names(groups):
|
||||
return set([group.name for group in groups])
|
||||
|
||||
def _group_member_names(group_memberships):
|
||||
return set([group_membership.user.username for group_membership in group_memberships])
|
||||
|
||||
# Group tests
|
||||
def test_groups():
|
||||
_reset()
|
||||
|
||||
assert_equals(len(joe.get_groups()), 1)
|
||||
|
||||
homies = joe.get_group("homies", create_on_not_found=True)
|
||||
assert_equals(_group_names(joe.get_groups()), set([ "homies", "group" ]))
|
||||
assert_equals(_group_names(joe.get_groups(mattb)), set([ "group" ]))
|
||||
|
||||
homies.add_member(mattb)
|
||||
assert_equals(_group_names(joe.get_groups(mattb)), set([ "homies", "group" ]))
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb" ]))
|
||||
|
||||
homies.add_member(zuck)
|
||||
homies.add_member(tom)
|
||||
homies.add_member(ev)
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb", "zuck", "tom", "ev" ]))
|
||||
|
||||
deleted = homies.remove_member(tom)
|
||||
assert deleted > 0
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb", "zuck", "ev" ]))
|
||||
|
||||
deleted = homies.remove_member(tom)
|
||||
assert_equals(deleted, 0)
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb", "zuck", "ev" ]))
|
||||
|
||||
deleted = homies.remove_all_members()
|
||||
assert deleted > 0
|
||||
assert_equals(homies.get_members(), [])
|
||||
|
||||
deleted = homies.remove_all_members()
|
||||
assert_equals(deleted, 0)
|
||||
assert_equals(homies.get_members(), [])
|
||||
|
||||
homies.add_member(mattb)
|
||||
homies.add_member(zuck)
|
||||
homies.add_member(tom)
|
||||
homies.add_member(ev)
|
||||
|
||||
session.commit()
|
||||
try:
|
||||
homies.add_member(joe)
|
||||
assert False, "Missing ConflictError"
|
||||
except ConflictError:
|
||||
session.rollback()
|
||||
|
||||
deleted = homies.remove()
|
||||
assert deleted > 0
|
||||
assert_equals(_group_names(joe.get_groups()), set([ "group" ]))
|
||||
|
||||
# Group tests
|
||||
def _test_groups_with_app():
|
||||
_reset()
|
||||
|
||||
assert_equals(len(joe.get_groups()), 1)
|
||||
|
||||
app.get("/group/list/homies/", status=409)
|
||||
app.post("/group/add/homies/", '["mattb"]')
|
||||
|
||||
homies = joe.get_group("homies", raise_on_not_found=True)
|
||||
|
||||
assert_equals(_group_names(joe.get_groups()), set([ "homies", "group" ]))
|
||||
assert_equals(_group_names(joe.get_groups(mattb)), set([ "homies" ]))
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb" ]))
|
||||
|
||||
app.post("/group/add/homies/", '["zuck", "tom", "ev"]')
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb", "zuck", "tom", "ev" ]))
|
||||
|
||||
response = app.post("/group/remove/homies/", '["tom"]')
|
||||
assert int(response.body) >= 1
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb", "zuck", "ev" ]))
|
||||
|
||||
response = app.post("/group/remove/homies/", '["tom"]')
|
||||
assert_equals(response.body, "0")
|
||||
assert_equals(_group_member_names(homies.get_members()), set([ "mattb", "zuck", "ev" ]))
|
||||
|
||||
response = app.post("/group/remove/all/homies/")
|
||||
assert int(response.body) >= 1
|
||||
assert_equals(homies.get_members(), [])
|
||||
|
||||
app.post("/group/remove/all/homies/", status=409)
|
||||
|
||||
app.post("/group/add/homies/", '["mattb", "zuck", "tom", "ev"]')
|
||||
response = app.post("/group/remove/all/homies/")
|
||||
assert int(response.body) >= 1
|
||||
assert_equals(homies.get_members(), [])
|
||||
assert_equals(_group_names(joe.get_groups()), set([ "group" ]))
|
||||
|
||||
# Sharing tests
|
||||
def test_sharing():
|
||||
_reset()
|
||||
|
||||
assert_equals(len(joe.projects), 1) # We start with a SampleProject
|
||||
joes_project = get_project(joe, joe, "joes_project", create=True)
|
||||
assert_equals(len(joe.projects), 2)
|
||||
assert_equals(joe.get_sharing(), [])
|
||||
|
||||
joe.add_sharing(joes_project, ev, False, False)
|
||||
sharing = joe.get_sharing()
|
||||
assert_equals(sharing, [{'loadany':False, 'edit':False, 'type':'user', 'project':'joes_project', 'owner':'joe', 'recipient':'ev'}])
|
||||
|
||||
# Joe has shared a project with ev, but since no one follows him yet, nothing changes
|
||||
assert_equals(len(ev.projects), 1)
|
||||
assert_equals(len(tom.projects), 1)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
ev.follow(joe)
|
||||
|
||||
# user.projects reports projects that the user owns, so this should not change
|
||||
assert_equals(len(ev.projects), 1)
|
||||
assert_equals(len(tom.projects), 1)
|
||||
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
# Joe's homies are mattb and zuck
|
||||
homies = joe.get_group("homies", create_on_not_found=True)
|
||||
homies.add_member(mattb)
|
||||
homies.add_member(zuck)
|
||||
joe.add_sharing(joes_project, homies, False, False)
|
||||
|
||||
# But mattb and zuck don't care; they're not following joe
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
mattb.follow(joe)
|
||||
zuck.follow(joe)
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
# So now joe shares it with everyone
|
||||
joe.add_sharing(joes_project, 'everyone', False, False)
|
||||
|
||||
# Once again, tom doesn't care, because he's not following joe
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
tom.follow(joe)
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 2)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
# Check that we can undo in a different order
|
||||
joe.remove_sharing(joes_project, 'everyone')
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
joe.remove_sharing(joes_project, ev)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
joe.remove_sharing(joes_project, homies)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
# Share again to check fast removal
|
||||
joe.add_sharing(joes_project, ev, False, False)
|
||||
joe.add_sharing(joes_project, homies, False, False)
|
||||
joe.add_sharing(joes_project, 'everyone', False, False)
|
||||
|
||||
joe.remove_sharing(joes_project)
|
||||
assert_equals(joe.get_sharing(), [])
|
||||
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
|
||||
joes_project.delete()
|
||||
|
||||
# Sharing tests
|
||||
def test_sharing_with_app():
|
||||
_reset()
|
||||
|
||||
response = app.get("/file/list/")
|
||||
assert_equals(len(simplejson.loads(response.body)), 1)
|
||||
|
||||
##app.post("/group/add/homies/", '["mattb"]')
|
||||
|
||||
joes_project = get_project(joe, joe, "joes_project", create=True)
|
||||
response = app.get("/file/list/")
|
||||
assert_equals(len(simplejson.loads(response.body)), 2)
|
||||
|
||||
response = app.get("/share/list/all/")
|
||||
assert_equals(len(simplejson.loads(response.body)), 0)
|
||||
|
||||
response = app.post("/share/add/joes_project/ev/", '["edit"]')
|
||||
assert_equals(response.body, "")
|
||||
|
||||
response = app.get("/share/list/all/")
|
||||
shares = simplejson.loads(response.body)
|
||||
assert_equals(shares, [{'loadany':False, 'edit':True, 'type':'user', 'project':'joes_project', 'owner':'joe', 'recipient':'ev'}])
|
||||
|
||||
# Joe has shared a project with ev, but since no one follows him yet, nothing changes
|
||||
assert_equals(len(ev.projects), 1)
|
||||
assert_equals(len(tom.projects), 1)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
ev.follow(joe)
|
||||
|
||||
# user.projects reports projects that the user owns, so this should not change
|
||||
assert_equals(len(ev.projects), 1)
|
||||
assert_equals(len(tom.projects), 1)
|
||||
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
# Joe's homies are mattb and zuck
|
||||
homies = joe.get_group("homies", create_on_not_found=True)
|
||||
homies.add_member(mattb)
|
||||
homies.add_member(zuck)
|
||||
joe.add_sharing(joes_project, homies, False, False)
|
||||
|
||||
# But mattb and zuck don't care; they're not following joe
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
mattb.follow(joe)
|
||||
zuck.follow(joe)
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
# So now joe shares it with everyone
|
||||
joe.add_sharing(joes_project, 'everyone', False, False)
|
||||
|
||||
# Once again, tom doesn't care, because he's not following joe
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
tom.follow(joe)
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 2)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
# Check that we can undo in a different order
|
||||
joe.remove_sharing(joes_project, 'everyone')
|
||||
assert_equals(len(ev.get_all_projects(True)), 2)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
joe.remove_sharing(joes_project, ev)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 2)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 2)
|
||||
|
||||
joe.remove_sharing(joes_project, homies)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(zuck.get_all_projects(True)), 1)
|
||||
assert_equals(len(mattb.get_all_projects(True)), 1)
|
||||
|
||||
# Share again to check fast removal
|
||||
joe.add_sharing(joes_project, ev, False, False)
|
||||
joe.add_sharing(joes_project, homies, False, False)
|
||||
joe.add_sharing(joes_project, 'everyone', False, False)
|
||||
|
||||
joe.remove_sharing(joes_project)
|
||||
assert_equals(joe.get_sharing(), [])
|
||||
|
||||
assert_equals(len(tom.get_all_projects(True)), 1)
|
||||
assert_equals(len(ev.get_all_projects(True)), 1)
|
||||
|
||||
joes_project.delete()
|
||||
|
||||
# Follower tests
|
||||
def test_follow():
|
||||
_reset()
|
||||
|
||||
# To start with no-one follows anyone else
|
||||
assert_equals(len(joe.users_i_follow()), 0)
|
||||
assert_equals(len(mattb.users_i_follow()), 0)
|
||||
assert_equals(len(zuck.users_i_follow()), 0)
|
||||
assert_equals(len(tom.users_i_follow()), 0)
|
||||
assert_equals(len(ev.users_i_follow()), 0)
|
||||
assert_equals(len(joe.users_following_me()), 0)
|
||||
assert_equals(len(mattb.users_following_me()), 0)
|
||||
assert_equals(len(zuck.users_following_me()), 0)
|
||||
assert_equals(len(tom.users_following_me()), 0)
|
||||
assert_equals(len(ev.users_following_me()), 0)
|
||||
|
||||
# Add a single follow
|
||||
zuck.follow(joe)
|
||||
assert_equals(len(joe.users_i_follow()), 0)
|
||||
assert_equals(len(mattb.users_i_follow()), 0)
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(len(tom.users_i_follow()), 0)
|
||||
assert_equals(len(ev.users_i_follow()), 0)
|
||||
assert_equals(_following_names(joe.users_following_me()), set([ "zuck" ]))
|
||||
assert_equals(len(mattb.users_following_me()), 0)
|
||||
assert_equals(len(zuck.users_following_me()), 0)
|
||||
assert_equals(len(tom.users_following_me()), 0)
|
||||
assert_equals(len(ev.users_following_me()), 0)
|
||||
|
||||
# Everyone loves joe
|
||||
mattb.follow(joe)
|
||||
ev.follow(joe)
|
||||
tom.follow(joe)
|
||||
assert_equals(len(joe.users_i_follow()), 0)
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([ "zuck", "mattb", "tom", "ev" ]))
|
||||
assert_equals(len(zuck.users_following_me()), 0)
|
||||
assert_equals(len(mattb.users_following_me()), 0)
|
||||
assert_equals(len(tom.users_following_me()), 0)
|
||||
assert_equals(len(ev.users_following_me()), 0)
|
||||
|
||||
# There is a limit to how much love though
|
||||
session.commit()
|
||||
try:
|
||||
zuck.follow(joe)
|
||||
assert False, "Missing ConflictError"
|
||||
except ConflictError:
|
||||
session.rollback()
|
||||
assert_equals(len(joe.users_i_follow()), 0)
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([ "zuck", "mattb", "tom", "ev" ]))
|
||||
assert_equals(len(mattb.users_following_me()), 0)
|
||||
assert_equals(len(zuck.users_following_me()), 0)
|
||||
assert_equals(len(tom.users_following_me()), 0)
|
||||
assert_equals(len(ev.users_following_me()), 0)
|
||||
|
||||
# Tom is a narcissist
|
||||
session.commit()
|
||||
try:
|
||||
tom.follow(tom)
|
||||
assert False, "Missing ConflictError"
|
||||
except ConflictError:
|
||||
session.rollback()
|
||||
assert_equals(len(joe.users_i_follow()), 0)
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([ "zuck", "mattb", "tom", "ev" ]))
|
||||
assert_equals(len(mattb.users_following_me()), 0)
|
||||
assert_equals(len(zuck.users_following_me()), 0)
|
||||
assert_equals(len(tom.users_following_me()), 0)
|
||||
assert_equals(len(ev.users_following_me()), 0)
|
||||
|
||||
# Make this a bit less unrequited
|
||||
joe.follow(zuck)
|
||||
joe.follow(tom)
|
||||
joe.follow(mattb)
|
||||
joe.follow(ev)
|
||||
assert_equals(len(joe.users_i_follow()), 4)
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "joe" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([ "zuck", "mattb", "tom", "ev" ]))
|
||||
assert_equals(_following_names(mattb.users_following_me()), set([ "joe" ]))
|
||||
assert_equals(_following_names(zuck.users_following_me()), set([ "joe" ]))
|
||||
assert_equals(_following_names(tom.users_following_me()), set([ "joe" ]))
|
||||
assert_equals(_following_names(ev.users_following_me()), set([ "joe" ]))
|
||||
|
||||
# A love in
|
||||
zuck.follow(tom)
|
||||
zuck.follow(mattb)
|
||||
zuck.follow(ev)
|
||||
tom.follow(zuck)
|
||||
tom.follow(mattb)
|
||||
tom.follow(ev)
|
||||
mattb.follow(zuck)
|
||||
mattb.follow(tom)
|
||||
mattb.follow(ev)
|
||||
ev.follow(zuck)
|
||||
ev.follow(tom)
|
||||
ev.follow(mattb)
|
||||
assert_equals(_followed_names(joe.users_i_follow()), set([ "mattb", "zuck", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "zuck", "tom", "ev", "joe" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "mattb", "tom", "ev", "joe" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "mattb", "zuck", "ev", "joe" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "mattb", "zuck", "tom", "joe" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([ "zuck", "mattb", "tom", "ev" ]))
|
||||
assert_equals(_following_names(mattb.users_following_me()), set([ "zuck", "tom", "ev", "joe" ]))
|
||||
assert_equals(_following_names(zuck.users_following_me()), set([ "mattb", "tom", "ev", "joe" ]))
|
||||
assert_equals(_following_names(tom.users_following_me()), set([ "mattb", "zuck", "ev", "joe" ]))
|
||||
assert_equals(_following_names(ev.users_following_me()), set([ "mattb", "zuck", "tom", "joe" ]))
|
||||
|
||||
# The joe hate begins
|
||||
zuck.unfollow(joe)
|
||||
tom.unfollow(joe)
|
||||
assert_equals(_followed_names(joe.users_i_follow()), set([ "mattb", "zuck", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "zuck", "tom", "ev", "joe" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "mattb", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "mattb", "zuck", "ev" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "mattb", "zuck", "tom", "joe" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([ "mattb", "ev" ]))
|
||||
assert_equals(_following_names(mattb.users_following_me()), set([ "zuck", "tom", "ev", "joe" ]))
|
||||
assert_equals(_following_names(zuck.users_following_me()), set([ "mattb", "tom", "ev", "joe" ]))
|
||||
assert_equals(_following_names(tom.users_following_me()), set([ "mattb", "zuck", "ev", "joe" ]))
|
||||
assert_equals(_following_names(ev.users_following_me()), set([ "mattb", "zuck", "tom", "joe" ]))
|
||||
|
||||
# The joe hate continues
|
||||
mattb.unfollow(joe)
|
||||
ev.unfollow(joe)
|
||||
assert_equals(_followed_names(joe.users_i_follow()), set([ "mattb", "zuck", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "zuck", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "mattb", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "mattb", "zuck", "ev" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "mattb", "zuck", "tom" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([]))
|
||||
assert_equals(_following_names(mattb.users_following_me()), set([ "zuck", "tom", "ev", "joe" ]))
|
||||
assert_equals(_following_names(zuck.users_following_me()), set([ "mattb", "tom", "ev", "joe" ]))
|
||||
assert_equals(_following_names(tom.users_following_me()), set([ "mattb", "zuck", "ev", "joe" ]))
|
||||
assert_equals(_following_names(ev.users_following_me()), set([ "mattb", "zuck", "tom", "joe" ]))
|
||||
|
||||
# Joe: well, be like that then
|
||||
joe.unfollow(zuck)
|
||||
joe.unfollow(tom)
|
||||
joe.unfollow(mattb)
|
||||
joe.unfollow(ev)
|
||||
assert_equals(_followed_names(joe.users_i_follow()), set([]))
|
||||
assert_equals(_followed_names(mattb.users_i_follow()), set([ "zuck", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(zuck.users_i_follow()), set([ "mattb", "tom", "ev" ]))
|
||||
assert_equals(_followed_names(tom.users_i_follow()), set([ "mattb", "zuck", "ev" ]))
|
||||
assert_equals(_followed_names(ev.users_i_follow()), set([ "mattb", "zuck", "tom" ]))
|
||||
assert_equals(_following_names(joe.users_following_me()), set([]))
|
||||
assert_equals(_following_names(mattb.users_following_me()), set([ "zuck", "tom", "ev" ]))
|
||||
assert_equals(_following_names(zuck.users_following_me()), set([ "mattb", "tom", "ev" ]))
|
||||
assert_equals(_following_names(tom.users_following_me()), set([ "mattb", "zuck", "ev" ]))
|
||||
assert_equals(_following_names(ev.users_following_me()), set([ "mattb", "zuck", "tom" ]))
|
||||
|
||||
# And we all throw our toys out of the pram
|
||||
zuck.unfollow(tom)
|
||||
zuck.unfollow(mattb)
|
||||
zuck.unfollow(ev)
|
||||
tom.unfollow(zuck)
|
||||
tom.unfollow(mattb)
|
||||
tom.unfollow(ev)
|
||||
mattb.unfollow(zuck)
|
||||
mattb.unfollow(tom)
|
||||
mattb.unfollow(ev)
|
||||
ev.unfollow(zuck)
|
||||
ev.unfollow(tom)
|
||||
ev.unfollow(mattb)
|
||||
assert_equals(len(joe.users_i_follow()), 0)
|
||||
assert_equals(len(zuck.users_i_follow()), 0)
|
||||
assert_equals(len(mattb.users_i_follow()), 0)
|
||||
assert_equals(len(tom.users_i_follow()), 0)
|
||||
assert_equals(len(ev.users_i_follow()), 0)
|
||||
assert_equals(len(joe.users_following_me()), 0)
|
||||
assert_equals(len(zuck.users_following_me()), 0)
|
||||
assert_equals(len(mattb.users_following_me()), 0)
|
||||
assert_equals(len(tom.users_following_me()), 0)
|
||||
assert_equals(len(ev.users_following_me()), 0)
|
||||
|
||||
# But there is a limit to how much you can hate
|
||||
session.commit()
|
||||
try:
|
||||
zuck.unfollow(tom)
|
||||
assert False, "Missing ConflictError"
|
||||
except ConflictError:
|
||||
session.rollback()
|
|
@ -0,0 +1,45 @@
|
|||
import simplejson
|
||||
|
||||
from bespin import config, controllers, database
|
||||
|
||||
from bespin.tests import BespinTestApp
|
||||
|
||||
def setup_module(module):
|
||||
config.set_profile("test")
|
||||
config.activate_profile()
|
||||
|
||||
def _clear_db():
|
||||
database.Base.metadata.drop_all(bind=config.c.dbengine)
|
||||
database.Base.metadata.create_all(bind=config.c.dbengine)
|
||||
fsroot = config.c.fsroot
|
||||
if fsroot.exists() and fsroot.basename() == "testfiles":
|
||||
fsroot.rmtree()
|
||||
fsroot.makedirs()
|
||||
|
||||
def test_server_capabilities():
|
||||
_clear_db()
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
|
||||
password="notangry"))
|
||||
resp = app.get("/capabilities/")
|
||||
assert resp.content_type == "application/json"
|
||||
data = simplejson.loads(resp.body)
|
||||
print data
|
||||
assert data == dict(
|
||||
capabilities=["vcs", "collab"],
|
||||
dojoModulePath={},
|
||||
javaScriptPlugins=[]
|
||||
)
|
||||
|
||||
def test_userinfo_also_returns_capabilities():
|
||||
_clear_db()
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
|
||||
password="notangry"))
|
||||
resp = app.get('/register/userinfo/')
|
||||
data = simplejson.loads(resp.body)
|
||||
print data
|
||||
assert 'serverCapabilities' in data
|
||||
|
|
@ -0,0 +1,278 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

import logging
from path import path
from simplejson import dumps, loads

from bespin.database import Base
from bespin.database import User
from bespin import deploy, config, controllers
from bespin.filesystem import get_project, NotAuthorized

from bespin.tests import BespinTestApp
from bespin.tests.mock import patch

app = None

def setup_module(module):
    global app
    config.set_profile('test')
    app = controllers.make_app()
    app = BespinTestApp(app)
    logging.basicConfig(level=logging.DEBUG)

def _init_data():
    global macgyver
    config.activate_profile()

    fsroot = config.c.fsroot
    if fsroot.exists() and fsroot.basename() == "testfiles":
        fsroot.rmtree()
    fsroot.makedirs()

    app.reset()

    Base.metadata.drop_all(bind=config.c.dbengine)
    Base.metadata.create_all(bind=config.c.dbengine)
    s = config.c.session_factory()

    app.post("/register/new/MacGyver",
             dict(password="richarddean", email="rich@sg1.com"))

    macgyver = User.find_user("MacGyver")
    s.flush()


def test_keychain_creation():
    _init_data()
    kc = deploy.DeploymentKeyChain(macgyver, "foobar")
    public_key, private_key = kc.get_ssh_key()

    assert public_key.startswith("ssh-rsa")
    assert "RSA PRIVATE KEY" in private_key

    public_key2 = deploy.DeploymentKeyChain.get_ssh_public_key(macgyver)
    assert public_key2 == public_key

    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)

    kc.set_ssh_for_project(bigmac, "macgyver")

    kcfile = path(macgyver.get_location()) / ".bespin-keychain"
    assert kcfile.exists()

    # make sure the file is encrypted
    text = kcfile.bytes()
    assert "RSA PRIVATE KEY" not in text
    assert "ssh-rsa" not in text

    kc = deploy.DeploymentKeyChain(macgyver, "foobar")
    public_key2, private_key2 = kc.get_ssh_key()
    assert public_key2 == public_key
    assert private_key2 == private_key

    credentials = kc.get_credentials_for_project(bigmac)
    assert "RSA PRIVATE KEY" in credentials['ssh_private_key']
    assert credentials['type'] == "ssh"
    assert credentials['username'] == 'macgyver'

    kc.delete_credentials_for_project(bigmac)
    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials is None

    kc.set_credentials_for_project(bigmac, "macG", "coolpass")

    kc = deploy.DeploymentKeyChain(macgyver, "foobar")
    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials['type'] == 'password'
    assert credentials['username'] == 'macG'
    assert credentials['password'] == 'coolpass'

    kc.delete_credentials_for_project(bigmac)

    kc = deploy.DeploymentKeyChain(macgyver, "foobar")
    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials is None

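# Note: read as a protocol, the keychain above is a passphrase-unlocked,
# encrypted-at-rest credential store: no plaintext marker ("ssh-rsa",
# "RSA PRIVATE KEY") may hit disk, and the same passphrase must round-trip
# the data. A toy sketch of that shape -- the XOR "cipher" here is NOT real
# crypto and NOT Bespin's implementation, just enough to show the contract:
#
# import base64
# import simplejson
# from itertools import cycle
#
# def _toy_cipher(data, password):
#     return "".join(chr(ord(c) ^ ord(k)) for c, k in zip(data, cycle(password)))
#
# class ToyKeyChain(object):
#     def __init__(self, filepath, password):
#         self.filepath = filepath      # a path.path object, as in the tests
#         self.password = password
#
#     def load(self):
#         if not self.filepath.exists():
#             return {}
#         raw = base64.b64decode(self.filepath.bytes())
#         return simplejson.loads(_toy_cipher(raw, self.password))
#
#     def store(self, data):
#         raw = _toy_cipher(simplejson.dumps(data), self.password)
#         self.filepath.write_bytes(base64.b64encode(raw))
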
def test_set_project_deployment_metadata():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    pdo = deploy.ProjectDeploymentOptions(bigmac,
        remote_host="macgyver.com",
        remote_directory="/home/macgyver/knownunknowns",
        type="sftp")
    pdo.save()

    md = bigmac.metadata
    options_json = md['deployment']
    assert "remote_host" in options_json
    assert "sftp" in options_json

    pdo = deploy.ProjectDeploymentOptions.get(bigmac)
    assert pdo.remote_host == "macgyver.com"
    assert pdo.remote_directory == "/home/macgyver/knownunknowns"

# Web tests

def test_deployment_setup():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.put("/project/deploy/bigmac/setup", dumps(dict(
        remoteHost="macgyver.com",
        remoteDirectory="/home/macgyver/knownunknowns",
        connType="sftp",
        kcpass="sekretkeychain",
        authType="ssh",
        username="macman")))

    bigmac = get_project(macgyver, macgyver, "bigmac")
    pdo = deploy.ProjectDeploymentOptions.get(bigmac)
    assert pdo.remote_host == "macgyver.com"
    assert pdo.remote_directory == "/home/macgyver/knownunknowns"
    kc = deploy.DeploymentKeyChain(macgyver, "sekretkeychain")
    cred = kc.get_credentials_for_project(bigmac)
    assert cred['type'] == "ssh"
    assert cred["username"] == "macman"

    resp = app.post("/project/deploy/bigmac/setup",
                    dumps(dict(kcpass="sekretkeychain")))
    assert resp.content_type == "application/json"
    data = loads(resp.body)
    assert data['authType'] == "ssh"
    assert data['username'] == "macman"
    assert data['remoteHost'] == "macgyver.com"
    assert data['remoteDirectory'] == "/home/macgyver/knownunknowns"
    assert data['connType'] == "sftp"

    resp = app.put("/project/deploy/bigmac/setup", dumps(dict(
        remoteHost="macgyver.com",
        remoteDirectory="/home/macgyver/knownunknowns",
        connType="sftp",
        kcpass="sekretkeychain",
        authType="password",
        username="macman",
        password="NO ONE WILL EVER GUESS THIS!")))

    resp = app.post("/project/deploy/bigmac/setup",
                    dumps(dict(kcpass="sekretkeychain")))
    assert resp.content_type == "application/json"
    data = loads(resp.body)
    assert data['authType'] == "password"
    assert data['username'] == "macman"
    assert data['password'] == "NO ONE WILL EVER GUESS THIS!"
    assert data['remoteHost'] == "macgyver.com"
    assert data['remoteDirectory'] == "/home/macgyver/knownunknowns"
    assert data['connType'] == "sftp"

def test_retrieve_new_deployment_setup():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.post("/project/deploy/bigmac/setup",
                    dumps(dict(kcpass="sekretkeychain")))
    assert resp.content_type == "application/json"
    data = loads(resp.body)
    assert data is None

def test_deployment_fails_when_not_configured():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.post("/project/deploy/bigmac/",
                    dumps(dict(kcpass="sekretkeychain")), status=400)
    assert resp.content_type == "application/json"
    data = loads(resp.body)
    assert data['error'] == "Deployment is not yet configured."
    assert data['notConfigured'] == True

def test_deployment_setup_with_illegal_parameters():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.put("/project/deploy/bigmac/setup", dumps(dict(
        remoteHost="macgyver.com",
        remoteDirectory="/home/macgyver/knownunknowns",
        connType="file",
        kcpass="sekretkeychain",
        authType="ssh",
        username="macman")), status=400)


@patch("bespin.deploy._launch_sync")
def test_deployment_runs(launch_sync):
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.put("/project/deploy/bigmac/setup", dumps(dict(
        remoteHost="macgyver.com",
        remoteDirectory="/home/macgyver/knownunknowns",
        connType="sftp",
        kcpass="sekretkeychain",
        authType="password",
        username="macman",
        password="super/pass")))

    resp = app.post("/project/deploy/bigmac/",
                    dumps(dict(kcpass="sekretkeychain", dryRun=True)))

    assert resp.content_type == "application/json"
    data = loads(resp.body)

    assert 'jobid' in data
    assert data['jobid'] is not None
    assert launch_sync.called
    desturl = launch_sync.call_args[0][2]
    assert desturl == "sftp://macgyver.com//home/macgyver/knownunknowns"
    options = launch_sync.call_args[0][3]
    assert options.dry_run
    assert options.username == "macman"
    assert options.password == "super/pass"

@patch("bespin.deploy._launch_sync")
def test_deployment_runs_with_ssh_key(launch_sync):
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.put("/project/deploy/bigmac/setup", dumps(dict(
        remoteHost="macgyver.com",
        remoteDirectory="/home/macgyver/knownunknowns",
        connType="sftp",
        kcpass="sekretkeychain",
        authType="ssh",
        username="macman")))

    resp = app.post("/project/deploy/bigmac/",
                    dumps(dict(kcpass="sekretkeychain")))

    assert resp.content_type == "application/json"
    data = loads(resp.body)

    assert 'jobid' in data
    assert data['jobid'] is not None
    assert launch_sync.called
    desturl = launch_sync.call_args[0][2]
    assert desturl == "sftp://macgyver.com//home/macgyver/knownunknowns"
    options = launch_sync.call_args[0][3]
    assert not options.dry_run
    assert options.username == "macman"
    assert isinstance(options.sshkey, path)
    assert not options.sshkey.exists(), "Key file should be deleted at the end"
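
# Note: the double slash in the asserted destination URL is not a typo. The
# sync target is built as scheme://host + "/" + remote directory, and the
# remote directory is itself absolute. A one-line reconstruction, assumed
# from the asserted value rather than taken from bespin/deploy.py:
#
# def make_desturl(conn_type, remote_host, remote_directory):
#     return "%s://%s/%s" % (conn_type, remote_host, remote_directory)
#
# assert make_desturl("sftp", "macgyver.com", "/home/macgyver/knownunknowns") \
#     == "sftp://macgyver.com//home/macgyver/knownunknowns"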
@@ -0,0 +1,764 @@
# -*- coding: utf8 -*-

# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

import os
from datetime import datetime, timedelta
from urllib import urlencode

from __init__ import BespinTestApp
import simplejson
from path import path

from bespin import config, controllers, filesystem

from bespin.filesystem import File, get_project, ProjectView
from bespin.filesystem import FSException, FileNotFound, OverQuota, FileConflict, BadValue
from bespin.database import User, Base

tarfilename = os.path.join(os.path.dirname(__file__), "ut.tgz")
zipfilename = os.path.join(os.path.dirname(__file__), "ut.zip")
otherfilename = os.path.join(os.path.dirname(__file__), "other_import.tgz")
with_tabs = os.path.join(os.path.dirname(__file__), "ProjectWithTabs.tgz")

app = None
macgyver = None
someone_else = None
murdoc = None

def setup_module(module):
    global app
    config.set_profile('test')
    app = controllers.make_app()
    app = BespinTestApp(app)

def _init_data():
    global macgyver, someone_else, murdoc
    config.activate_profile()

    fsroot = config.c.fsroot
    if fsroot.exists() and fsroot.basename() == "testfiles":
        fsroot.rmtree()
    fsroot.makedirs()

    app.reset()

    Base.metadata.drop_all(bind=config.c.dbengine)
    Base.metadata.create_all(bind=config.c.dbengine)
    s = config.c.session_factory()

    someone_else = User.create_user("SomeoneElse", "", "someone@else.com")
    murdoc = User.create_user("Murdoc", "", "murdoc@badpeople.bad")

    otherproject = get_project(someone_else, someone_else,
                               "otherproject", create=True)
    otherproject.save_file('foo', 'Just a file to reserve a project')

    app.post("/register/new/MacGyver",
             dict(password="richarddean", email="rich@sg1.com"))

    macgyver = User.find_user("MacGyver")

def test_basic_file_creation():
    _init_data()
    starting_point = macgyver.amount_used
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("reqs", "Chewing gum wrapper")
    file_obj = File(bigmac, "reqs")
    data = str(file_obj.data)
    assert data == 'Chewing gum wrapper'
    ending_point = macgyver.amount_used
    difference = ending_point - starting_point
    assert difference == 19

    result = bigmac.search_files("eq")
    assert result == ['reqs']

    now = datetime.now()
    assert now - file_obj.created < timedelta(seconds=2)
    assert now - file_obj.modified < timedelta(seconds=2)

    bigmac = get_project(macgyver, macgyver, "bigmac")
    files = bigmac.list_files("")
    assert len(files) == 1
    assert files[0].short_name == 'reqs'
    proj_names = set([proj.name for proj in macgyver.projects])
    assert proj_names == set(['bigmac', "SampleProject",
                              "BespinSettings"])

    # let's update the contents
    bigmac.save_file("reqs", "New content")
    file_obj = File(bigmac, "reqs")

    assert file_obj.data == 'New content'

def test_changing_file_contents_changes_amount_used():
    _init_data()
    starting_point = macgyver.amount_used
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo", "step 1")
    assert macgyver.amount_used == starting_point + 6
    bigmac.save_file("foo", "step two")
    assert macgyver.amount_used == starting_point + 8

def test_cannot_save_beyond_quota():
    _init_data()
    old_units = filesystem.QUOTA_UNITS
    filesystem.QUOTA_UNITS = 10
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    try:
        bigmac.save_file("foo", "x" * 11)
        assert False, "Expected an OverQuota exception"
    except OverQuota:
        pass
    finally:
        filesystem.QUOTA_UNITS = old_units

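# Note: the quota test above monkeypatches a module-level constant with a
# try/finally; test_quota_limits_on_the_web later in this file repeats the
# same dance. A generic sketch of the idiom as a context manager, in case it
# is ever factored out (patched_attr is hypothetical, not a Bespin helper):
#
# from contextlib import contextmanager
#
# @contextmanager
# def patched_attr(obj, name, value):
#     # temporarily override obj.<name>, restoring it even if the body raises
#     old = getattr(obj, name)
#     setattr(obj, name, value)
#     try:
#         yield
#     finally:
#         setattr(obj, name, old)
#
# usage, equivalent to the try/finally in test_cannot_save_beyond_quota:
# with patched_attr(filesystem, "QUOTA_UNITS", 10):
#     bigmac.save_file("foo", "x" * 11)  # should raise OverQuota
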
def test_amount_used_can_be_recomputed():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("secrets", "The password is pa55w0rd!")
    starting_point = macgyver.amount_used
    # open the file, to cause a status file to be created
    bigmac.get_file("secrets")
    macgyver.amount_used = 0
    macgyver.recompute_files()
    assert macgyver.amount_used == starting_point

def test_retrieve_file_obj():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("reqs", "tenletters")
    try:
        bigmac.get_file_object("foo/bar")
        assert False, "expected file not found for missing file"
    except FileNotFound:
        pass

    file_obj = bigmac.get_file_object("reqs")
    assert file_obj.saved_size == 10


def test_error_if_you_try_to_replace_dir_with_file():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar/baz", "biz")
    try:
        bigmac.save_file("foo/bar", "NOT GONNA DO IT!")
        assert False, "Expected a FileConflict exception"
    except FileConflict:
        pass

def test_get_file_raises_exception_if_its_a_directory():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar/baz", "biz")
    try:
        contents = bigmac.get_file("foo/bar/")
        assert False, "Expected exception for directory"
    except FSException:
        pass

def test_get_file_raises_not_found_exception():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar/baz", "biz")
    try:
        contents = bigmac.get_file("NOTFOUND")
        assert False, "Expected exception for not found"
    except FileNotFound:
        pass

def test_directory_shortname_computed_to_have_last_dir():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar/baz", "biz")
    res = bigmac.list_files("foo/")
    print res
    assert len(res) == 1
    d = res[0]
    shortname = d.short_name
    assert shortname == "bar/"

def test_can_delete_empty_directory():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.create_directory("foo/bar/")
    bigmac.delete("foo/bar/")
    location = bigmac.location / "foo/bar"
    assert not location.exists()

def test_delete_raises_file_not_found():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    try:
        bigmac.delete("DOESNT MATTER")
        assert False, "Expected not found for missing project"
    except FileNotFound:
        pass
    bigmac.save_file("foo/bar/baz", "biz")
    try:
        bigmac.delete("STILL DOESNT MATTER")
        assert False, "Expected not found for missing file"
    except FileNotFound:
        pass
    flist = bigmac.list_files()
    assert flist[0].name == "foo/"
    bigmac.delete("foo/bar/")

def test_successful_deletion():
    _init_data()
    starting_used = macgyver.amount_used
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar/baz", "biz")

    files = bigmac.search_files("baz")
    assert files == ["foo/bar/baz"]

    bigmac.delete("foo/bar/baz")

    files = bigmac.search_files("baz")
    assert files == []

    assert macgyver.amount_used == starting_used
    try:
        bigmac.get_file("foo/bar/baz")
        assert False, "Expected FileNotFound because the file is gone"
    except FileNotFound:
        pass
    files = bigmac.list_files("foo/bar/")
    assert not files

def test_top_level_deletion():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo", "data")
    bigmac.delete("foo")
    flist = bigmac.list_files()
    assert flist == []

def test_directory_deletion():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("whiz/bang", "stillmore")
    starting_used = macgyver.amount_used
    bigmac.save_file("foo/bar", "data")
    bigmac.save_file("foo/blorg", "moredata")

    files = bigmac.search_files("blorg")
    assert files == ['foo/blorg']

    bigmac.delete("foo/")

    files = bigmac.search_files("blorg")
    assert files == []

    flist = bigmac.list_files()
    assert len(flist) == 1
    assert flist[0].name == 'whiz/'
    file_loc = bigmac.location / "foo/bar"
    assert not file_loc.exists()
    assert macgyver.amount_used == starting_used

# Edit functions are commented out for now. These may be reimplemented
# after the collaboration merge. If not, we should delete this stuff.

# def test_basic_edit_functions():
#     _init_data()
#     s = fm.session
#     bigmac = fm.get_project(macgyver, macgyver, "bigmac", create=True)
#     bigmac2 = fm.get_project(someone_else, someone_else, "bigmac", create=True)
#     fm.save_edit(macgyver, bigmac, "foo/bar/baz", "['edit', 'thinger']")
#     fm.save_edit(someone_else, bigmac2, "foo/bar/baz", "['some', 'thing']")
#     file_obj = s.query(File).filter_by(name="foo/bar/baz") \
#         .filter_by(project=bigmac).one()
#     assert len(file_obj.edits) == 1
#
#     try:
#         content = fm.get_file(bigmac, "foo/bar/baz")
#         assert False, "Files are not retrievable until the edits are saved"
#     except model.FileNotFound:
#         pass
#
#     files = fm.list_open(macgyver)
#     info = files['bigmac']['foo/bar/baz']
#     assert info['mode'] == "rw"
#
#     edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz")
#     assert edits == ["['edit', 'thinger']"]
#
#     fm.save_edit(macgyver, bigmac, "foo/bar/baz", "['second', 'edit']")
#     edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz")
#     assert edits == ["['edit', 'thinger']", "['second', 'edit']"]
#     edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz", 1)
#     assert edits == ["['second', 'edit']"]
#
#     try:
#         edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz", 2)
#         assert False, "Expected FSException for out-of-bounds start point"
#     except model.FSException:
#         pass
#
# def test_reset_edits():
#     _init_data()
#     bigmac = fm.get_project(macgyver, macgyver, "bigmac", create=True)
#     fm.save_edit(macgyver, bigmac, "foo/bar/baz", "['edit', 'thinger']")
#     fm.reset_edits(macgyver, bigmac, "foo/bar/baz")
#     edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz")
#     assert edits == []
#     files = fm.list_open(macgyver)
#     assert files == {}
#
#     fm.save_edit(macgyver, bigmac, "foo/bar/baz", "['edit', 'thinger']")
#     fm.save_edit(macgyver, bigmac, "foo/bar/blork", "['edit', 'thinger']")
#     files = fm.list_open(macgyver)
#     bigmac_files = files['bigmac']
#     assert len(bigmac_files) == 2
#     fm.reset_edits(macgyver)
#     files = fm.list_open(macgyver)
#     assert files == {}
#     edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz")
#     assert edits == []
#
# def test_edits_cleared_after_save():
#     _init_data()
#     bigmac = fm.get_project(macgyver, macgyver, "bigmac", create=True)
#     fm.save_edit(macgyver, bigmac, "foo/bar/baz", "['edit', 'thinger']")
#     fm.save_file(macgyver, bigmac, "foo/bar/baz", "macaroni")
#     edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz")
#     assert edits == []
#
# def test_edits_cleared_after_close():
#     _init_data()
#     bigmac = fm.get_project(macgyver, macgyver, "bigmac", create=True)
#     fm.save_file(macgyver, bigmac, "foo/bar/baz", "macaroni")
#     fm.get_file(bigmac, "foo/bar/baz")
#     fm.save_edit(macgyver, bigmac, "foo/bar/baz", "['edit', 'thinger']")
#     fm.close(bigmac, "foo/bar/baz")
#     edits = fm.list_edits(macgyver, bigmac, "foo/bar/baz")
#     assert edits == []
#
def test_list_top_level():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("readme.txt", "Hi there!")
    result = bigmac.list_files()
    result_names = [file.name for file in result]
    assert result_names == ["readme.txt"]
    result = macgyver.projects
    result_names = [proj.name for proj in result]
    assert result_names == ["BespinSettings",
                            "SampleProject", "bigmac"]


def test_filesystem_can_be_arranged_in_levels():
    config.c.fslevels = 0
    _init_data()

    assert macgyver.file_location == macgyver.uuid

    config.c.fslevels = 1
    _init_data()
    fsroot = config.c.fsroot
    dirs = [d.basename() for d in fsroot.dirs()]
    # the first character should be peeled off as the top directory name
    for d in dirs:
        assert len(d) == 1

    uuid = macgyver.uuid
    assert macgyver.file_location == "%s/%s" % (uuid[0], uuid)

    config.c.fslevels = 2
    _init_data()
    uuid = macgyver.uuid
    assert macgyver.file_location == "%s/%s/%s" % (uuid[0], uuid[1], uuid)

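# Note: the fslevels test pins down the directory-sharding scheme -- each
# level peels one more leading character off the uuid to use as a parent
# directory, which keeps any single directory from accumulating every user.
# A standalone reconstruction of the mapping, derived from the assertions:
#
# def file_location(uuid, fslevels):
#     # fslevels=0 -> "<uuid>"
#     # fslevels=1 -> "<uuid[0]>/<uuid>"
#     # fslevels=2 -> "<uuid[0]>/<uuid[1]>/<uuid>"
#     parts = [uuid[i] for i in range(fslevels)]
#     parts.append(uuid)
#     return "/".join(parts)
#
# assert file_location("abc123", 2) == "a/b/abc123"
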
def test_bad_project_names():
    _init_data()
    try:
        badone = get_project(macgyver, macgyver, "..", create=True)
        assert False, "Expected BadValue exception for bad name"
    except BadValue:
        pass
    try:
        badone = get_project(macgyver, macgyver, "foo/bar", create=True)
        assert False, "Expected BadValue exception for bad name"
    except BadValue:
        pass

def test_bad_files_and_directories():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    try:
        bigmac.save_file("../foo/bar", "hi")
        assert False, "Expected BadValue exception for bad name"
    except BadValue:
        pass

    bigmac.save_file("/tmp/foo", "hi")
    foopath = path("/tmp/foo")
    assert not foopath.exists()
    location = bigmac.location
    assert (location / "tmp" / "foo").exists()

def test_bad_directory_names():
    _init_data()
    p = path("/tmp/onlydirs/")
    assert not p.exists()
    p.makedirs()
    try:
        (p / "dir2").mkdir()
        (p / "dir3").mkdir()
        bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
        try:
            files = bigmac.list_files(p)
            assert False, "Expected exception for absolute dir"
        except BadValue:
            pass
        except FileNotFound:
            pass
    finally:
        p.rmtree()

def test_delete_directories_outside_of_tree():
    _init_data()
    p = path("/tmp/onlydirs/")
    assert not p.exists()
    p.makedirs()
    try:
        (p / "dir2").mkdir()
        (p / "dir3").mkdir()
        bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
        bigmac.save_file("tmp/onlydirs/newfile", "yo")
        files = bigmac.delete(p)
        assert p.exists()
    finally:
        if p.exists():
            p.rmtree()

def _setup_search_data():
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    for name in [
        "foo_bar",
        "whiz_bang",
        "ding_dong",
        "foo_some_other",
        "some_deeply_nested_file_here",
        "whiz_cheez",
        "bespin_rocks",
        "many_files",
        "cool+one",
        "some_1"
    ]:
        bigmac.save_file(name, "hi")
    return bigmac

def test_file_search():
    _init_data()
    bigmac = _setup_search_data()
    _run_search_tests(bigmac.search_files)

def _run_search_tests(search_func):
    result = search_func("")
    assert result == [
        "bespin_rocks",
        "cool+one",
        "ding_dong",
        "foo_bar",
        "foo_some_other",
        "many_files",
        "some_1",
        "some_deeply_nested_file_here",
        "whiz_bang",
        "whiz_cheez"
    ]

    result = search_func("o")
    assert result == [
        "bespin_rocks",
        "cool+one",
        "ding_dong",
        "foo_bar",
        "foo_some_other",
        "some_1",
        "some_deeply_nested_file_here"
    ]

    result = search_func("o", 2)
    assert result == [
        "bespin_rocks",
        "cool+one"
    ]

    result = search_func("os")
    assert result == [
        "bespin_rocks",
        "foo_some_other",
        "some_deeply_nested_file_here"
    ]

    result = search_func("me")
    assert result == [
        "foo_some_other",
        "some_1",
        "some_deeply_nested_file_here",
        "many_files"
    ]

    result = search_func("+")
    assert result == [
        "cool+one"
    ]

    result = search_func("ME")
    assert result == [
        "foo_some_other",
        "some_1",
        "some_deeply_nested_file_here",
        "many_files"
    ]

    result = search_func("so")
    assert result == [
        "foo_some_other",
        "some_1",
        "some_deeply_nested_file_here",
        "bespin_rocks"
    ]
    result = search_func("som")
    print result
    assert result == [
        "some_1",
        "foo_some_other",
        "some_deeply_nested_file_here"
    ]

    result = search_func(u'ø')
    assert result == []

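# Note: the expectations above imply a case-insensitive *subsequence* match
# (query characters must appear in order, not adjacently -- "os" hits
# "bespin_rocks"). The orderings also suggest tighter matches rank first
# ("me" puts the scattered match in many_files last); that ranking function
# is not reconstructed here. A sketch of just the matching predicate:
#
# import re
#
# def matches_search(filename, query):
#     # "os" -> pattern "o.*s", matched case-insensitively
#     pattern = ".*".join(re.escape(ch) for ch in query.lower())
#     return re.search(pattern, filename.lower()) is not None
#
# assert matches_search("bespin_rocks", "os")
# assert matches_search("many_files", "ME")
# assert not matches_search("whiz_bang", u"\xf8")
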
def test_project_rename_should_be_secure():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    p = path('/tmp/foo')
    try:
        try:
            bigmac.rename("/tmp/foo")
            assert not p.exists()
        except BadValue:
            pass
    finally:
        if p.exists():
            p.rmdir()

# -------
# Web tests
# -------

def test_good_file_operations_from_web():
    _init_data()
    app.put("/file/at/bigmac/reqs", "Chewing gum wrapper")
    bigmac = get_project(macgyver, macgyver, "bigmac")
    fileobj = File(bigmac, "reqs")
    contents = str(fileobj.data)
    assert contents == "Chewing gum wrapper"

    resp = app.get("/file/at/bigmac/reqs")
    assert resp.body == "Chewing gum wrapper"

    resp = app.get("/file/at/bigmac/reqs?mode=r")
    assert resp.body == "Chewing gum wrapper"

    resp = app.get("/file/list/bigmac/")
    data = simplejson.loads(resp.body)
    print data

    resp = app.get("/file/list/")
    data = simplejson.loads(resp.body)
    assert data == [{'name' : 'BespinSettings/'},
                    {'name' : 'SampleProject/'},
                    {'name' : 'bigmac/'}]

    resp = app.get("/file/list/SampleProject/")
    data = simplejson.loads(resp.body)
    assert data[1]['name'] == 'index.html'

    resp = app.get("/file/list/bigmac/")
    assert resp.content_type == "application/json"
    data = simplejson.loads(resp.body)
    assert len(data) == 1
    data = data[0]
    assert data['name'] == 'reqs'
    assert data['size'] == 19
    assert data['created'].startswith("20")
    assert 'T' in data['created']
    assert data['modified'].startswith("20")
    assert 'T' in data['modified']

    app.delete("/file/at/bigmac/reqs")
    resp = app.get("/file/list/bigmac/")
    data = simplejson.loads(resp.body)
    assert data == []


def test_error_conditions_from_web():
    _init_data()
    app.get("/file/at/bigmac/UNKNOWN", status=404)
    app.put("/file/at/bigmac/bar/baz", "A file in bar")
    app.put("/file/at/bigmac/bar", "A file to replace bar", status=409)
    app.get("/file/at/bigmac/bar/baz")
    app.get("/file/at/bigmac", status=400)
    app.get("/file/at/bigmac/", status=400)
    app.get("/file/at/", status=400)

# Edit related functions are likely to change for collab
# DELETE if this is not needed

# def test_edit_interface():
#     _init_data()
#     app.put("/edit/at/bigmac/bar/baz", "Starting a file")
#     app.put("/edit/at/bigmac/bar/baz", "Second edit")
#     resp = app.get("/edit/list/bigmac/bar/baz")
#     assert resp.content_type == "application/json"
#     data = simplejson.loads(resp.body)
#     assert data == ["Starting a file", "Second edit"]
#
#     resp = app.get("/edit/recent/1/bigmac/bar/baz")
#     assert resp.content_type == "application/json"
#     data = simplejson.loads(resp.body)
#     assert data == ["Second edit"]
#
#     resp = app.get("/file/listopen/")
#     data = simplejson.loads(resp.body)
#     bigmac_data = data['bigmac']
#     assert len(bigmac_data) == 1
#     assert bigmac_data['bar/baz']['mode'] == 'rw'
#
#     app.post("/edit/reset/")
#     resp = app.get("/edit/list/bigmac/bar/baz")
#     data = simplejson.loads(resp.body)
#     assert data == []
#
#     app.put("/edit/at/bigmac/bar/baz", "Starting a file")
#     app.post("/edit/reset/bigmac/bar/baz")
#     resp = app.get("/edit/list/bigmac/bar/baz")
#     data = simplejson.loads(resp.body)
#     assert data == []

def test_get_file_stats_from_web():
    _init_data()
    app.put("/file/at/bigmac/reqs", "Chewing gum wrapper")
    resp = app.get("/file/stats/bigmac/reqs")
    assert resp.content_type == "application/json"
    data = simplejson.loads(resp.body)
    assert data['size'] == 19

def test_preview_mode():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("README.txt", "This is the readme file.")
    bigmac.save_file("foo.flibber", "Can't guess what this is!")

    resp = app.get("/preview/at/bigmac/", status=400)
    resp = app.get("/preview/at/bigmac/README.txt")
    assert resp.body == "This is the readme file."
    assert resp.content_type == "text/plain"

    resp = app.get("/preview/at/bigmac/foo.flibber")
    assert resp.content_type == "application/octet-stream"

    bigmac.save_file("index.html",
                     "<html><body>Simple HTML file</body></html>")

    resp = app.get("/preview/at/bigmac/index.html")
    assert resp.body == "<html><body>Simple HTML file</body></html>"
    assert resp.content_type == "text/html"

def test_quota_limits_on_the_web():
    _init_data()
    old_units = filesystem.QUOTA_UNITS
    filesystem.QUOTA_UNITS = 10
    try:
        resp = app.put("/file/at/bigmac/foo", "x" * 11, status=400)
        assert resp.body == "Over quota"
    finally:
        filesystem.QUOTA_UNITS = old_units

def test_search_from_the_web():
    _init_data()
    bigmac = _setup_search_data()
    resp = app.get("/file/search/bigmac")
    assert resp.content_type == "application/json"
    def run_search(q, limit=20):
        resp = app.get("/file/search/bigmac?%s"
            % urlencode([('q', q.encode('utf-8')), ('limit', limit)]))
        assert resp.content_type == "application/json"
        return simplejson.loads(resp.body)
    _run_search_tests(run_search)

    # illegal limits are turned into the default
    resp = app.get("/file/search/bigmac?limit=foo")

def test_list_all_files():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar/baz.txt", "Text file 1\n")
    bigmac.save_file("README.txt", "Another file\n")
    bigmac.save_file("bespin/noodle.py", "# A python file\n")
    resp = app.get("/file/list_all/bigmac/")
    assert resp.content_type == "application/json"
    data = simplejson.loads(resp.body)
    assert len(data) == 3

def test_install_template_files():
    _init_data()
    app.put("/file/template/jetpacks/mysidebar.html",
        simplejson.dumps(dict(stdtemplate="jetpacks/sidebar.js",
                              values=dict(templateName="mysidebar"))))
    jetpacks = get_project(macgyver, macgyver, "jetpacks")
    datafile = jetpacks.get_file_object("mysidebar.html")

    data = datafile.data
    # add assertions here once this is finalized.
    pass

def test_handling_of_symlinks():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    linksource = bigmac.location / "thefile"
    linksource.write_bytes("Hi")
    linkdest = bigmac.location / "thelink"
    linksource.symlink(linkdest)
    assert linkdest.exists()
    assert linkdest.islink()
    resp = app.get("/file/at/bigmac/thelink", status=400)
    resp = app.put("/file/at/bigmac/thelink", "new data", status=400)
    assert linksource.bytes() == "Hi"

    thisdir = path(__file__).dirname()
    linkdest = bigmac.location / "dirlink"
    thisdir.symlink(linkdest)
    assert linkdest.exists()
    assert linkdest.isdir()
    resp = app.get("/file/list/bigmac/dirlink/", status=400)
@@ -0,0 +1,355 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

import os
from cStringIO import StringIO
import tarfile
import zipfile

from __init__ import BespinTestApp
import simplejson
from path import path

from bespin import config, controllers

from bespin.filesystem import get_project, FileNotFound, _find_common_base
from bespin.database import User, Base

tarfilename = os.path.join(os.path.dirname(__file__), "ut.tgz")
zipfilename = os.path.join(os.path.dirname(__file__), "ut.zip")
otherfilename = os.path.join(os.path.dirname(__file__), "other_import.tgz")
with_tabs = os.path.join(os.path.dirname(__file__), "ProjectWithTabs.tgz")

app = None
macgyver = None
someone_else = None
murdoc = None

def setup_module(module):
    global app
    config.set_profile('test')
    app = controllers.make_app()
    app = BespinTestApp(app)

def _init_data():
    global macgyver, someone_else, murdoc
    config.activate_profile()

    fsroot = config.c.fsroot
    if fsroot.exists() and fsroot.basename() == "testfiles":
        fsroot.rmtree()
    fsroot.makedirs()

    app.reset()

    Base.metadata.drop_all(bind=config.c.dbengine)
    Base.metadata.create_all(bind=config.c.dbengine)
    s = config.c.session_factory()

    someone_else = User.create_user("SomeoneElse", "", "someone@else.com")
    murdoc = User.create_user("Murdoc", "", "murdoc@badpeople.bad")

    otherproject = get_project(someone_else, someone_else,
                               "otherproject", create=True)
    otherproject.save_file('foo', 'Just a file to reserve a project')

    app.post("/register/new/MacGyver",
             dict(password="richarddean", email="rich@sg1.com"))

    macgyver = User.find_user("MacGyver")

def test_project_deletion():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar/baz", "biz")
    bigmac.delete()
    flist = macgyver.projects
    assert "bigmac" not in flist

def test_template_installation():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.install_template()
    data = bigmac.get_file("readme.txt")
    bigmac.close("readme.txt")
    assert "Welcome to Bespin" in data
    result = bigmac.list_files()
    result_names = [file.name for file in result]
    assert 'readme.txt' in result_names

def test_customized_template_installation():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    here = path(__file__).dirname().abspath()
    config.c.template_path.append(here)
    bigmac.install_template("ttemplate", other_vars={'answer': 42})
    flist = bigmac.list_files()
    fnlist = [f.name for f in bigmac.list_files()]
    assert fnlist == ["newfile-bigmac.js"]
    contents = flist[0].data
    print contents
    assert contents == """// newfile-bigmac.js
// Created by MacGyver (who likes the number 42)

alert("Welcome to bigmac");
"""

def test_common_base_selection():
    tests = [
        (["foo.js", "bar.js"], ""),
        (["usertemplate/", "usertemplate/foo.js", "usertemplate/bar.js"], "usertemplate/")
    ]
    def run_one(input, output):
        print "Testing %s" % (input)
        actual = _find_common_base(input)
        assert actual == output
    for input, output in tests:
        yield run_one, input, output

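# Note: the generator test above fixes _find_common_base's contract -- the
# shared leading directory prefix (ending at a slash) of a list of archive
# member names, or "" when there is none. A sketch satisfying both cases,
# assuming a plain longest-common-prefix approach (the real
# bespin.filesystem implementation may differ):
#
# import os.path
#
# def find_common_base(paths):
#     # longest common string prefix, trimmed back to the last "/"
#     prefix = os.path.commonprefix(paths)
#     slash = prefix.rfind("/")
#     return prefix[:slash + 1] if slash >= 0 else ""
#
# assert find_common_base(["foo.js", "bar.js"]) == ""
# assert find_common_base(["usertemplate/", "usertemplate/foo.js",
#                          "usertemplate/bar.js"]) == "usertemplate/"
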
def test_import():
    tests = [
        ("import_tarball", tarfilename),
        ("import_zipfile", zipfilename)
    ]

    def run_one(func, f):
        print "Testing %s on %s" % (func, f)
        handle = open(f)
        _init_data()
        bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
        getattr(bigmac, func)(os.path.basename(f), handle)
        handle.close()
        print bigmac.location
        proj_names = [proj.name for proj in macgyver.projects]
        assert 'bigmac' in proj_names
        filenames = [f.basename() for f in bigmac.location.files()]
        assert "config.js" in filenames
        dirnames = [f.basename() for f in bigmac.location.dirs()]
        assert 'commands' in dirnames
        filenames = [f.basename() for f in (bigmac.location / "commands").files()]
        assert 'yourcommands.js' in filenames

    for test in tests:
        yield run_one, test[0], test[1]

def test_reimport_wipes_out_the_project():
    tests = [
        ("import_tarball", tarfilename),
        ("import_zipfile", zipfilename)
    ]

    def run_one(func, f):
        global macgyver
        print "Testing %s" % (func)
        handle = open(f)
        _init_data()
        bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
        getattr(bigmac, func)(os.path.basename(f), handle)
        handle.close()
        flist = bigmac.list_files()
        flist = [item.name for item in flist]
        assert flist == ["commands/", "config.js", "scratchpad/"]

        handle = open(otherfilename)
        bigmac = get_project(macgyver, macgyver, "bigmac", clean=True)
        bigmac.import_tarball(os.path.basename(f), handle)
        flist = bigmac.list_files()
        flist = [item.name for item in flist]
        assert flist == ["README"]

    for test in tests:
        yield run_one, test[0], test[1]

def test_export_tarfile():
    _init_data()
    handle = open(tarfilename)
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.import_tarball(os.path.basename(tarfilename), handle)
    handle.close()
    tempfilename = bigmac.export_tarball()
    tfile = tarfile.open(tempfilename.name)
    members = tfile.getmembers()
    assert len(members) == 6
    names = set(member.name for member in members)
    # the extra slash shows up in this context, but does not seem to be a problem
    assert 'bigmac//' in names

def test_export_zipfile():
    _init_data()
    handle = open(tarfilename)
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.import_tarball(os.path.basename(tarfilename), handle)
    handle.close()
    tempfilename = bigmac.export_zipfile()
    zfile = zipfile.ZipFile(tempfilename.name)
    members = zfile.infolist()
    assert len(members) == 3
    names = set(member.filename for member in members)
    # the extra slash shows up in this context, but does not seem to be a problem
    assert 'bigmac/commands/yourcommands.js' in names


# -------
# Web tests
# -------

def test_create_a_project_from_the_web():
    _init_data()
    app.put("/file/at/bigmac/")
    project_names = [project.name for project in macgyver.projects]
    assert 'bigmac' in project_names
    bigmac = get_project(macgyver, macgyver, 'bigmac')
    assert not bigmac.list_files()

def test_import_from_the_web():
    tests = [tarfilename, zipfilename]

    def run_one(f):
        _init_data()
        filename = os.path.basename(f)
        print "Trying %s" % filename
        app.post("/project/import/newproj", upload_files=[
            ("filedata", filename, open(f).read())
        ])
        resp = app.get("/file/at/newproj/config.js")
        assert resp.body == ""
        app.post("/file/close/newproj/config.js")

    for test in tests:
        yield run_one, test

def test_import_unknown_file_type():
    _init_data()
    app.post("/project/import/newproj", upload_files=[
        ("filedata", "foo.bar", "Some dummy text")
    ], status=400)

def test_export_unknown_file_type():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar", "INFO!")
    app.get("/project/export/bigmac.foo", status=404)

def test_export_tarball_from_the_web():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar", "INFO!")
    resp = app.get("/project/export/bigmac.tgz")
    assert resp.content_type == "application/x-tar-gz"
    tfile = tarfile.open("bigmac.tgz", "r:gz", StringIO(resp.body))
    members = tfile.getmembers()
    assert len(members) == 3
    membersnames = [member.name for member in members]
    assert "bigmac/foo/bar" in membersnames

def test_export_zipfile_from_the_web():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("foo/bar", "INFO!")
    resp = app.get("/project/export/bigmac.zip")
    assert resp.content_type == "application/zip"
    zfile = zipfile.ZipFile(StringIO(resp.body))
    members = zfile.infolist()
    assert len(members) == 1
    assert "bigmac/foo/bar" == members[0].filename

def test_delete_project_from_the_web():
    global macgyver
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    bigmac.save_file("README.txt", "This is the readme file.")
    resp = app.delete("/file/at/bigmac/")
    assert len(macgyver.projects) == 2

def test_rename_project():
    _init_data()
    app.post("/project/rename/bigmac/", "foobar", status=404)
    app.put("/file/at/bigmac/")

    bigmac = get_project(macgyver, macgyver, "bigmac")
    bigmac.metadata['hello'] = "world"

    app.post("/project/rename/bigmac/", "foobar")
    try:
        bigmac = get_project(macgyver, macgyver, "bigmac")
        assert False, "The bigmac project should have been renamed"
    except FileNotFound:
        pass
    foobar = get_project(macgyver, macgyver, "foobar")
    assert foobar.metadata['hello'] == 'world'

    app.put("/file/at/bigmac/")
    # should get a conflict error if you try to rename to a project
    # that exists
    app.post("/project/rename/foobar/", "bigmac", status=409)

def test_install_template_from_the_web():
    _init_data()
    app.post("/project/template/bigmac/", '{"templateName" : "/etc/passwd"}', status=400)
    app.post("/project/template/bigmac/", '{"templateName" : "../foo"}', status=400)
    app.post("/project/template/bigmac/", '{"templateName" : "ttemplate"}')
    resp = app.get("/file/list/bigmac/")
    files = simplejson.loads(resp.body)
    assert files[0]['name'] == "newfile-bigmac.js"

def test_basic_project_metadata_use():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    metadata = bigmac.metadata
    try:
        value = metadata['remote_auth']
        assert False, "should have gotten key error for unset value"
    except KeyError:
        pass

    metadata['remote_auth'] = "both"
    metadata.close()

    bigmac = get_project(macgyver, macgyver, "bigmac")
    metadata = bigmac.metadata
    # this should re-open the database and retrieve the value
    value = metadata['remote_auth']
    assert value == "both"

    metadata['remote_auth'] = "write"
    metadata.close()

    bigmac = get_project(macgyver, macgyver, "bigmac")
    metadata = bigmac.metadata
    # this should re-open the database and retrieve the value
    value = metadata['remote_auth']
    assert value == "write"

    del metadata['remote_auth']

    try:
        value = metadata['remote_auth']
        assert False, "expected key to be gone from DB"
    except KeyError:
        pass
@@ -0,0 +1,80 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

from __init__ import BespinTestApp
import simplejson

from bespin import config, controllers
from bespin.database import User, Base

app = None
session = None

def setup_module(module):
    global app, session
    config.set_profile('test')
    config.activate_profile()
    Base.metadata.drop_all(bind=config.c.dbengine)
    Base.metadata.create_all(bind=config.c.dbengine)
    session = config.c.session_factory()
    User.create_user("BillBixby", "", "bill@bixby.com")
    app = controllers.make_app()
    app = BespinTestApp(app)
    app.post("/register/login/BillBixby", dict(password=""))

def test_auth_required():
    app = controllers.make_app()
    app = BespinTestApp(app)
    app.post('/settings/', {'foo' : 'bar'}, status=401)
    app.get('/settings/', status=401)
    app.get('/settings/foo', status=401)

def test_set_settings():
    resp = app.post('/settings/', {'antigravity' : 'on', 'write_my_code' : 'on'})
    assert not resp.body
    user = User.find_user('BillBixby')
    session.expunge(user)
    user = User.find_user('BillBixby')
    assert user.settings['antigravity'] == 'on'
    assert user.settings['write_my_code'] == 'on'

    resp = app.get('/settings/')
    assert resp.content_type == 'application/json'
    data = simplejson.loads(resp.body)
    assert data == {'antigravity' : 'on', 'write_my_code' : 'on'}

    resp = app.get('/settings/antigravity')
    assert resp.content_type == "application/json"
    assert resp.body == '"on"'

def test_non_existent_setting_sends_404():
    resp = app.get('/settings/BADONE', status=404)

def test_delete_setting():
    resp = app.post('/settings/', {'newone' : 'hi there'})
    resp = app.delete('/settings/newone')
    user = User.find_user('BillBixby')
    session.expunge(user)
    user = User.find_user('BillBixby')
    assert 'newone' not in user.settings
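
# Note: both settings tests above use the same expunge-then-reload dance.
# Evicting the User from the SQLAlchemy session forces the next find_user
# to rebuild the object from the database, proving the setting was really
# persisted rather than just mutated in memory. In miniature:
#
# user = User.find_user('BillBixby')   # may come from the identity map
# session.expunge(user)                # detach it, discarding cached state
# user = User.find_user('BillBixby')   # now re-queried from the database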
@@ -0,0 +1,49 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#
from datetime import date

from bespin import stats

def test_stats_operations():
    ms = stats.MemoryStats()
    result = ms.incr("foo")
    assert result == 1
    result = ms.decr("foo")
    assert result == 0

    result = ms.incr("foo", 100)
    assert result == 100

    result = ms.incr("foo_DATE", 100)

    datekey = "foo_" + date.today().strftime("%Y%m%d")
    assert datekey in ms.storage

    result = ms.multiget(['foo', datekey])
    assert result == {'foo':100, datekey:100}
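
# Note: test_stats_operations pins down a small counter API -- incr/decr with
# an optional step, a magic "DATE" token that expands to today's date, and
# multiget over raw storage keys. A minimal in-memory sketch that satisfies
# exactly those assertions (hypothetical; the real bespin.stats class may
# also offer other backends, which are not shown here):
#
# from datetime import date
#
# class MemoryStatsSketch(object):
#     def __init__(self):
#         self.storage = {}
#
#     def _expand(self, key):
#         # "foo_DATE" becomes e.g. "foo_20090411" (today's date)
#         return key.replace("DATE", date.today().strftime("%Y%m%d"))
#
#     def incr(self, key, by=1):
#         key = self._expand(key)
#         self.storage[key] = self.storage.get(key, 0) + by
#         return self.storage[key]
#
#     def decr(self, key, by=1):
#         return self.incr(key, -by)
#
#     def multiget(self, keys):
#         return dict((key, self.storage.get(key)) for key in keys)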
@@ -0,0 +1,344 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

import simplejson

from bespin import config, controllers, auth
from bespin.database import User, Base, ConflictError
from bespin.filesystem import get_project

from bespin.tests import BespinTestApp
from bespin.tests.mock import patch

def setup_module(module):
    config.set_profile("test")
    config.activate_profile()

def _clear_db():
    Base.metadata.drop_all(bind=config.c.dbengine)
    Base.metadata.create_all(bind=config.c.dbengine)
    fsroot = config.c.fsroot
    if fsroot.exists() and fsroot.basename() == "testfiles":
        fsroot.rmtree()
    fsroot.makedirs()


def _get_session(clear=False):
    if clear:
        _clear_db()
    s = config.c.session_factory()
    return s

# Model tests
def test_create_new_user():
    s = _get_session(True)
    num_users = s.query(User).count()
    assert num_users == 0
    user = User.create_user("BillBixby", "hulkrulez", "bill@bixby.com")
    assert len(user.uuid) == 36
    num_users = s.query(User).count()
    assert num_users == 1

    users = User.find_by_email("bill@bixby.com")
    assert users[0].username == "BillBixby"

def test_create_duplicate_user():
    s = _get_session(True)
    u = User.create_user("BillBixby", "somepass", "bill@bixby.com")
    s.commit()
    original_password = u.password
    try:
        User.create_user("BillBixby", "otherpass", "bill@bixby.com")
        assert False, "Should have gotten a ConflictError"
    except ConflictError:
        s.rollback()
    s = _get_session(False)
    user = User.find_user("BillBixby")
    assert user.password == original_password, "Password should not have changed"

def test_get_user_returns_none_for_nonexistent():
    s = _get_session(True)
    user = User.find_user("NOT THERE. NO REALLY!")
    assert user is None


# Controller Tests

def test_register_returns_empty_when_not_logged_in():
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.get('/register/userinfo/', status=401)
    assert resp.body == ""

def test_register_and_verify_user():
    config.activate_profile()
    _clear_db()
    s = _get_session()
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
                                                    password="notangry"))
    assert resp.content_type == "application/json"
    data = simplejson.loads(resp.body)
    assert data == {}
    assert resp.cookies_set['auth_tkt']
    assert app.cookies
    billbixby = User.find_user("BillBixby")
    sample_project = get_project(billbixby, billbixby, "SampleProject")
    files = [file.name for file in sample_project.list_files()]
    assert "readme.txt" in files

    # should be able to run again without an exception appearing
    resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
                                                    password="notangry"),
                    status=409)

    # with the cookie set, we should be able to retrieve the
    # logged in name
    resp = app.get('/register/userinfo/')
    assert resp.content_type == 'application/json'
    data = simplejson.loads(resp.body)
    assert data['username'] == 'BillBixby'
    assert 'quota' in data
    assert data['quota'] == 15728640
    assert 'amountUsed' in data

    resp = app.get("/file/at/BespinSettings/config")
    app.post("/file/close/BespinSettings/config")

def test_logout():
    s = _get_session(True)
    User.create_user("BillBixby", "hulkrulez", "bill@bixby.com")
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.post("/register/login/BillBixby",
                    dict(password='hulkrulez'))
    resp = app.get("/register/logout/")
    assert resp.cookies_set['auth_tkt'] == '""'

def test_bad_login_yields_401():
    s = _get_session(True)
    User.create_user("BillBixby", "hulkrulez", "bill@bixby.com")
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.post("/register/login/BillBixby",
                    dict(password="NOTHULK"), status=401)

def test_login_without_cookie():
    s = _get_session(True)
    User.create_user("BillBixby", "hulkrulez", "bill@bixby.com")
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.post("/register/login/BillBixby",
                    dict(password="hulkrulez"))
    assert resp.cookies_set['auth_tkt']

def test_static_files_with_auth():
    _clear_db()
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.get('/editor.html', status=302)
    assert resp.location == "http://localhost/"
    resp = app.post('/register/new/Aldus', dict(password="foo",
                                                email="a@b.com"))
    resp = app.get('/editor.html')

def test_register_existing_user_should_not_authenticate():
    s = _get_session(True)
    app_orig = controllers.make_app()
    app = BespinTestApp(app_orig)
    resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
                                                    password="notangry"))
    app = BespinTestApp(app_orig)
    resp = app.post("/register/new/BillBixby", dict(email="bill@bixby.com",
                                                    password="somethingelse"),
                    status=409)
    assert not resp.cookies_set
    user = User.find_user("BillBixby", 'notangry')
    assert user is not None

def test_bad_ticket_is_ignored():
    _clear_db()
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.post("/register/new/Aldus", dict(password="foo",
                                                email="a@b.com"))
    app.cookies['auth_tkt'] = app.cookies['auth_tkt'][:-1]
    resp = app.get("/preview/at/SampleProjectFor%3AAldus/index.html", status=401)

def test_api_version_header():
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.get("/register/userinfo/", status=401)
    assert resp.headers.get("X-Bespin-API") == "dev"

def test_username_with_bad_characters():
    _clear_db()
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.post("/register/new/Thinga%20Majig",
                    dict(password="foo", email="thinga@majig"), status=400)
    resp = app.post("/register/new/Thinga<majig>",
                    dict(password="foo", email="thinga@majig"), status=400)
    resp = app.post("/register/new/Thing/",
                    dict(password="foo", email="thinga@majig"), status=400)
    resp = app.post("/register/new/..",
                    dict(password="foo", email="thinga@majig"), status=400)

def test_messages_sent_from_server_to_user():
    _clear_db()
    app = controllers.make_app()
    app = BespinTestApp(app)
    resp = app.post("/register/new/macgyver",
                    dict(password="foo", email="macgyver@ducttape.macgyver"))
    s = _get_session()
    macgyver = User.find_user("macgyver")
    assert len(macgyver.messages) == 0
    macgyver.publish(dict(my="message"))
    s.commit()
    resp = app.post("/messages/")
    assert resp.content_type == "application/json"
|
||||
data = simplejson.loads(resp.body)
|
||||
assert len(data) == 1
|
||||
assert data[0] == dict(my="message")
|
||||
|
||||
# the message should be consumed
|
||||
resp = app.post("/messages/")
|
||||
data = simplejson.loads(resp.body)
|
||||
assert len(data) == 0
|
||||
|
||||
def test_get_users_settings():
|
||||
_clear_db()
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
resp = app.post("/register/new/macgyver",
|
||||
dict(password="foo", email="macgyver@ducttape.macgyver"))
|
||||
resp = app.put("/file/at/BespinSettings/settings", """
|
||||
vcsuser Mack Gyver <gyver@mac.com>
|
||||
|
||||
""")
|
||||
s = _get_session()
|
||||
macgyver = User.find_user("macgyver")
|
||||
settings = macgyver.get_settings()
|
||||
assert settings == dict(vcsuser="Mack Gyver <gyver@mac.com>")
|
||||
|
||||
def test_users_can_be_locked_out():
|
||||
config.set_profile("test")
|
||||
config.c.login_failure_tracking = "memory"
|
||||
config.c.login_attempts = "1"
|
||||
config.c.lockout_period = "1"
|
||||
config.activate_profile()
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
_clear_db()
|
||||
|
||||
resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
|
||||
password="notangry"))
|
||||
resp = app.post("/register/login/BillBixby",
|
||||
dict(password="NOTHULK"), status=401)
|
||||
|
||||
# fail with good password now, because we're locked out
|
||||
resp = app.post("/register/login/BillBixby",
|
||||
dict(password="notangry"), status=401)
|
||||
|
||||
@patch('bespin.utils.send_text_email')
|
||||
def test_lost_username(send_text_email):
|
||||
config.set_profile("test")
|
||||
config.activate_profile()
|
||||
_clear_db()
|
||||
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
|
||||
password="notangry"))
|
||||
|
||||
resp = app.post('/register/lost/', dict(email='bill@bixby.com'))
|
||||
assert send_text_email.called
|
||||
args = send_text_email.call_args[0]
|
||||
assert args[0] == 'bill@bixby.com'
|
||||
assert args[1].startswith("Your username for ")
|
||||
assert "Your username is:" in args[2]
|
||||
assert "BillBixby" in args[2]
|
||||
|
||||
@patch('bespin.utils.send_text_email')
|
||||
def test_lost_password_request(send_text_email):
|
||||
config.set_profile("test")
|
||||
config.activate_profile()
|
||||
_clear_db()
|
||||
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
|
||||
password="notangry"))
|
||||
|
||||
app.reset()
|
||||
resp = app.post('/register/lost/', dict(username='BillBixby'))
|
||||
assert send_text_email.called
|
||||
args = send_text_email.call_args[0]
|
||||
assert args[0] == 'bill@bixby.com'
|
||||
assert args[1].startswith("Requested password change for ")
|
||||
user = User.find_user("BillBixby")
|
||||
verify_code = controllers._get_password_verify_code(user)
|
||||
assert verify_code in args[2]
|
||||
|
||||
def test_password_change_with_confirmation_code():
|
||||
config.set_profile("test")
|
||||
config.activate_profile()
|
||||
_clear_db()
|
||||
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
|
||||
password="notangry"))
|
||||
app.reset()
|
||||
|
||||
user = User.find_user("BillBixby")
|
||||
verify_code = controllers._get_password_verify_code(user)
|
||||
resp = app.post('/register/password/BillBixby', dict(
|
||||
code=verify_code,
|
||||
newPassword="hatetraffic"))
|
||||
|
||||
user = User.find_user('BillBixby', 'hatetraffic')
|
||||
assert user
|
||||
|
||||
def test_password_change_bad_code():
|
||||
config.set_profile("test")
|
||||
config.activate_profile()
|
||||
_clear_db()
|
||||
|
||||
app = controllers.make_app()
|
||||
app = BespinTestApp(app)
|
||||
resp = app.post('/register/new/BillBixby', dict(email="bill@bixby.com",
|
||||
password="notangry"))
|
||||
app.reset()
|
||||
|
||||
resp = app.post('/register/password/BillBixby', dict(
|
||||
code="42",
|
||||
newPassword="hatetraffic"),
|
||||
status=400)
|
||||
|
|
@ -0,0 +1,478 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

from uvc.tests.util import mock_run_command
from uvc import hg
import simplejson
from path import path

from bespin import vcs, config, controllers
from bespin.database import User
from bespin.database import Base
from bespin.filesystem import get_project, NotAuthorized

from bespin.tests import BespinTestApp

macgyver = None
app = None

def setup_module(module):
    global app
    config.set_profile('test')
    app = controllers.make_app()
    app = BespinTestApp(app)

def _init_data():
    global macgyver
    config.activate_profile()

    fsroot = config.c.fsroot
    if fsroot.exists() and fsroot.basename() == "testfiles":
        fsroot.rmtree()
    fsroot.makedirs()

    app.reset()

    Base.metadata.drop_all(bind=config.c.dbengine)
    Base.metadata.create_all(bind=config.c.dbengine)
    s = config.c.session_factory()

    app.post("/register/new/MacGyver",
             dict(password="richarddean", email="rich@sg1.com"))

    macgyver = User.find_user("MacGyver")
    s.flush()


clone_output = """requesting all changes
adding changesets
adding manifests
adding file changes
added 9 changesets with 39 changes to 10 files
updating working directory
10 files updated, 0 files merged, 0 files removed, 0 files unresolved
"""

@mock_run_command(clone_output, "bespin")
def test_run_an_hg_clone(run_command_params):
    _init_data()
    vcsuser = "Big Edna <edna@bigednas.com>"
    output = vcs._clone_impl(macgyver, source="http://hg.mozilla.org/labs/bespin",
                             qid="1", vcsuser=vcsuser)
    command, context = run_command_params

    assert isinstance(command, hg.clone)
    working_dir = context.working_dir
    assert working_dir == macgyver.get_location()
    assert output['output'] == clone_output
    assert output['project'] == "bespin"
    assert str(command) == "clone http://hg.mozilla.org/labs/bespin bespin"

    bespin = get_project(macgyver, macgyver, "bespin")
    metadata = bespin.metadata

    assert 'remote_auth' not in metadata
    assert 'push' not in metadata
    assert metadata['remote_url'] == "http://hg.mozilla.org/labs/bespin"
    assert metadata['vcsuser'] == vcsuser
    metadata.close()

diff_output = """diff -r ff44251fbb1e uvc/main.py
--- a/uvc/main.py Thu Mar 19 11:55:30 2009 -0400
+++ b/uvc/main.py Fri Mar 20 15:01:07 2009 -0400
@@ -1,4 +1,5 @@
 "Implements the uvc command processing."
+# Copyright 2009 Mozilla Corporation

 import sys
 import os
"""

@mock_run_command(diff_output)
def test_run_a_diff(run_command_params):
    _init_data()
    bigmac = get_project(macgyver, macgyver, 'bigmac', create=True)
    bigmac.save_file(".hg/hgrc", "# test rc file\n")
    cmd = ["diff"]
    output = vcs._run_command_impl(macgyver, bigmac, cmd, None)
    command, context = run_command_params

    working_dir = context.working_dir

    assert isinstance(command, hg.diff)
    assert working_dir == bigmac.location
    assert output['output'] == diff_output

update_output = """27 files updates from 97 changesets with 3.2 changes per file,
all on line 10."""

@mock_run_command(update_output)
def test_provide_auth_info_to_update_command(run_command_params):
    _init_data()
    bigmac = get_project(macgyver, macgyver, 'bigmac', create=True)
    bigmac.save_file(".hg/hgrc", "# test rc file\n")
    metadata = bigmac.metadata
    metadata['remote_url'] = 'http://hg.mozilla.org/labs/bespin'
    metadata.close()
    keychain = vcs.KeyChain(macgyver, "foobar")
    keychain.set_ssh_for_project(bigmac, vcs.AUTH_BOTH)

    cmd = ["update", "_BESPIN_REMOTE_URL"]
    output = vcs.run_command(macgyver, bigmac, cmd, "foobar")

    command, context = run_command_params
    command_line = command.get_command_line()
    assert command_line[:3] == ["hg", "fetch", "--ssh"]
    assert command_line[3].startswith("ssh -i")
    assert command_line[4] == "http://hg.mozilla.org/labs/bespin"
    # make sure it's not unicode
    assert isinstance(command_line[4], str)
    assert len(command_line) == 5

@mock_run_command(update_output)
def test_dont_provide_auth_info_to_update_command(run_command_params):
    _init_data()
    bigmac = get_project(macgyver, macgyver, 'bigmac', create=True)
    bigmac.save_file(".hg/hgrc", "# test rc file\n")
    keychain = vcs.KeyChain(macgyver, "foobar")
    keychain.set_ssh_for_project(bigmac, vcs.AUTH_BOTH)

    cmd = ["update"]
    output = vcs.run_command(macgyver, bigmac, cmd)

    resp = app.post("/messages/")
    messages = simplejson.loads(resp.body)
    assert len(messages) == 1
    output = messages[0]
    assert 'output' in output
    assert output['output'] == 'Keychain password is required for this command.'

def test_bad_keychain_password():
    _init_data()
    keychain = vcs.KeyChain(macgyver, "foobar")
    keychain.get_ssh_key()

    try:
        keychain = vcs.KeyChain(macgyver, "blorg")
        keychain.get_ssh_key()
        assert False, "Expected exception for bad keychain password"
    except NotAuthorized:
        pass

def test_get_users_vcs_name():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    user = vcs._get_vcs_user(macgyver, bigmac)
    assert user == "MacGyver"

    settings = get_project(macgyver, macgyver, "BespinSettings")
    settings.save_file("settings", """
vcsuser Mack Gyver <gyver@mac.com>
""")
    user = vcs._get_vcs_user(macgyver, bigmac)
    assert user == "Mack Gyver <gyver@mac.com>"

    metadata = bigmac.metadata
    metadata['vcsuser'] = "Big MacGyver <mrbig@macgyver.com>"
    metadata.close()
    user = vcs._get_vcs_user(macgyver, bigmac)
    assert user == "Big MacGyver <mrbig@macgyver.com>"

# Web tests

@mock_run_command(clone_output, create_dir="bigmac")
def test_hg_clone_on_web(run_command_params):
    _init_data()
    resp = app.post("/vcs/clone/",
                    dict(source="http://hg.mozilla.org/labs/bespin",
                         dest="bigmac",
                         push="ssh://hg.mozilla.org/labs/bespin",
                         remoteauth="both",
                         authtype="password",
                         username="someuser",
                         password="theirpass",
                         kcpass="foobar"
                         ))
    assert resp.content_type == "application/json"
    output = simplejson.loads(resp.body)
    assert 'jobid' in output

    resp = app.post("/messages/")
    messages = simplejson.loads(resp.body)
    assert len(messages) == 2
    output = messages[1]['output']
    assert output['project'] == "bigmac"
    assert 'output' in output
    output = output['output']
    command, context = run_command_params

    working_dir = context.working_dir

    global macgyver
    macgyver = User.find_user("MacGyver")
    command_line = " ".join(command.get_command_line())
    assert command_line == "hg clone http://someuser:theirpass@hg.mozilla.org/labs/bespin bigmac"
    assert working_dir == macgyver.get_location()
    assert output == clone_output

    bigmac = get_project(macgyver, macgyver, "bigmac")
    metadata = bigmac.metadata
    assert metadata['remote_auth'] == vcs.AUTH_BOTH
    assert metadata['push'] == "ssh://hg.mozilla.org/labs/bespin"
    metadata.close()

@mock_run_command(clone_output, create_dir="bespin")
def test_hg_clone_on_web_with_ssh(run_command_params):
    _init_data()
    resp = app.post("/vcs/clone/",
                    dict(source="http://hg.mozilla.org/labs/bespin",
                         push="ssh://hg.mozilla.org/labs/bespin",
                         remoteauth="both",
                         authtype="ssh",
                         kcpass="foobar"
                         ))
    assert resp.content_type == "application/json"
    output = simplejson.loads(resp.body)
    assert 'jobid' in output

    resp = app.post("/messages/")
    messages = simplejson.loads(resp.body)
    assert len(messages) == 2
    output = messages[1]
    assert 'output' in output
    output = output['output']
    command, context = run_command_params

    working_dir = context.working_dir

    global macgyver
    macgyver = User.find_user("MacGyver")
    command_line = command.get_command_line()
    assert command_line[0:3] == ["hg", "clone", "--ssh"]
    assert command_line[3].startswith("ssh -i")
    assert command_line[4] == "http://hg.mozilla.org/labs/bespin"
    assert command_line[5] == "bespin"
    assert working_dir == macgyver.get_location()
    assert output['output'] == clone_output

    bespin = get_project(macgyver, macgyver, "bespin")
    metadata = bespin.metadata
    assert metadata['remote_auth'] == vcs.AUTH_BOTH
    assert metadata['push'] == "ssh://hg.mozilla.org/labs/bespin"
    metadata.close()

push_output = "Changes pushed."

@mock_run_command(push_output)
def test_hg_push_on_web(run_command_params):
    _init_data()
    kc = vcs.KeyChain(macgyver, "foobar")
    # generate key pair
    kc.get_ssh_key()
    bigmac = get_project(macgyver, macgyver, 'bigmac', create=True)
    kc.set_ssh_for_project(bigmac, vcs.AUTH_WRITE)
    metadata = bigmac.metadata
    metadata['remote_url'] = "http://hg.mozilla.org/labs/bespin"
    metadata['push'] = "ssh://hg.mozilla.org/labs/bespin"
    metadata.close()
    bigmac.save_file(".hg/hgrc", "# test rc file\n")

    request = simplejson.dumps({'command' : ['push', '_BESPIN_PUSH'],
                                'kcpass' : 'foobar'})
    resp = app.post("/vcs/command/bigmac/", request)
    resp = app.post("/messages/")

    command, context = run_command_params

    assert context.user == "MacGyver"

    command_line = command.get_command_line()
    print command_line
    assert command_line[0:3] == ["hg", "push", "--ssh"]
    assert command_line[3].startswith("ssh -i")
    assert command_line[4] == "ssh://hg.mozilla.org/labs/bespin"

@mock_run_command(diff_output)
def test_hg_diff_on_web(run_command_params):
    _init_data()
    bigmac = get_project(macgyver, macgyver, 'bigmac', create=True)
    bigmac.save_file(".hg/hgrc", "# test rc file\n")

    request = simplejson.dumps({'command' : ['diff']})
    resp = app.post("/vcs/command/bigmac/", request)

    assert resp.content_type == "application/json"
    output = simplejson.loads(resp.body)
    assert 'jobid' in output

    resp = app.post("/messages/")
    messages = simplejson.loads(resp.body)
    assert len(messages) == 1
    output = messages[0]
    assert 'output' in output
    output = output['output']
    command, context = run_command_params

    working_dir = context.working_dir

    command_line = " ".join(command.get_command_line())
    assert command_line == "hg diff"
    assert working_dir == bigmac.location
    print "output=%s" % (output['output'],)
    print "diff_output=%s" % (diff_output,)
    assert output['output'] == diff_output

def test_keychain_creation():
    _init_data()
    kc = vcs.KeyChain(macgyver, "foobar")
    public_key, private_key = kc.get_ssh_key()

    assert public_key.startswith("ssh-rsa")
    assert "RSA PRIVATE KEY" in private_key

    public_key2 = vcs.KeyChain.get_ssh_public_key(macgyver)
    assert public_key2 == public_key

    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)

    kc.set_ssh_for_project(bigmac, vcs.AUTH_BOTH)

    kcfile = path(macgyver.get_location()) / ".bespin-keychain"
    assert kcfile.exists()
    metadata = bigmac.metadata
    assert metadata['remote_auth'] == vcs.AUTH_BOTH
    metadata.close()

    # make sure the file is encrypted
    text = kcfile.bytes()
    assert "RSA PRIVATE KEY" not in text
    assert "ssh-rsa" not in text

    kc = vcs.KeyChain(macgyver, "foobar")
    public_key2, private_key2 = kc.get_ssh_key()
    assert public_key2 == public_key
    assert private_key2 == private_key

    credentials = kc.get_credentials_for_project(bigmac)
    assert "RSA PRIVATE KEY" in credentials['ssh_private_key']
    assert credentials['type'] == "ssh"

    kc.delete_credentials_for_project(bigmac)
    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials is None
    metadata = bigmac.metadata
    try:
        value = metadata['remote_auth']
        assert False, "expected remote_auth key to be removed from project"
    except KeyError:
        pass
    metadata.close()

    kc.set_credentials_for_project(bigmac, vcs.AUTH_WRITE, "macG", "coolpass")

    kc = vcs.KeyChain(macgyver, "foobar")
    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials['type'] == 'password'
    assert credentials['username'] == 'macG'
    assert credentials['password'] == 'coolpass'

    kc.delete_credentials_for_project(bigmac)

    kc = vcs.KeyChain(macgyver, "foobar")
    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials is None

def test_vcs_auth_set_password_on_web():
    _init_data()
    bigmac = get_project(macgyver, macgyver, 'bigmac', create=True)
    resp = app.post("/vcs/setauth/bigmac/", dict(kcpass="foobar",
                                                 type="password", username="macG",
                                                 password="coolpass",
                                                 remoteauth="write"))
    kc = vcs.KeyChain(macgyver, "foobar")
    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials['type'] == 'password'
    assert credentials['username'] == 'macG'
    assert credentials['password'] == 'coolpass'
    metadata = bigmac.metadata
    assert metadata[vcs.AUTH_PROPERTY] == vcs.AUTH_WRITE
    metadata.close()

def test_vcs_auth_set_ssh_newkey_on_web():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.post("/vcs/setauth/bigmac/", dict(kcpass="foobar",
                                                 type="ssh", remoteauth="both"))
    assert resp.content_type == "application/json"
    assert "ssh-rsa" in resp.body

    kc = vcs.KeyChain(macgyver, "foobar")

    credentials = kc.get_credentials_for_project(bigmac)
    assert credentials['type'] == 'ssh'
    assert "RSA PRIVATE KEY" in credentials['ssh_private_key']
    metadata = bigmac.metadata
    assert metadata[vcs.AUTH_PROPERTY] == vcs.AUTH_BOTH
    metadata.close()

def test_vcs_auth_set_should_have_good_remote_auth_value():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    resp = app.post("/vcs/setauth/bigmac/", dict(kcpass="foobar",
                                                 type="ssh", remoteauth="foo"), status=400)

def test_vcs_get_ssh_key_from_web():
    _init_data()
    resp = app.post("/vcs/getkey/", dict(kcpass="foobar"))
    assert resp.content_type == "application/x-ssh-key"
    assert resp.body.startswith("ssh-rsa")

def test_vcs_get_ssh_key_from_web_without_password_no_pubkey():
    _init_data()
    resp = app.post("/vcs/getkey/", status=401)

def test_vcs_get_ssh_key_from_web_without_password_with_pubkey():
    _init_data()
    kc = vcs.KeyChain(macgyver, "foobar")
    # generate the key pair
    kc.get_ssh_key()
    resp = app.post("/vcs/getkey/")
    assert resp.content_type == "application/x-ssh-key"
    assert resp.body.startswith("ssh-rsa")

def test_find_out_remote_auth_info_from_web():
    _init_data()
    bigmac = get_project(macgyver, macgyver, "bigmac", create=True)
    keychain = vcs.KeyChain(macgyver, "foobar")
    keychain.set_ssh_for_project(bigmac, vcs.AUTH_BOTH)
    resp = app.get("/vcs/remoteauth/bigmac/")
    assert resp.body == "both"

    keychain.delete_credentials_for_project(bigmac)
    resp = app.get("/vcs/remoteauth/bigmac/")
    assert resp.body == ""

    keychain.set_ssh_for_project(bigmac, vcs.AUTH_WRITE)
    resp = app.get("/vcs/remoteauth/bigmac/")
    assert resp.body == "write"
@ -0,0 +1,4 @@
// {filename}
// Created by {username} (who likes the number {answer})

alert("Welcome to {project}");
Binary file not shown.
Binary file not shown.
@ -0,0 +1,11 @@
This is the directory for your commands.

Name them: COMMAND_NAME.js, e.g. calculate.js.

To access them, you can run:

* cmdlist
* cmdload commandname
* cmdedit commandname
* cmdadd commandname
* cmdrm commandname
@ -0,0 +1,13 @@
------------------------------------------------------------------------------
Bespin Plugins Directory
------------------------------------------------------------------------------

This is where your Bespin plugins go.

There are a few ways to install a new plugin:

- Run "plugin install http://path/to/plugin" and it will copy it in here
- Manually create a file named YourPlugin.js that has the correct values in it (copy another plugin to make sure)

Now you can "plugin load YourPlugin" to see it in action.
@ -0,0 +1,13 @@
------------------------------------------------------------------------------
Bespin Themes Directory
------------------------------------------------------------------------------

This is where your custom themes go.

There are a few ways to create a new theme:

- Run "theme install http://path/to/theme" and it will copy it in
- Manually create a file named YourTheme.js that has the correct values in it (copy another theme such as "greenonblack.js" to make sure)

Now you can "set theme YourTheme" to see it in action.
@ -0,0 +1,86 @@
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1
 *
 * The contents of this file are subject to the Mozilla Public License
 * Version 1.1 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS"
 * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
 * See the License for the specific language governing rights and
 * limitations under the License.
 *
 * The Original Code is Bespin.
 *
 * The Initial Developer of the Original Code is Mozilla.
 * Portions created by the Initial Developer are Copyright (C) 2009
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *   Bespin Team (bespin@mozilla.com)
 *
 * ***** END LICENSE BLOCK ***** */

dojo.provide("bespin.themes.greenonblack");

// = Green on Black Theme =
bespin.themes.greenonblack = {
    backgroundStyle: "#000000",
    gutterStyle: "#d2d2d2",
    lineNumberColor: "#888888",
    lineNumberFont: "10pt Monaco, Lucida Console, monospace",
    zebraStripeColor: "#000000", //"#111111",
    highlightCurrentLineColor: "#3a312b",
    editorTextFont: "10pt Monaco, Lucida Console, monospace",
    editorTextColor: "#2fe41f",
    editorSelectedTextColor: "rgb(240, 240, 240)",
    editorSelectedTextBackground: "#243b75",
    cursorStyle: "#879aff",
    cursorType: "ibeam", // one of "underline" or "ibeam"
    unfocusedCursorStrokeStyle: "#FF0033",
    unfocusedCursorFillStyle: "#73171E",
    partialNibStyle: "rgba(100, 100, 100, 0.3)",
    partialNibArrowStyle: "rgba(255, 255, 255, 0.3)",
    partialNibStrokeStyle: "rgba(150, 150, 150, 0.3)",
    fullNibStyle: "rgb(100, 100, 100)",
    fullNibArrowStyle: "rgb(255, 255, 255)",
    fullNibStrokeStyle: "rgb(150, 150, 150)",
    scrollTrackFillStyle: "rgba(50, 50, 50, 0.8)",
    scrollTrackStrokeStyle: "rgb(150, 150, 150)",
    scrollBarFillStyle: "rgba(0, 0, 0, %a)",
    scrollBarFillGradientTopStart: "rgba(90, 90, 90, %a)",
    scrollBarFillGradientTopStop: "rgba(40, 40, 40, %a)",
    scrollBarFillGradientBottomStart: "rgba(22, 22, 22, %a)",
    scrollBarFillGradientBottomStop: "rgba(44, 44, 44, %a)",
    tabSpace: "#E0D4CB",
    searchHighlight: "#B55C00",
    searchHighlightSelected: "#FF9A00",

    // syntax definitions
    plain: "#bdae9d",
    preprocessor: "rgb(100,100,100)",
    keyword: "#42a8ed",
    string: "#039a0a",
    comment: "#666666",
    'c-comment': "#666666",
    punctuation: "#888888",
    attribute: "#BF9464",
    test: "rgb(255,0,0)",
    cdata: "#bdae9d",
    "attribute-value": "#BF9464",
    tag: "#bdae9d",
    "tag-name": "#bdae9d",
    value: "#BF9464",
    important: "#990000",
    cssclass: "#BF9464",
    cssid: "#bdae9d"

    // Codemirror additions
    // TODO:
};

// ** Black Zebra Theme **
bespin.themes.greenonblackzebra = {};
dojo.mixin(bespin.themes.greenonblackzebra, bespin.themes.greenonblack);
bespin.themes.greenonblackzebra.zebraStripeColor = '#111111';
@ -0,0 +1,89 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

import re
import logging
import smtplib
from email.mime.text import MIMEText

import pkg_resources

from bespin import config, jsontemplate

log = logging.getLogger("bespin.model")

class BadValue(Exception):
    pass

good_characters = r'\w-'
good_pattern = re.compile(r'^\w[%s]*$' % good_characters, re.UNICODE)

def _check_identifiers(kind, value):
    if not config.c.restrict_identifiers:
        return
    if not good_pattern.match(value):
        log.error("Invalid identifier kind='%s', value='%s'" % (kind, value))
        raise BadValue("%s must only contain letters, numbers and dashes and must start with a letter or number." % (kind))
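
# Illustrative sketch (an added annotation, not from the original file):
# with restrict_identifiers enabled, good_pattern accepts names such as
# "bigmac" or "my-project2" and rejects "Thinga Majig", "Thing/" and "..",
# which is exactly what the registration tests exercise:
#
#     assert good_pattern.match("my-project2")
#     assert not good_pattern.match("..")
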
def send_text_email(to_addr, subject, text, from_addr=None):
    """Send email to addresses given by to_addr (can be a string or a list),
    with the subject provided and the message text given.
    If not given, from_addr will be set to the configured email_from value.
    The message will be sent via the configured server at email_host, email_port.

    If the configured email_host is None or "", no email will be sent.
    """

    if not config.c.email_host:
        return

    if from_addr is None:
        from_addr = config.c.email_from

    if isinstance(to_addr, basestring):
        to_addr = [to_addr]
    msg = MIMEText(text)
    msg['Subject'] = subject
    msg['From'] = from_addr
    msg['To'] = ", ".join(to_addr)

    s = smtplib.SMTP()
    s.connect(config.c.email_host, config.c.email_port)
    s.sendmail(from_addr, to_addr, msg.as_string())
    s.quit()
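
# Usage sketch (hypothetical values; assumes email_host is configured):
#
#     send_text_email("bill@bixby.com", "Your username for Bespin",
#                     "Your username is: BillBixby")
#
# With email_host unset the call returns without sending anything, which is
# why the tests patch this function instead of standing up a mail server.
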
def send_email_template(to_addr, subject, template_name, context, from_addr=None):
    """Send an email by applying context to the template in bespin/mailtemplates
    given by template_name and passing the resulting text to send_text_email."""
    template_filename = pkg_resources.resource_filename("bespin",
                                                        "mailtemplates/%s" % template_name)
    template_file = open(template_filename)

    try:
        template = jsontemplate.FromFile(template_file)
    finally:
        template_file.close()

    text = template.expand(context)
    send_text_email(to_addr, subject, text, from_addr)
@ -0,0 +1,556 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the License.
#
# The Original Code is Bespin.
#
# The Initial Developer of the Original Code is Mozilla.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# ***** END LICENSE BLOCK *****
#

"""Manages the interaction with version control systems. UVC is responsible
for handling the main interaction. This code manages the keychain which
contains a user's credentials for the remote side of a VCS.

The PyCrypto code used to encrypt the keychain is based on the example
from here:

http://www.codekoala.com/blog/2009/mar/16/aes-encryption-python-using-pycrypto/
"""
import os
import tempfile
import random
from traceback import format_exc
import logging
from cStringIO import StringIO
import time

from path import path
import simplejson
from uvc import main
from uvc.main import is_new_project_command
from uvc.commands import GetValueFromEditor, inserted_value
from Crypto.Cipher import AES

from bespin import config, queue, database, filesystem
from bespin.database import User, Message
from bespin.filesystem import FSException, NotAuthorized, get_project

log = logging.getLogger("bespin.vcs")

# remote repository requires authentication for read and write
AUTH_BOTH = "both"

# remote repository requires authentication only for writing
AUTH_WRITE = "write"

# project property used to save the authentication type for a project
AUTH_PROPERTY = "remote_auth"

# the block size for the cipher object; must be 16, 24, or 32 for AES
BLOCK_SIZE = 32

# the character used for padding--with a block cipher such as AES, the value
# you encrypt must be a multiple of BLOCK_SIZE in length. This character is
# used to ensure that your value is always a multiple of BLOCK_SIZE
PADDING = '{'

# one-liner to sufficiently pad the text to be encrypted
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING

# one-liners to encrypt/encode and decrypt/decode a string
# encrypt with AES, encode with base64
EncodeAES = lambda c, s: c.encrypt(pad(s)).encode("base64")
DecodeAES = lambda c, e: c.decrypt(e.decode("base64")).rstrip(PADDING)
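
# Round-trip sketch of these helpers (illustrative only, assuming PyCrypto's
# AES; values are made up):
#
#     cipher = AES.new(pad("secret"))  # pad() yields a 32-byte key here
#     token = EncodeAES(cipher, '{"projects": {}}')
#     assert DecodeAES(cipher, token) == '{"projects": {}}'
#
# rstrip(PADDING) only strips trailing '{' characters; the keychain stores
# JSON objects, which end in '}', so real data is never clipped.
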
def _get_vcs_user(user, project):
    metadata = project.metadata
    vcsuser = metadata.get("vcsuser")
    if vcsuser:
        return vcsuser

    settings = user.get_settings()
    vcsuser = settings.get("vcsuser")
    if vcsuser:
        return vcsuser
    return user.username
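
# Precedence sketch (illustrative, not from the original file): project
# metadata overrides the user's BespinSettings/settings value, which in turn
# overrides the plain username, e.g.
#
#     _get_vcs_user(macgyver, bigmac)  # -> "MacGyver" with nothing set;
#                                      # -> "Mack Gyver <gyver@mac.com>" once
#                                      #    the settings file defines vcsuser
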
def clone(user, source, dest=None, push=None, remoteauth="write",
          authtype=None, username=None, password=None, kcpass="",
          vcs="hg", vcsuser=None):
    """Clones or checks out the repository using the command provided."""
    user = user.username
    job_body = dict(user=user, source=source, dest=dest, push=push,
                    remoteauth=remoteauth,
                    authtype=authtype, username=username, password=password,
                    kcpass=kcpass, vcs=vcs, vcsuser=vcsuser)
    return queue.enqueue("vcs", job_body, execute="bespin.vcs:clone_run",
                         error_handler="bespin.vcs:vcs_error",
                         use_db=True)

def vcs_error(qi, e):
    """Handles exceptions that come up during VCS operations.
    A message is added to the user's message queue."""
    log.debug("Handling VCS error: %s", e)
    s = database._get_session()
    user = qi.message['user']
    # if the user hadn't already been looked up, go ahead and pull
    # them out of the database
    if isinstance(user, basestring):
        user = User.find_user(user)
    else:
        s.add(user)

    # if we didn't find the user in the database, there's not much
    # we can do.
    if user:
        if isinstance(e, (FSException, main.UVCError)):
            # for exceptions that are our types, just display the
            # error message
            tb = str(e)
        else:
            # otherwise, it looks like a programming error and we
            # want more information
            tb = format_exc()
        message = dict(jobid=qi.id, output=tb, error=True)
        message['asyncDone'] = True
        retval = Message(user_id=user.id, message=simplejson.dumps(message))
        s.add(retval)

def clone_run(qi):
    """Runs the queued up clone job."""
    message = qi.message
    s = database._get_session()
    user = User.find_user(message['user'])
    message['user'] = user

    # wrap the output in "output" in a new dictionary, because
    # the client will peel off one layer from this.
    result = dict(output=_clone_impl(qid=qi.id, **message))
    result.update(dict(jobid=qi.id, asyncDone=True))
    retvalue = Message(user_id=user.id, message=simplejson.dumps(result))
    s.add(retvalue)
    config.c.stats.incr('vcs_DATE')

class LineCounterOutput(object):
    def __init__(self, user, qid, output_file, label="files received"):
        self.user_id = user.id
        self.qid = qid
        self.next_time = 0
        self.line_count = 0
        self.label = label
        self.output_file = output_file

    def write(self, data):
        self.output_file.write(data)

        num_lines = data.count("\n")
        if num_lines:
            self.line_count += num_lines
            now = time.time()
            if now >= self.next_time:
                self.post_message()
                # 5 seconds per message
                self.next_time = now + 5

    def post_message(self):
        s = database._get_session()
        message_body = dict(jobid=self.qid, asyncDone=False,
                            output="%s %s" % (self.line_count, self.label))
        message = Message(user_id=self.user_id,
                          message=simplejson.dumps(message_body))
        s.add(message)
        s.commit()

def _clone_impl(user, source, qid, dest=None, push=None, remoteauth="write",
                authtype=None, username=None, password=None, kcpass="",
                vcs="hg", vcsuser=None):
    working_dir = user.get_location()

    args = ["clone", source]
    if dest:
        args.append(dest)
    auth = {}
    if username:
        auth['username'] = username
    if password:
        auth['password'] = password

    keychain = KeyChain(user, kcpass)
    keyfile = None

    if vcs:
        dialect = main.get_dialect(vcs)
    else:
        dialect = None

    if authtype:
        auth['type'] = authtype
        if authtype == "ssh":
            public_key, private_key = keychain.get_ssh_key()
            keyfile = TempSSHKeyFile()
            keyfile.store(public_key, private_key)
            auth['key'] = keyfile.filename

    try:
        output_file = StringIO()
        output = LineCounterOutput(user, qid, output_file)
        context = main.SecureContext(working_dir, auth,
                                     timeout=config.c.vcs_timeout,
                                     output=output)
        command = main.convert(context, args, dialect)

        main.run_command(command, context)
        log.debug(output)
    finally:
        if keyfile:
            keyfile.delete()

    if output.return_code:
        return dict(success=False,
                    output=output_file.getvalue(),
                    command="clone")

    project = filesystem.get_project(user, user, command.dest)

    if authtype == "ssh":
        keychain.set_ssh_for_project(project, remoteauth)
    elif authtype == "password":
        keychain.set_credentials_for_project(project, remoteauth, username,
                                             password)

    metadata = project.metadata
    metadata['remote_url'] = source

    if push:
        metadata['push'] = push

    if vcsuser:
        metadata['vcsuser'] = vcsuser

    space_used = project.scan_files()
    user.amount_used += space_used

    metadata.close()

    result = dict(success=True,
                  output=output_file.getvalue(),
                  command="clone", project=command.dest)
    return result

def run_command(user, project, args, kcpass=None):
    """Run any VCS command through UVC."""
    user = user.username
    project = project.name
    job_body = dict(user=user, project=project, args=args, kcpass=kcpass)
    return queue.enqueue("vcs", job_body, execute="bespin.vcs:run_command_run",
                         error_handler="bespin.vcs:vcs_error",
                         use_db=True)

def run_command_run(qi):
    """Runs the queued up run_command job."""
    message = qi.message
    s = database._get_session()
    user = User.find_user(message['user'])
    message['user'] = user
    message['project'] = get_project(user, user, message['project'])

    # wrap the output in "output" in a new dictionary, because
    # the client will peel off one layer from this.
    result = dict(output=_run_command_impl(**message))

    result.update(dict(jobid=qi.id, asyncDone=True))
    retvalue = Message(user_id=user.id, message=simplejson.dumps(result))
    s.add(retvalue)
    config.c.stats.incr('vcs_DATE')

def _run_command_impl(user, project, args, kcpass):
    """Synchronous implementation of run_command."""
    working_dir = project.location
    metadata = project.metadata

    try:
        for i in range(0, len(args)):
            if args[i] == "_BESPIN_REMOTE_URL":
                try:
                    args[i] = metadata["remote_url"].encode("utf8")
                except KeyError:
                    del args[i]
                    break
            elif args[i] == "_BESPIN_PUSH":
                try:
                    args[i] = metadata["push"].encode("utf8")
                except KeyError:
                    del args[i]
                    break

        output_file = StringIO()
        output = main.IOWrapper(output_file)
        context = main.SecureContext(working_dir,
                                     timeout=config.c.vcs_timeout,
                                     output=output)
        context.user = _get_vcs_user(user, project)

        if args and args[0] in main.dialects:
            dialect = None
        elif not is_new_project_command(args):
            dialect = main.infer_dialect(working_dir)
        else:
            dialect = None

        command_class = main.get_command_class(context, args, dialect)
        command_name = command_class.__name__

        keyfile = None

        if command_class.reads_remote or command_class.writes_remote:
            remote_auth = metadata.get(AUTH_PROPERTY)
            if command_class.writes_remote or remote_auth == AUTH_BOTH:
                if not kcpass:
                    raise NotAuthorized("Keychain password is required for this command.")
                keychain = KeyChain(user, kcpass)
                credentials = keychain.get_credentials_for_project(project)
                if credentials['type'] == 'ssh':
                    keyfile = TempSSHKeyFile()
                    keyfile.store(credentials['ssh_public_key'],
                                  credentials['ssh_private_key'])
                    auth = dict(type='ssh', key=keyfile.filename)
                else:
                    auth = credentials
                context.auth = auth

        try:
            try:
                command = command_class.from_args(context, args)
            except GetValueFromEditor, e:
                new_args = e.template_args
                for i in range(0, len(new_args)):
                    if new_args[i] == inserted_value:
                        new_args[i] = "VALUE-GOES_HERE"
                return dict(success=False,
                            needsInput=True, args=new_args,
                            prompt=e.prompt)

            main.run_command(command, context)
        finally:
            if keyfile:
                keyfile.delete()

        if output.return_code:
            return dict(command=command_name, success=False,
                        output=output_file.getvalue())
    finally:
        metadata.close()

    result = dict(command=command_name,
                  output=output_file.getvalue(),
                  success=True)
    return result

class TempSSHKeyFile(object):
    def __init__(self):
        self.tdir = path(tempfile.mkdtemp())
        self.filename = self.tdir / str(random.randint(10, 20000000))

    def create_key(self):
        destfile = self.filename
        os.system("ssh-keygen -N '' -f %s > /dev/null" % (destfile))
        private_key = destfile.bytes()
        pubkeyfile = destfile + ".pub"
        pubkey = pubkeyfile.bytes()
        return pubkey, private_key

    def store(self, public_key, private_key):
        destfile = self.filename
        destfile.write_bytes(private_key)
        pubkeyfile = destfile + ".pub"
        pubkeyfile.write_bytes(public_key)
        self.fix_permissions()

    def fix_permissions(self):
        destfile = self.filename
        destfile.chmod(0600)
        destfile = destfile + ".pub"
        destfile.chmod(0600)

    def delete(self):
        self.tdir.rmtree()

class KeyChain(object):
    """The KeyChain holds the user's credentials for remote
    repositories. These credentials are stored in an encrypted
    file (the file is encrypted with the password provided)."""

    def __init__(self, user, password):
        self.user = user
        self.password = pad(password[:31])
        self._kcdata = None

    @classmethod
    def get_ssh_public_key(cls, user):
        """Retrieve the user's public key without decrypting
        the keychain."""
        # external API users should not instantiate without a KeyChain password
        kc = cls(user, "")
        pubfile = kc.public_key_file
        if not pubfile.exists():
            raise NotAuthorized("Keychain is not set up. Please initialize with a password.")
        return pubfile.bytes()

    def get_ssh_key(self):
        """Returns the SSH key pair for this key chain. If necessary,
        this function will generate a new key pair."""
        kcdata = self.kcdata
        if "ssh" in kcdata:
            return kcdata['ssh']['public'], kcdata['ssh']['private']

        sshkeyfile = TempSSHKeyFile()
        try:
            pubkey, private_key = sshkeyfile.create_key()
        finally:
            sshkeyfile.delete()

        kcdata['ssh'] = dict(public=pubkey, private=private_key)
        self.public_key_file.write_bytes(pubkey)
        self._save()
        return pubkey, private_key

    def set_ssh_key(self, private_key, public_key):
        """Sets the SSH key. This key should be a passwordless key."""
        kcdata = self.kcdata
        kcdata['ssh'] = dict(public=public_key, private=private_key)
        self.public_key_file.write_bytes(public_key)
        self._save()

    @property
    def kcdata(self):
        """Return the data object representing the keychain data."""
        if self._kcdata is None:
            kcfile = self.kcfile
            if not kcfile.exists():
                self._kcdata = {}
            else:
                text = kcfile.bytes()

                # create a cipher object using the random secret
                cipher = AES.new(self.password)
                text = DecodeAES(cipher, text)

                if not text.startswith("{"):
                    raise NotAuthorized("Bad keychain password")

                self._kcdata = simplejson.loads(text)
        return self._kcdata

    @property
    def public_key_file(self):
        """Get the public key filename"""
        return self.kcfile + "-public"

    @property
    def kcfile(self):
        """Return path object pointing to the keychain file on disk"""
        return path(self.user.get_location()) / ".bespin-keychain"

    def _save(self):
        """Saves the new state of the keychain to the keychain file."""
        # the keychain data has not even been loaded, so we can move on.
        if self._kcdata is None:
            return
        newdata = simplejson.dumps(self.kcdata)

        # create a cipher object using the random secret
        cipher = AES.new(self.password)
        newdata = EncodeAES(cipher, newdata)

        self.kcfile.write_bytes(newdata)

    def set_ssh_for_project(self, project, remote_auth):
        """Stores that the SSH key in this keychain
        should be used as the credentials for the project
        given. If there is no SSH key, one will be
        generated. The SSH public key will be
        returned. remote_auth should be one of vcs.AUTH_BOTH
        when authentication is required for read and write
        and vcs.AUTH_WRITE when authentication is only
        required for writing to the remote repository."""
        kcdata = self.kcdata
        pubkey = self.get_ssh_key()
        projects = kcdata.setdefault("projects", {})
        projects[project.full_name] = dict(type="ssh")

        self._save()
        metadata = project.metadata
        metadata[AUTH_PROPERTY] = remote_auth
        metadata.close()
        return pubkey

    def set_credentials_for_project(self, project, remote_auth, username,
                                    password):
        """Sets up username/password authentication for the
        given project."""
        kcdata = self.kcdata
        projects = kcdata.setdefault("projects", {})
        projects[project.full_name] = dict(type="password",
                                           username=username, password=password)

        self._save()
        metadata = project.metadata
        metadata[AUTH_PROPERTY] = remote_auth
        metadata.close()

    def get_credentials_for_project(self, project):
        """Returns a dictionary with the user's information for
        the given project. The dictionary will have 'type'
        with values 'ssh', or 'password'. If the type is ssh,
        there will be an ssh_key entry. If the type is password,
        there will be username and password entries. If there
        are no credentials stored for the given project,
        None is returned."""
        kcdata = self.kcdata
        projects = kcdata.setdefault("projects", {})

        value = projects.get(project.full_name)

        if value is not None:
            # we're going to make a copy of the data so that it
            # doesn't get mutated against our wishes
            value = dict(value)

            # for SSH, we need to change the SSH key name into the key itself.
            if value['type'] == "ssh":
                value['ssh_private_key'] = kcdata['ssh']['private']
                value['ssh_public_key'] = kcdata['ssh']['public']

        return value
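
    # Shapes returned above, for illustration only (values are made up):
    #   ssh:      {'type': 'ssh', 'ssh_public_key': 'ssh-rsa ...',
    #              'ssh_private_key': '-----BEGIN RSA PRIVATE KEY-----...'}
    #   password: {'type': 'password', 'username': 'macG',
    #              'password': 'coolpass'}
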
    def delete_credentials_for_project(self, project):
        """Forget the authentication information provided
        for the given project. Note that this will not
        remove any SSH keys used by the project."""
        kcdata = self.kcdata
        projects = kcdata.setdefault("projects", {})
        try:
            del projects[project.full_name]
        except KeyError:
            pass

        self._save()
        metadata = project.metadata
        del metadata[AUTH_PROPERTY]
        metadata.close()
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,176 @@
|
|||
# ***** BEGIN LICENSE BLOCK *****
|
||||
# Version: MPL 1.1
|
||||
#
|
||||
# The contents of this file are subject to the Mozilla Public License
|
||||
# Version
|
||||
# 1.1 (the "License"); you may not use this file except in compliance
|
||||
# with
|
||||
# the License. You may obtain a copy of the License at
|
||||
# http://www.mozilla.org/MPL/
|
||||
#
|
||||
# Software distributed under the License is distributed on an "AS IS"
|
||||
# basis,
|
||||
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
|
||||
# License
|
||||
# for the specific language governing rights and limitations under the
|
||||
# License.
|
||||
#
|
||||
# The Original Code is Bespin.
|
||||
#
|
||||
# The Initial Developer of the Original Code is Mozilla.
|
||||
# Portions created by the Initial Developer are Copyright (C) 2009
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
#
|
||||
# ***** END LICENSE BLOCK *****
|
||||
#
|
||||
|
||||
import re
|
||||
import os
|
||||
|
||||
from setuptools import find_packages
|
||||
from paver.setuputils import find_package_data
|
||||
|
||||
from paver.easy import *
|
||||
from paver import setuputils
|
||||
setuputils.install_distutils_tasks()
|
||||
|
||||
|
||||
execfile(os.path.join('bespin', '__init__.py'))
|
||||
|
||||
options(
|
||||
setup=Bunch(
|
||||
name="BespinServer",
|
||||
version=VERSION,
|
||||
packages=find_packages(),
|
||||
package_data=find_package_data('bespin', 'bespin',
|
||||
only_in_packages=False),
|
||||
entry_points="""
|
||||
[console_scripts]
|
||||
bespin_worker=bespin.queue:process_queue
|
||||
queue_stats=bespin.queuewatch:command
|
||||
telnet_mobwrite=bespin.mobwrite.mobwrite_daemon:process_mobwrite
|
||||
bespin_mobwrite=bespin.mobwrite.mobwrite_web:start_server
|
||||
"""
|
||||
),
|
||||
server=Bunch(
|
||||
# set to true to allow connections from other machines
|
||||
address="",
|
||||
port=8080,
|
||||
try_build=False,
|
||||
dburl=None,
|
||||
async=False,
|
||||
config_file=path("devconfig.py")
|
||||
)
|
||||
)
|
||||
|
||||
@task
|
||||
@needs(['setuptools.command.develop'])
|
||||
def develop():
|
||||
sh("easy_install ext/pip-0.4.1.tar.gz")
|
||||
sh("pip install -r requirements.txt")

@task
def start():
    """Starts the BespinServer on localhost port 8080 for development.

    You can change the port and allow remote connections by setting
    server.port or server.address on the command line.

    paver server.address=your.ip.address server.port=8000 start

    will allow remote connections (assuming you don't have a firewall
    blocking the connection) and start the server on port 8000.
    """
    from bespin import config, controllers
    from paste.httpserver import serve

    options.order('server')

    config.set_profile('dev')

    if options.server.try_build:
        config.c.static_dir = (options.build_dir / "frontend").abspath()

    if options.server.dburl:
        config.c.dburl = options.server.dburl

    if options.server.async:
        config.c.async_jobs = True

    config_file = options.server.config_file
    if config_file.exists():
        info("Loading config: %s", config_file)
        code = compile(config_file.bytes(), config_file, "exec")
        exec code in {}

    config.activate_profile()
    port = int(options.port)
    serve(controllers.make_app(), options.address, port, use_threadpool=True)
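
# Editorial sketch (hedged): with the defaults above, `paver start` reduces
# to roughly the following, serving the dev profile on port 8080:
#
#   from bespin import config, controllers
#   from paste.httpserver import serve
#   config.set_profile('dev')
#   config.activate_profile()
#   serve(controllers.make_app(), "", 8080, use_threadpool=True)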

@task
@needs(['sdist'])
def production():
    """Gets things ready for production."""
    non_production_packages = set(["py", "WebTest", "boto", "virtualenv",
                                   "Paver", "BespinServer", "nose",
                                   "path", "httplib2",
                                   "MySQL-python"])
    production = path("production")
    production_requirements = production / "requirements.txt"

    libs_dest = production / "libs"
    libs_dest.rmtree()
    libs_dest.mkdir()

    sdist_file = path("dist/BespinServer-%s.tar.gz" % options.version)
    sdist_file.move(libs_dest)

    ext_dir = path("ext")
    external_libs = []
    for f in ext_dir.glob("*"):
        f.copy(libs_dest)
        name = f.basename()
        name = name[:name.index("-")]
        non_production_packages.add(name)
        external_libs.append("libs/%s" % (f.basename()))

    lines = production_requirements.lines() if production_requirements.exists() else []

    requirement_pattern = re.compile(r'^(.*)==')

    i = 0
    found_packages = set()
    while i < len(lines):
        rmatch = requirement_pattern.match(lines[i])
        if rmatch:
            name = rmatch.group(1)
            found_packages.add(name)
            deleted = False
            for npp in non_production_packages:
                if name == npp or "BespinServer-tip" in npp:
                    del lines[i]
                    deleted = True
                    break
            if deleted:
                continue
        i += 1

    lines.append("libs/BespinServer-%s.tar.gz" % options.version)

    # path.py doesn't install properly via pip/easy_install
    lines.append("http://pypi.python.org/packages/source/p/path.py/"
                 "path-2.2.zip#md5=941660081788282887f652510d80e64e")

    lines.append("http://httplib2.googlecode.com/files/httplib2-0.4.0.tar.gz")

    lines.append("http://pypi.python.org/packages/source/"
                 "M/MySQL-python/MySQL-python-1.2.3c1.tar.gz")

    lines.extend(external_libs)
    production_requirements.write_lines(lines)

    call_pavement("production/pavement.py", "bootstrap")
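
# Editorial sketch (hedged): requirement_pattern only matches pinned
# "name==version" lines, capturing the bare package name; URL and tarball
# lines fall through and are kept. For example:
#
#   >>> import re
#   >>> re.compile(r'^(.*)==').match("SQLAlchemy==0.5.4").group(1)
#   'SQLAlchemy'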
@ -0,0 +1,98 @@
import os
import sys  # used below for sys.version_info

from paver.easy import *

import paver.virtual

options(
    virtualenv=Bunch(
        packages_to_install=['pip'],
        paver_command_line="setup"
    ),
    db=Bunch(
        wsgiscript=lambda: os.path.abspath("../wsgi-apps/bespin.wsgi")
    )
)

def _clean_up_extra_egg_info(d):
    """pip freeze has a problem with the extra egg-info directories
    that setuptools leaves lying around when you install using
    --single-version-externally-managed (which is what pip does).

    This does the simplest thing possible to fix this, which is to
    get rid of all but the last egg-info directory.

    The directory passed in should be the "lib" directory, which is
    above the pythonX.X/site-packages directory. This function will
    figure out which version of python is in use."""
    d = path(d)
    vi = sys.version_info
    d = d / ("python%s.%s" % (vi[0], vi[1])) / "site-packages"
    lastname = None
    lastf = None
    for f in d.glob("*.egg-info"):
        fullname = f.basename()
        pkgname = fullname.split("-", 1)[0]
        # check for a match against the previous entry
        if lastname == pkgname:
            # we have a match, so delete the previous one, which is
            # in theory an older version
            info("Deleting old egg-info: %s", lastf)
            lastf.rmtree()
        lastf = f
        lastname = pkgname
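
# Editorial sketch (hedged): given a site-packages containing, say,
#   Foo-1.0-py2.5.egg-info
#   Foo-1.1-py2.5.egg-info
# the loop above keeps only the last entry glob() yields for "Foo" and
# rmtree()s the earlier duplicate, so `pip freeze` reports a single version.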

@task
def setup():
    """Get this production environment set up."""
    downloads = path("downloads")
    downloads.mkdir()
    os.environ["PIP_DOWNLOAD_CACHE"] = downloads.abspath()
    sh("bin/pip install -U -r requirements.txt")
    _clean_up_extra_egg_info("lib")
    print "Don't forget to run the database upgrade! (paver db)"

@task
def db(options):
    """Perform a database upgrade, if necessary.

    Your WSGI script is loaded so that the configuration is properly
    set up. By default, the WSGI script is ../wsgi-apps/bespin.wsgi.
    You can override this on the command line like so:

    paver db.wsgiscript=/path/to/script.wsgi db
    """
    from bespin import config, model, db_versions
    from migrate.versioning.shell import main

    execfile(options.wsgiscript, {'__file__' : options.wsgiscript})

    repository = str(path(db_versions.__file__).dirname())
    dburl = config.c.dburl
    dry("Run the database upgrade", main, ["upgrade", dburl, repository])

    # touch the wsgi app so that mod_wsgi sees that we've updated
    sh("touch %s" % options.wsgiscript)

@task
def create_db():
    """Creates the production database.

    Your WSGI script is loaded so that the configuration is properly
    set up. By default, the WSGI script is ../wsgi-apps/bespin.wsgi.
    You can override this on the command line like so:

    paver db.wsgiscript=/path/to/script.wsgi create_db
    """
    from bespin import config, database, db_versions
    from migrate.versioning.shell import main

    script = options.db.wsgiscript

    execfile(script, {'__file__' : script})

    dry("Create database tables", database.Base.metadata.create_all,
        bind=config.c.dbengine)

    repository = str(path(db_versions.__file__).dirname())
    dburl = config.c.dburl
    dry("Turn on migrate versioning", main, ["version_control", dburl, repository])
@ -0,0 +1,16 @@
urlrelay>0.7
WebOb
WebTest
nose
http://pypi.python.org/packages/source/s/simplejson/simplejson-2.0.9.tar.gz#egg=simplejson
SQLAlchemy
sqlalchemy-migrate>0.5.1
ext/Paste-1.7.3dev-r7791.tar.gz
static
ext/virtualenv-1.3.4dev.tar.gz
http://httplib2.googlecode.com/files/httplib2-0.4.0.tar.gz
http://pypi.python.org/packages/source/p/path.py/path-2.2.zip#md5=941660081788282887f652510d80e64e
PyCrypto
uvc>=0.4.4
argparse
ext/omnisync-0.1a0.tar.gz
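
# Editorial note (hedged): these are pip requirement specifiers -- bare names,
# version constraints, local tarballs under ext/, and direct URLs -- installed
# by the develop task via `pip install -r requirements.txt`.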
@ -0,0 +1,10 @@
# Don't run this. This is just for pip.

from paver import tasks
tasks.environment = tasks.Environment()
import pavement
from setuptools import setup

kw = pavement.options.setup

setup(**kw)
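
# Editorial sketch (hedged): pavement.options.setup is the paver Bunch defined
# in pavement.py, so setup(**kw) expands to roughly:
#
#   setup(name="BespinServer", version=VERSION, packages=find_packages(), ...)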