Mirror of https://github.com/mozilla/labs-vcap.git

Merge branch 'master' into router_v2

Conflicts:
    router/Gemfile.lock
    router/Rakefile
    router/spec/Rakefile
    router/spec/functional/router_spec.rb
    router/spec/functional/spec_helper.rb

Change-Id: I5c91bf1bab78855b05350948dfdfd4e13557432e

Commit aed8fafea2

README.md
@@ -62,19 +62,9 @@ To install ssh:

    sudo apt-get install openssh-server

#### Step 2: run the automated setup process
Run the install script. It'll ask for your sudo password at the
beginning and towards the end. The entire process takes ~1 hour, so just
keep a loose eye on it.

    sudo apt-get install curl
    bash < <(curl -s -k -B https://raw.github.com/cloudfoundry/vcap/master/setup/install)

_**Experimental**_ Or, instead of the above steps, try the new Chef based
installation. Note: the Chef based setup will eventually replace the
vcap_setup based install above. The Chef recipes don't cover all frameworks
and runtimes supported by the traditional installation method, e.g. Erlang
isn't supported yet, but it will soon. At that point, the vcap_setup based
installer will be removed from the source tree.
Run the install script. It'll ask for your sudo password at the beginning and
towards the end. The entire process takes about half an hour, so just keep a
loose eye on it.

    sudo apt-get install curl
    bash < <(curl -s -k -B https://raw.github.com/cloudfoundry/vcap/master/dev_setup/bin/vcap_dev_setup)

@@ -85,14 +75,7 @@ the following steps

#### Step 3: start the system

    cd ~/cloudfoundry/vcap
    bin/vcap start
    bin/vcap tail # see aggregate logs

_**Experimental**_ if you used the new Chef based installer in step 2:

    cd ~/cloudfoundry/vcap
    dev_setup/bin/vcap_dev start
    ~/cloudfoundry/vcap/dev_setup/bin/vcap_dev start

#### Step 4: *Optional, mac/linux users only*, create a local ssh tunnel

@@ -144,7 +127,13 @@ Testing your setup
------------------

Once the system is installed, you can run the following command Basic System
Validation Tests (BVT) to ensure that major functionality is working.
Validation Tests (BVT) to ensure that major functionality is working. BVTs
require additional dependencies of Maven and the JDK, which can be installed
with:

    sudo apt-get install default-jdk maven2

Now that you have the necessary dependencies, you can run the BVTs:

    cd cloudfoundry/vcap
    cd tests && bundle package; bundle install && cd ..
@@ -0,0 +1 @@
Subproject commit ff16a04c7e0ca56cbfcdbf0f385f0127dee81f60

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
# Copyright (c) 2009-2011 VMware, Inc.

exec(File.expand_path("../../acm/bin/acm", __FILE__), *ARGV)

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
# Copyright (c) 2009-2011 VMware, Inc.

exec(File.expand_path("../../package_cache/bin/package_cache", __FILE__), *ARGV)

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
# Copyright (c) 2009-2011 VMware, Inc.

exec(File.expand_path("../../../services/filesystem/bin/filesystem_gateway", __FILE__), *ARGV)

@@ -0,0 +1,3 @@
#!/usr/bin/env ruby

exec(File.expand_path("../../../services/mongodb/bin/mongodb_worker", __FILE__), *ARGV)

@@ -0,0 +1,3 @@
#!/usr/bin/env ruby

exec(File.expand_path("../../../services/mysql/bin/mysql_worker", __FILE__), *ARGV)

@@ -1,6 +0,0 @@
#!/usr/bin/env ruby

ENV['BUNDLE_GEMFILE'] = File.dirname(__FILE__) + '/../../services/rabbit/Gemfile'
$LOAD_PATH.unshift(File.dirname(__FILE__) + '/../../services/rabbit')

require 'rabbit_service'

@@ -0,0 +1,3 @@
#!/usr/bin/env ruby

exec(File.expand_path("../../../services/redis/bin/redis_worker", __FILE__), *ARGV)

@@ -0,0 +1,3 @@
#!/usr/bin/env ruby

exec(File.expand_path("../../../services/serialization_data_server/bin/serialization_data_server", __FILE__), *ARGV)

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
# Copyright (c) 2009-2012 VMware, Inc.

exec(File.expand_path("../../../services/vblob/bin/vblob_gateway", __FILE__), *ARGV)

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
# Copyright (c) 2009-2012 VMware, Inc.

exec(File.expand_path("../../../services/vblob/bin/vblob_node", __FILE__), *ARGV)

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
# Copyright (c) 2009-2011 VMware, Inc.

exec(File.expand_path("../../uaa/bin/uaa", __FILE__), *ARGV)
bin/vcap
@@ -9,7 +9,11 @@
# - router
# - cloud_controller
# - health_manager
# - stager
# - package_cache
# - dea
# - uaa
# - acm
#
# services
# - redis

@@ -435,7 +439,7 @@ module Run
  private

  def self.core
    %w(router cloud_controller dea health_manager stager)
    %w(router cloud_controller dea health_manager stager uaa acm package_cache)
  end

  def self.services

@@ -596,6 +600,7 @@ opts_parser = OptionParser.new do |opts|
  opts.on('--noprompt', '-n') { $noprompt = true }

end
$configdir ||= ENV['CLOUD_FOUNDRY_CONFIG_PATH']
args = opts_parser.parse!(args)

$nocolor = true unless STDOUT.tty?
|
@ -5,16 +5,15 @@ gem 'rails', '~> 3.0.5'
|
|||
# Message bus
|
||||
gem 'nats', :require => 'nats/client'
|
||||
|
||||
gem 'logging', '>= 1.5.0'
|
||||
|
||||
# VCAP common components
|
||||
gem 'vcap_common', :require => ['vcap/common', 'vcap/component']
|
||||
gem 'vcap_logging', :require => ['vcap/logging']
|
||||
gem 'vcap_staging'
|
||||
gem 'vcap_staging', '~> 0.1.50'
|
||||
gem 'cf-uaa-client', '>= 0.0.6'
|
||||
|
||||
# For queuing staging tasks
|
||||
gem 'em-hiredis'
|
||||
gem 'vcap_stager', '~> 0.1.8'
|
||||
gem 'vcap_stager', '~> 0.1.14'
|
||||
|
||||
# Databases
|
||||
gem 'sqlite3'
|
||||
|
|
|
@ -33,10 +33,15 @@ GEM
|
|||
arel (2.0.9)
|
||||
bcrypt-ruby (2.1.4)
|
||||
builder (2.1.2)
|
||||
cf-uaa-client (0.0.6)
|
||||
em-http-request (= 1.0.0.beta.3)
|
||||
eventmachine
|
||||
json_pure
|
||||
rest-client
|
||||
chronic (0.6.4)
|
||||
ci_reporter (1.6.4)
|
||||
builder (>= 2.1.2)
|
||||
daemons (1.1.5)
|
||||
daemons (1.1.8)
|
||||
delorean (1.1.0)
|
||||
chronic
|
||||
diff-lcs (1.1.3)
|
||||
|
@ -57,10 +62,7 @@ GEM
|
|||
hiredis (0.3.2)
|
||||
http_parser.rb (0.5.1)
|
||||
i18n (0.5.0)
|
||||
json_pure (1.6.4)
|
||||
little-plugger (1.1.3)
|
||||
logging (1.6.1)
|
||||
little-plugger (>= 1.1.2)
|
||||
json_pure (1.6.5)
|
||||
mail (2.2.15)
|
||||
activesupport (>= 2.3.6)
|
||||
i18n (>= 0.4.0)
|
||||
|
@ -69,7 +71,7 @@ GEM
|
|||
mime-types (1.16)
|
||||
mocha (0.9.12)
|
||||
mysql2 (0.2.7)
|
||||
nats (0.4.22.beta.4)
|
||||
nats (0.4.22)
|
||||
daemons (>= 1.1.4)
|
||||
eventmachine (>= 0.12.10)
|
||||
json_pure (>= 1.6.1)
|
||||
|
@ -129,20 +131,20 @@ GEM
|
|||
polyglot (>= 0.3.1)
|
||||
tzinfo (0.3.26)
|
||||
uuidtools (2.1.2)
|
||||
vcap_common (1.0.3)
|
||||
vcap_common (1.0.10)
|
||||
eventmachine (~> 0.12.11.cloudfoundry.3)
|
||||
logging (>= 1.5.0)
|
||||
nats (~> 0.4.22.beta.4)
|
||||
nats (~> 0.4.22.beta.8)
|
||||
posix-spawn (~> 0.3.6)
|
||||
thin (~> 1.3.1)
|
||||
yajl-ruby (~> 0.8.3)
|
||||
vcap_logging (0.1.3)
|
||||
vcap_stager (0.1.8)
|
||||
vcap_staging (0.1.32)
|
||||
vcap_logging (1.0.0)
|
||||
vcap_stager (0.1.14)
|
||||
vcap_staging (0.1.50)
|
||||
nokogiri (>= 1.4.4)
|
||||
rake
|
||||
rspec
|
||||
vcap_common
|
||||
uuidtools (~> 2.1.2)
|
||||
vcap_common (~> 1.0.8)
|
||||
yajl-ruby (>= 0.7.9)
|
||||
yajl-ruby (0.8.3)
|
||||
|
||||
|
@ -152,13 +154,13 @@ PLATFORMS
|
|||
DEPENDENCIES
|
||||
SystemTimer (~> 1.2)
|
||||
bcrypt-ruby (~> 2.1.4)
|
||||
cf-uaa-client (>= 0.0.6)
|
||||
ci_reporter
|
||||
delorean
|
||||
em-hiredis
|
||||
em-http-request (~> 1.0.0.beta.3)
|
||||
em-redis (~> 0.3.0)
|
||||
eventmachine
|
||||
logging (>= 1.5.0)
|
||||
mocha
|
||||
mysql2 (>= 0.2.6)
|
||||
nats
|
||||
|
@ -177,6 +179,6 @@ DEPENDENCIES
|
|||
uuidtools (~> 2.1.2)
|
||||
vcap_common
|
||||
vcap_logging
|
||||
vcap_stager (~> 0.1.7)
|
||||
vcap_staging
|
||||
vcap_stager (~> 0.1.14)
|
||||
vcap_staging (~> 0.1.50)
|
||||
yajl-ruby (~> 0.8.3)
|
||||
|
|
|
@ -61,20 +61,32 @@ class ApplicationController < ActionController::Base
|
|||
def json_param(name)
|
||||
raw = params[name]
|
||||
Yajl::Parser.parse(raw, :symbolize_keys => true)
|
||||
rescue Yajl::ParseError
|
||||
rescue Yajl::ParseError => e
|
||||
CloudController.logger.error("json_param yajl error: #{e.message}")
|
||||
raise CloudError.new(CloudError::BAD_REQUEST)
|
||||
end
|
||||
|
||||
def fetch_user_from_token
|
||||
reset_user!
|
||||
unless auth_token_header.blank?
|
||||
token = UserToken.decode(auth_token_header)
|
||||
if token.valid?
|
||||
@current_user = ::User.find_by_email(token.user_name)
|
||||
unless @current_user.nil?
|
||||
if AppConfig[:https_required] or (@current_user.admin? and AppConfig[:https_required_for_admins])
|
||||
raise CloudError.new(CloudError::HTTPS_REQUIRED) unless request_https?
|
||||
end
|
||||
user_email = nil
|
||||
if uaa_enabled? && UaaToken.is_uaa_token?(auth_token_header)
|
||||
user_email = UaaToken.decode_token(auth_token_header)
|
||||
else
|
||||
token = UserToken.decode(auth_token_header)
|
||||
if token.valid?
|
||||
user_email = token.user_name
|
||||
end
|
||||
end
|
||||
|
||||
if (!user_email.nil?)
|
||||
CloudController.logger.debug("user_email decoded from token is #{user_email.inspect}")
|
||||
@current_user = ::User.find_by_email(user_email)
|
||||
end
|
||||
|
||||
unless @current_user.nil?
|
||||
if AppConfig[:https_required] or (@current_user.admin? and AppConfig[:https_required_for_admins])
|
||||
raise CloudError.new(CloudError::HTTPS_REQUIRED) unless request_https?
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -158,4 +170,8 @@ class ApplicationController < ActionController::Base
|
|||
render_cloud_error CloudError.new(CloudError::SYSTEM_ERROR)
|
||||
end
|
||||
|
||||
def uaa_enabled?
|
||||
AppConfig[:uaa][:enabled] && !AppConfig[:uaa][:url].nil?
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -43,6 +43,7 @@ class AppsController < ApplicationController
|
|||
end
|
||||
|
||||
def delete
|
||||
CloudController.logger.info("Deleting app, name=#{@app.name} id=#{@app.id}")
|
||||
@app.purge_all_resources!
|
||||
@app.destroy
|
||||
render :nothing => true, :status => 200
|
||||
|
@ -298,6 +299,7 @@ class AppsController < ApplicationController
|
|||
end
|
||||
|
||||
app.metadata[:debug] = body_params[:debug] if body_params
|
||||
app.metadata[:console] = body_params[:console] if body_params
|
||||
|
||||
# 'app.save' can actually raise an exception, if whatever is
|
||||
# invalid happens all the way down at the DB layer.
|
||||
|
@ -405,6 +407,7 @@ class AppsController < ApplicationController
|
|||
app.framework = body_params[:staging][:framework]
|
||||
app.runtime = body_params[:staging][:runtime]
|
||||
end
|
||||
app.metadata[:command] = body_params[:staging][:command] if body_params[:staging][:command]
|
||||
end
|
||||
unless app.framework
|
||||
CloudController.logger.error "app: #{app.id} No app framework indicated"
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
class BulkController < ApplicationController
|
||||
|
||||
skip_before_filter :fetch_user_from_token
|
||||
before_filter :authenticate_bulk_api
|
||||
|
||||
#the password is randomly generated at startup and is
|
||||
#discoverable through NATS.request('cloudcontroller.bulk.credentials')
|
||||
|
||||
DEFAULT_BATCH_SIZE = 200
|
||||
|
||||
|
||||
def users
|
||||
render_results_and_token_for_model(User)
|
||||
end
|
||||
|
||||
def apps
|
||||
render_results_and_token_for_model(App)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def authenticate_bulk_api
|
||||
authenticate_or_request_with_http_basic do |user, pass|
|
||||
if user==AppConfig[:bulk_api][:auth][:user] &&
|
||||
pass==AppConfig[:bulk_api][:auth][:password]
|
||||
true
|
||||
else
|
||||
CloudController.logger.error("Bulk api auth failed (user=#{user}, pass=#{pass} from #{request.remote_ip}", :tags => [:auth_failure, :bulk_api])
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
def render_results_and_token_for_model(model)
|
||||
results = retrieve_results(model)
|
||||
update_token(results)
|
||||
render :json => { :results => hash_by_id(results), :bulk_token => bulk_token }
|
||||
end
|
||||
|
||||
def retrieve_results(model)
|
||||
model.where(where_clause).limit(batch_size).to_a
|
||||
end
|
||||
|
||||
def hash_by_id arr
|
||||
arr.inject({}) { |hash, elem| hash[elem.id] = elem; hash }
|
||||
end
|
||||
|
||||
def update_token(results)
|
||||
@bulk_token = results.empty? ? {} : {:id => results.last.id}
|
||||
end
|
||||
|
||||
def where_clause
|
||||
@where_clause ||= bulk_token.to_a.map { |k,v| "#{sanitize_atom(k)} > #{sanitize_atom(v)}" }.join(" AND ")
|
||||
end
|
||||
|
||||
def sanitize_atom(atom)
|
||||
unless atom =~ /^\w+$/
|
||||
raise CloudError.new(CloudError::BAD_REQUEST, "bad atom #{atom} in bulk_api token")
|
||||
end
|
||||
atom
|
||||
end
|
||||
|
||||
|
||||
def batch_size
|
||||
@batch_size||=params['batch_size'] ? json_param('batch_size').to_i : DEFAULT_BATCH_SIZE
|
||||
end
|
||||
|
||||
def bulk_token
|
||||
@bulk_token||=params['bulk_token'] ? params['bulk_token'] : {}
|
||||
end
|
||||
end
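
For context, a rough sketch (not part of this commit) of how a consumer such as health manager v2 or a billing job might page through the new bulk endpoints. The host name, credentials, batch size, and the exact wire encoding of the `bulk_token` parameter are assumptions; per the controller above, the password is generated at startup and is discoverable via `NATS.request('cloudcontroller.bulk.credentials')`.

    # Hypothetical bulk API consumer (assumed, for illustration only).
    require 'net/http'
    require 'uri'
    require 'yajl'

    def fetch_bulk_apps(user, password, last_id = nil)
      params = { 'batch_size' => '50' }
      params['bulk_token[id]'] = last_id.to_s if last_id   # resume token from previous page
      uri = URI.parse('http://api.vcap.me/bulk/apps')       # assumed cloud controller host
      uri.query = URI.encode_www_form(params)
      req = Net::HTTP::Get.new(uri.request_uri)
      req.basic_auth(user, password)                        # credentials discovered over NATS
      res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
      Yajl::Parser.parse(res.body)                          # { "results" => {...}, "bulk_token" => {...} }
    end

    # Page through all apps, resuming from the opaque bulk_token each time.
    last_id = nil
    loop do
      page = fetch_bulk_apps('bulk_api', 'secret', last_id)
      break if page['results'].empty?
      page['results'].each { |id, app| puts "app #{id}" }
      last_id = page['bulk_token']['id']
    end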
|
|
@ -10,6 +10,10 @@ class DefaultController < ApplicationController
|
|||
:description => AppConfig[:description],
|
||||
:allow_debug => AppConfig[:allow_debug]
|
||||
}
|
||||
if uaa_enabled?
|
||||
info[:authorization_endpoint] = AppConfig[:uaa][:url]
|
||||
info[:authenticationEndpoint] = AppConfig[:uaa][:url] # obsolete, can be removed after this release
|
||||
end
|
||||
# If there is a logged in user, give out additional information
|
||||
if user
|
||||
info[:user] = user.email
|
||||
|
@ -34,7 +38,7 @@ class DefaultController < ApplicationController
|
|||
ret[svc_type] ||= {}
|
||||
ret[svc_type][svc.name] ||= {}
|
||||
ret[svc_type][svc.name][svc.version] ||= {}
|
||||
ret[svc_type][svc.name][svc.version] = svc.as_legacy
|
||||
ret[svc_type][svc.name][svc.version] = svc.as_legacy(user)
|
||||
end
|
||||
|
||||
render :json => ret
|
||||
|
@ -59,4 +63,3 @@ class DefaultController < ApplicationController
|
|||
end
|
||||
|
||||
end
|
||||
|
||||
|
|
|
@ -54,7 +54,7 @@ class LegacyServicesController < ApplicationController
|
|||
# Legacy api fell back to matching by vendor if no version matched
|
||||
svc ||= ::Service.find_by_name(req.vendor)
|
||||
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless svc && svc.visible_to_user?(user)
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless svc && svc.visible_to_user?(user, req.tier)
|
||||
|
||||
plan_option = nil
|
||||
if req.options && req.options['plan_option']
|
||||
|
|
|
@ -8,7 +8,11 @@ class ServicesController < ApplicationController
|
|||
|
||||
before_filter :validate_content_type
|
||||
before_filter :require_service_auth_token, :only => [:create, :delete, :update_handle, :list_handles, :list_brokered_services]
|
||||
before_filter :require_user, :only => [:provision, :bind, :bind_external, :unbind, :unprovision]
|
||||
before_filter :require_user, :only => [:provision, :bind, :bind_external, :unbind, :unprovision,
|
||||
:create_snapshot, :enum_snapshots, :snapshot_details,:rollback_snapshot,
|
||||
:serialized_url, :import_from_url, :import_from_data, :job_info]
|
||||
before_filter :require_lifecycle_extension, :only => [:create_snapshot, :enum_snapshots, :snapshot_details,:rollback_snapshot,
|
||||
:serialized_url, :import_from_url, :import_from_data, :job_info]
|
||||
|
||||
rescue_from(JsonMessage::Error) {|e| render :status => 400, :json => {:errors => e.to_s}}
|
||||
rescue_from(ActiveRecord::RecordInvalid) {|e| render :status => 400, :json => {:errors => e.to_s}}
|
||||
|
@ -155,7 +159,7 @@ class ServicesController < ApplicationController
|
|||
req = VCAP::Services::Api::CloudControllerProvisionRequest.decode(request_body)
|
||||
|
||||
svc = Service.find_by_label(req.label)
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless svc && svc.visible_to_user?(user)
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless svc && svc.visible_to_user?(user, req.plan)
|
||||
|
||||
cfg = ServiceConfig.provision(svc, user, req.name, req.plan, req.plan_option)
|
||||
|
||||
|
@ -179,6 +183,110 @@ class ServicesController < ApplicationController
|
|||
render :json => {}
|
||||
end
|
||||
|
||||
# Create a snapshot for service instance
|
||||
#
|
||||
def create_snapshot
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.create_snapshot
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# Enumerate all snapshots of the given instance
|
||||
#
|
||||
def enum_snapshots
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.enum_snapshots
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# Get snapshot detail information
|
||||
#
|
||||
def snapshot_details
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.snapshot_details params['sid']
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# Rollback to a snapshot
|
||||
#
|
||||
def rollback_snapshot
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.rollback_snapshot params['sid']
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# Get the url to download serialized data for an instance
|
||||
#
|
||||
def serialized_url
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.serialized_url
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# import serialized data to an instance from url
|
||||
#
|
||||
def import_from_url
|
||||
req = VCAP::Services::Api::SerializedURL.decode(request_body)
|
||||
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.import_from_url req
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# import serialized data to an instance from request data
|
||||
#
|
||||
def import_from_data
|
||||
max_upload_size = AppConfig[:service_lifecycle][:max_upload_size] || 1
|
||||
max_upload_size = max_upload_size * 1024 * 1024
|
||||
raise CloudError.new(CloudError::BAD_REQUEST) unless request.content_length < max_upload_size
|
||||
|
||||
req = VCAP::Services::Api::SerializedData.decode(request_body)
|
||||
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.import_from_data req
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# Get job information
|
||||
#
|
||||
def job_info
|
||||
cfg = ServiceConfig.find_by_user_id_and_name(user.id, params['id'])
|
||||
raise CloudError.new(CloudError::SERVICE_NOT_FOUND) unless cfg
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless cfg.provisioned_by?(user)
|
||||
|
||||
result = cfg.job_info params['job_id']
|
||||
|
||||
render :json => result
|
||||
end
|
||||
|
||||
# Binds a provisioned instance to an app
|
||||
#
|
||||
def bind
|
||||
|
@ -251,4 +359,8 @@ class ServicesController < ApplicationController
|
|||
@service_auth_token = request.headers[hdr]
|
||||
raise CloudError.new(CloudError::FORBIDDEN) unless @service_auth_token
|
||||
end
|
||||
|
||||
def require_lifecycle_extension
|
||||
raise CloudError.new(CloudError::EXTENSION_NOT_IMPL, "lifecycle") unless AppConfig.has_key?(:service_lifecycle)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
require 'uaa/user_account'
|
||||
|
||||
class UsersController < ApplicationController
|
||||
before_filter :enforce_registration_policy, :only => :create
|
||||
before_filter :grab_event_user
|
||||
|
@ -5,6 +7,20 @@ class UsersController < ApplicationController
|
|||
before_filter :require_admin, :only => [:delete, :list]
|
||||
|
||||
def create
|
||||
if uaa_enabled?
|
||||
begin
|
||||
user_account = Cloudfoundry::Uaa::UserAccount.new(AppConfig[:uaa][:url], UaaToken.access_token)
|
||||
user_account.async = true
|
||||
user_account.trace = true
|
||||
user_account.logger = CloudController.logger
|
||||
user = user_account.create(body_params[:email], body_params[:password], body_params[:email])
|
||||
|
||||
CloudController.logger.info("User with email #{body_params[:email]} and id #{user[:id]} created in the UAA") unless user == nil
|
||||
rescue => e
|
||||
CloudController.logger.error("Error trying to create a UAA user - message #{e.message} trace #{e.backtrace[0..10]}")
|
||||
end
|
||||
end
|
||||
|
||||
user = ::User.new :email => body_params[:email]
|
||||
user.set_and_encrypt_password(body_params[:password])
|
||||
|
||||
|
@ -16,6 +32,18 @@ class UsersController < ApplicationController
|
|||
end
|
||||
|
||||
def delete
|
||||
if uaa_enabled?
|
||||
begin
|
||||
user_account = Cloudfoundry::Uaa::UserAccount.new(AppConfig[:uaa][:url], UaaToken.access_token)
|
||||
user_account.async = true
|
||||
user_account.trace = true
|
||||
user_account.logger = CloudController.logger
|
||||
user_account.delete_by_name(params['email'])
|
||||
rescue => e
|
||||
CloudController.logger.error("Error trying to delete a UAA user - message #{e.message} trace #{e.backtrace[0..10]}")
|
||||
end
|
||||
end
|
||||
|
||||
if target_user = ::User.find_by_email(params['email'])
|
||||
|
||||
# Cleanup leftover services
|
||||
|
@ -38,6 +66,19 @@ class UsersController < ApplicationController
|
|||
|
||||
# Change Password
|
||||
def update
|
||||
if uaa_enabled?
|
||||
begin
|
||||
user_account = Cloudfoundry::Uaa::UserAccount.new(AppConfig[:uaa][:url], UaaToken.access_token)
|
||||
user_account.async = true
|
||||
user_account.trace = true
|
||||
user_account.logger = CloudController.logger
|
||||
user_account.change_password_by_name(user.email, body_params[:password])
|
||||
|
||||
rescue => e
|
||||
CloudController.logger.error("Error trying to change the password for a UAA user - message #{e.message} trace #{e.backtrace[0..10]}")
|
||||
end
|
||||
end
|
||||
|
||||
user.set_and_encrypt_password(body_params[:password])
|
||||
user.save!
|
||||
render :status => 204, :nothing => true
|
||||
|
|
|
@ -23,8 +23,10 @@ class App < ActiveRecord::Base
|
|||
|
||||
AppStates = %w[STOPPED STARTED]
|
||||
PackageStates = %w[PENDING STAGED FAILED]
|
||||
Runtimes = %w[ruby18 ruby19 java node php erlangR14B02 python26]
|
||||
Frameworks = %w[sinatra rails3 java_web spring grails node php otp_rebar lift wsgi django unknown]
|
||||
|
||||
Runtimes = %w[ruby18 ruby19 java node node06 php erlangR14B02 python2]
|
||||
Frameworks = %w[sinatra rack rails3 java_web spring grails node php otp_rebar lift wsgi django standalone unknown]
|
||||
|
||||
|
||||
validates_presence_of :name, :framework, :runtime
|
||||
|
||||
|
@ -119,8 +121,9 @@ class App < ActiveRecord::Base
|
|||
services = service_bindings(true).map {|sb| sb.for_staging}
|
||||
{ :services => services,
|
||||
:framework => framework,
|
||||
:runtime => runtime,
|
||||
:resources => resource_requirements }
|
||||
:runtime => runtime,
|
||||
:resources => resource_requirements,
|
||||
:meta => metadata }
|
||||
end
|
||||
|
||||
def staging_task_properties
|
||||
|
@ -129,7 +132,8 @@ class App < ActiveRecord::Base
|
|||
:framework => framework,
|
||||
:runtime => runtime,
|
||||
:resources => resource_requirements,
|
||||
:environment => environment}
|
||||
:environment => environment,
|
||||
:meta => metadata }
|
||||
end
|
||||
|
||||
# Returns an array of the URLs that point to this application
|
||||
|
@ -521,6 +525,13 @@ class App < ActiveRecord::Base
|
|||
end
|
||||
|
||||
def update_staged_package(upload_path)
|
||||
# Remove old package if needed
|
||||
if self.staged_package_path
|
||||
CloudController.logger.info("Removing old staged package for" \
|
||||
+ " app_id=#{self.id} app_name=#{self.name}" \
|
||||
+ " path=#{self.staged_package_path}")
|
||||
FileUtils.rm_f(self.staged_package_path)
|
||||
end
|
||||
self.staged_package_hash = Digest::SHA1.file(upload_path).hexdigest
|
||||
FileUtils.mv(upload_path, self.staged_package_path)
|
||||
end
|
||||
|
|
|
@ -89,6 +89,7 @@ class AppManager
|
|||
|
||||
AppManager.secure_staging_dir(job[:user], job[:staging_dir])
|
||||
AppManager.secure_staging_dir(job[:user], job[:exploded_dir])
|
||||
AppManager.secure_staging_dir(job[:user], job[:work_dir])
|
||||
end
|
||||
|
||||
Bundler.with_clean_env do
|
||||
|
@ -117,6 +118,7 @@ class AppManager
|
|||
ensure
|
||||
FileUtils.rm_rf(job[:staging_dir])
|
||||
FileUtils.rm_rf(job[:exploded_dir])
|
||||
FileUtils.rm_rf(job[:work_dir]) if job[:work_dir]
|
||||
end
|
||||
end.resume
|
||||
|
||||
|
@ -136,11 +138,15 @@ class AppManager
|
|||
end
|
||||
|
||||
def run_staging_command(script, exploded_dir, staging_dir, env_json)
|
||||
work_dir = Dir.mktmpdir
|
||||
plugin_env_path = File.join(work_dir, 'plugin_env.json')
|
||||
File.open(plugin_env_path, 'w') {|f| f.write(env_json) }
|
||||
job = {
|
||||
:app => @app,
|
||||
:cmd => "#{script} #{exploded_dir} #{staging_dir} #{env_json} #{AppManager.staging_manifest_directory}",
|
||||
:cmd => "#{script} #{exploded_dir} #{staging_dir} #{plugin_env_path} #{AppManager.staging_manifest_directory}",
|
||||
:staging_dir => staging_dir,
|
||||
:exploded_dir => exploded_dir
|
||||
:exploded_dir => exploded_dir,
|
||||
:work_dir => work_dir
|
||||
}
|
||||
|
||||
CloudController.logger.debug("Queueing staging command #{job[:cmd]}", :tags => [:staging])
|
||||
|
@ -194,11 +200,12 @@ class AppManager
|
|||
message = start_message.dup
|
||||
message[:executableUri] = download_app_uri(message[:executableUri])
|
||||
message[:debug] = @app.metadata[:debug]
|
||||
message[:console] = @app.metadata[:console]
|
||||
(index...max_to_start).each do |i|
|
||||
message[:index] = i
|
||||
dea_id = find_dea_for(message)
|
||||
json = Yajl::Encoder.encode(message)
|
||||
if dea_id
|
||||
json = Yajl::Encoder.encode(message)
|
||||
CloudController.logger.debug("Sending start message #{json} to DEA #{dea_id}")
|
||||
NATS.publish("dea.#{dea_id}.start", json)
|
||||
else
|
||||
|
@ -287,15 +294,15 @@ class AppManager
|
|||
end
|
||||
|
||||
runtime = nil
|
||||
|
||||
manifest['runtimes'].each do |hash|
|
||||
runtime ||= hash[app.runtime]
|
||||
end
|
||||
|
||||
unless runtime
|
||||
raise CloudError.new(CloudError::APP_INVALID_RUNTIME, app.runtime, app.framework)
|
||||
end
|
||||
|
||||
env_json = Yajl::Encoder.encode(app.staging_environment)
|
||||
env_json = app.staging_environment
|
||||
|
||||
app_source_dir = Dir.mktmpdir
|
||||
app.explode_into(app_source_dir)
|
||||
|
@ -368,7 +375,9 @@ class AppManager
|
|||
:state => instance_json[:state],
|
||||
:since => instance_json[:state_timestamp],
|
||||
:debug_ip => instance_json[:debug_ip],
|
||||
:debug_port => instance_json[:debug_port]
|
||||
:debug_port => instance_json[:debug_port],
|
||||
:console_ip => instance_json[:console_ip],
|
||||
:console_port => instance_json[:console_port]
|
||||
}
|
||||
end
|
||||
end
|
||||
|
@ -440,6 +449,7 @@ class AppManager
|
|||
['http://', "#{CloudController.bind_address}:#{CloudController.external_port}", path].join
|
||||
end
|
||||
|
||||
|
||||
# start_instance involves several moving pieces, from sending requests for help to the
|
||||
# dea_pool, to sending the actual start messages. In addition, many of these can be
|
||||
# triggered by one update call, so we simply queue them for the next go around through
|
||||
|
@ -452,8 +462,8 @@ class AppManager
|
|||
message[:executableUri] = download_app_uri(message[:executableUri])
|
||||
message[:index] = index
|
||||
dea_id = find_dea_for(message)
|
||||
json = Yajl::Encoder.encode(message)
|
||||
if dea_id
|
||||
json = Yajl::Encoder.encode(message)
|
||||
CloudController.logger.debug("Sending start message #{json} to DEA #{dea_id}")
|
||||
NATS.publish("dea.#{dea_id}.start", json)
|
||||
else
|
||||
|
@ -465,18 +475,22 @@ class AppManager
|
|||
end
|
||||
|
||||
def find_dea_for(message)
|
||||
find_dea_message = {
|
||||
:droplet => message[:droplet],
|
||||
:limits => message[:limits],
|
||||
:name => message[:name],
|
||||
:runtime => message[:runtime],
|
||||
:sha => message[:sha1]
|
||||
}
|
||||
json_msg = Yajl::Encoder.encode(find_dea_message)
|
||||
result = NATS.timed_request('dea.discover', json_msg, :timeout => 2).first
|
||||
return nil if result.nil?
|
||||
CloudController.logger.debug "Received #{result.inspect} in response to dea.discover request"
|
||||
Yajl::Parser.parse(result, :symbolize_keys => true)[:id]
|
||||
if AppConfig[:new_initial_placement]
|
||||
DEAPool.find_dea(message)
|
||||
else
|
||||
find_dea_message = {
|
||||
:droplet => message[:droplet],
|
||||
:limits => message[:limits],
|
||||
:name => message[:name],
|
||||
:runtime => message[:runtime],
|
||||
:sha => message[:sha1]
|
||||
}
|
||||
json_msg = Yajl::Encoder.encode(find_dea_message)
|
||||
result = NATS.timed_request('dea.discover', json_msg, :timeout => 2).first
|
||||
return nil if result.nil?
|
||||
CloudController.logger.debug "Received #{result.inspect} in response to dea.discover request"
|
||||
Yajl::Parser.parse(result, :symbolize_keys => true)[:id]
|
||||
end
|
||||
end
|
||||
|
||||
def stop_instances(indices)
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
class DEAPool
|
||||
DEA_PROFILE_EXPIRATION_TIME = 10
|
||||
|
||||
class << self
|
||||
def dea_profiles
|
||||
@dea_profiles ||= {}
|
||||
end
|
||||
|
||||
def process_advertise_message(msg)
|
||||
CloudController.logger.debug2("Got DEA advertisement#{msg}.")
|
||||
dea_profiles[msg[:id]] = {:profile => msg, :time => Time.now.to_i}
|
||||
end
|
||||
|
||||
def find_dea(app)
|
||||
required_mem = app[:limits][:mem]
|
||||
required_runtime = app[:runtime]
|
||||
keys = dea_profiles.keys.shuffle
|
||||
keys.each do |key|
|
||||
entry = dea_profiles[key]
|
||||
dea = entry[:profile]
|
||||
last_update = entry[:time]
|
||||
if Time.now.to_i - last_update > DEA_PROFILE_EXPIRATION_TIME
|
||||
CloudController.logger.debug("DEA #{dea[:id]} expired from pool.")
|
||||
dea_profiles.delete(key)
|
||||
next
|
||||
end
|
||||
|
||||
if (dea[:available_memory] >= required_mem) && (dea[:runtimes].member? required_runtime)
|
||||
CloudController.logger.debug("Found DEA #{dea[:id]}.")
|
||||
return dea[:id]
|
||||
end
|
||||
end
|
||||
nil
|
||||
end
|
||||
end
|
||||
end
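
For illustration, a hypothetical example (not from this commit) of the `dea.advertise` payload the pool above consumes; the field names come from `process_advertise_message` and `find_dea`, while the concrete values are assumptions.

    # Assumed shape of a DEA advertisement published on NATS.
    advert = {
      :id               => 'dea-uuid-1',                 # keys dea_profiles
      :available_memory => 4096,                         # compared against app[:limits][:mem]
      :runtimes         => ['ruby18', 'ruby19', 'node']  # must include the app runtime
    }
    NATS.publish('dea.advertise', Yajl::Encoder.encode(advert))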
|
|
@ -31,33 +31,73 @@ class Service < ActiveRecord::Base
|
|||
# Predicate function that returns true if the service is visible to the supplied
|
||||
# user. False otherwise.
|
||||
#
|
||||
# NB: This is currently a stub. When we implement scoped services it will be filled in.
|
||||
def visible_to_user?(user)
|
||||
(!self.acls || user_in_userlist?(user) || user_match_wildcards?(user))
|
||||
# There are two parts of acls. One is service acls applied to service as a whole
|
||||
# One is plan acls applied to specific service plan.
|
||||
#
|
||||
# A example of acls structure:
|
||||
# acls:
|
||||
# users: #service acls
|
||||
# - foo@bar.com
|
||||
# - foo1@bar.com
|
||||
# wildcards: #service acls
|
||||
# - *@foo.com
|
||||
# - *@foo1.com
|
||||
# plans:
|
||||
# plan_a: #plan acls
|
||||
# users:
|
||||
# - foo2@foo.com
|
||||
# wildcards:
|
||||
# - *@foo1.com
|
||||
#
|
||||
# The following chart shows service visibility:
|
||||
#
|
||||
# P_ACLs\S_ACLs | Empty | HasACLs |
|
||||
# Empty | True | S_ACL(user) |
|
||||
# HasACLs | P_ACL(user) | S_ACL(user) && P_ACL(user) |
|
||||
def visible_to_user?(user, plan=nil)
|
||||
return false if !plans || !user.email
|
||||
return true unless acls
|
||||
|
||||
if !plan
|
||||
plans.each do |p|
|
||||
return true if visible_to_user?(user, p)
|
||||
end
|
||||
return false
|
||||
else
|
||||
# for certain plan, user should match service acls and plan acls
|
||||
p_acls = acls["plans"] && acls["plans"][plan]
|
||||
validate_by_acls?(user, acls) && validate_by_acls?(user, p_acls)
|
||||
end
|
||||
end
|
||||
|
||||
# Return true if acls is empty or user matches user list or wildcards
|
||||
# false otherwise.
|
||||
def validate_by_acls?(user, acl)
|
||||
!acl ||
|
||||
(!acl["users"] && !acl["wildcards"]) ||
|
||||
user_in_userlist?(user, acl["users"]) ||
|
||||
user_match_wildcards?(user, acl["wildcards"])
|
||||
end
|
||||
|
||||
# Returns true if the user's email is contained in the set of user emails
|
||||
def user_in_userlist?(user)
|
||||
return false if (!self.acls || self.acls['users'].empty? || !user.email)
|
||||
return true if self.acls['users'].empty?
|
||||
Set.new(self.acls['users']).include?(user.email)
|
||||
# false otherwise
|
||||
def user_in_userlist?(user, userlist)
|
||||
userlist && userlist.include?(user.email)
|
||||
end
|
||||
|
||||
# Returns true if user matches any of the wildcards, false otherwise.
|
||||
def user_match_wildcards?(user)
|
||||
return false if (!self.acls || self.acls['wildcards'].empty? || !user.email)
|
||||
for wc in self.acls['wildcards']
|
||||
parts = wc.split('*')
|
||||
re_str = parts.map{|p| Regexp.escape(p)}.join('.*')
|
||||
if Regexp.new("^#{re_str}$").match(user.email)
|
||||
return true
|
||||
end
|
||||
end
|
||||
# Returns true if user matches any of the wildcards
|
||||
# false otherwise.
|
||||
def user_match_wildcards?(user, wildcards)
|
||||
wildcards.each do |wc|
|
||||
re_str = Regexp.escape(wc).gsub('\*', '.*?')
|
||||
return true if user.email =~ /^#{re_str}$/
|
||||
end if wildcards
|
||||
|
||||
false
|
||||
end
|
||||
|
||||
# Returns the service represented as a legacy hash
|
||||
def as_legacy
|
||||
def as_legacy(user)
|
||||
# Synthesize tier info
|
||||
tiers = {}
|
||||
|
||||
|
@ -68,6 +108,7 @@ class Service < ActiveRecord::Base
|
|||
end
|
||||
|
||||
self.plans.each do |p|
|
||||
next unless visible_to_user?(user, p)
|
||||
tiers[p] = {
|
||||
:options => {},
|
||||
:order => sort_orders[p], # XXX - Sort order. Synthesized for now (alphabetical), may want to add support for this to svcs api.
|
||||
|
|
|
@ -116,6 +116,82 @@ class ServiceConfig < ActiveRecord::Base
|
|||
end
|
||||
end
|
||||
|
||||
def create_snapshot
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/snapshots"
|
||||
result = perform_gateway_request(:create_snapshot, endpoint, service.token, :post, service.timeout, VCAP::Services::Api::Job, empty_msg_class, :service_id => name)
|
||||
result
|
||||
end
|
||||
|
||||
def enum_snapshots
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/snapshots"
|
||||
result = perform_gateway_request(:enum_snapshots, endpoint, service.token, :get, service.timeout, VCAP::Services::Api::SnapshotList, empty_msg_class, :service_id => name)
|
||||
result
|
||||
end
|
||||
|
||||
def snapshot_details(sid)
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/snapshots/#{sid}"
|
||||
result = perform_gateway_request(:snapshot_details, endpoint, service.token, :get, service.timeout, VCAP::Services::Api::Snapshot, empty_msg_class, :service_id => name, :snapshot_id => sid)
|
||||
result
|
||||
end
|
||||
|
||||
def rollback_snapshot(sid)
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/snapshots/#{sid}"
|
||||
result = perform_gateway_request(:rollback_snapshot, endpoint, service.token, :put, service.timeout, VCAP::Services::Api::Job, empty_msg_class, :service_id => name, :snapshot_id => sid)
|
||||
result
|
||||
end
|
||||
|
||||
def serialized_url
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/serialized/url"
|
||||
result = perform_gateway_request(:serialized_url, endpoint, service.token, :get, service.timeout, VCAP::Services::Api::Job, empty_msg_class, :service_id => name)
|
||||
result
|
||||
end
|
||||
|
||||
def import_from_url req
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/serialized/url"
|
||||
result = perform_gateway_request(:import_from_url, endpoint, service.token, :put, service.timeout, VCAP::Services::Api::Job, req, :service_id => name, :msg => req)
|
||||
result
|
||||
end
|
||||
|
||||
def import_from_data req
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/serialized/data"
|
||||
result = perform_gateway_request(:import_from_data, endpoint, service.token, :put, service.timeout, VCAP::Services::Api::Job, req, :service_id => name, :msg => req)
|
||||
result
|
||||
end
|
||||
|
||||
def job_info job_id
|
||||
endpoint = "#{service.url}/gateway/v1/configurations/#{name}/jobs/#{job_id}"
|
||||
result = perform_gateway_request(:job_info, endpoint, service.token, :get, service.timeout, VCAP::Services::Api::Job, empty_msg_class, :service_id => name, :job_id => job_id)
|
||||
result
|
||||
end
|
||||
|
||||
def empty_msg_class
|
||||
VCAP::Services::Api::EMPTY_REQUEST
|
||||
end
|
||||
|
||||
# Perform gateway request and decode request to object
|
||||
#
|
||||
def perform_gateway_request(action, endpoint, token, http_method, timeout, decoder_class, msg, opts={})
|
||||
result = nil
|
||||
if EM.reactor_running?
|
||||
http = VCAP::Services::Api::AsyncHttpRequest.fibered(endpoint, token, http_method, timeout, msg)
|
||||
if !http.error.empty?
|
||||
raise "Error sending #{action} request for #{name} to gateway #{service.url}: #{http.error}"
|
||||
elsif http.response_header.status != 200
|
||||
raise "Error sending #{action} request for #{name}: non 200 response from gateway #{service.url}: #{http.response_header.status} #{http.response}"
|
||||
end
|
||||
result = decoder_class.decode(http.response)
|
||||
else
|
||||
uri = URI.parse(endpoint)
|
||||
gw = VCAP::Services::Api::ServiceGatewayClient.new(uri.host, token, uri.port)
|
||||
result = gw.send(action, opts)
|
||||
end
|
||||
result.extract
|
||||
rescue => e
|
||||
CloudController.logger.error("Error talking to gateway: #{e}")
|
||||
CloudController.logger.error(e)
|
||||
raise CloudError.new(CloudError::SERVICE_GATEWAY_ERROR)
|
||||
end
|
||||
|
||||
def provisioned_by?(user)
|
||||
(self.user_id == user.id)
|
||||
end
|
||||
|
|
|
@ -6,14 +6,7 @@ class StagingTaskLog
|
|||
"staging_task_log:#{app_id}"
|
||||
end
|
||||
|
||||
def fetch(app_id, redis=nil)
|
||||
redis ||= @redis
|
||||
key = key_for_id(app_id)
|
||||
result = redis.get(key)
|
||||
result ? StagingTaskLog.new(app_id, result) : nil
|
||||
end
|
||||
|
||||
def fetch_fibered(app_id, timeout=5, redis=nil)
|
||||
def fetch_fibered(app_id, redis=nil, timeout=5)
|
||||
redis ||= @redis
|
||||
f = Fiber.current
|
||||
key = key_for_id(app_id)
|
||||
|
@ -24,7 +17,7 @@ class StagingTaskLog
|
|||
get_def = redis.get(key)
|
||||
get_def.timeout(timeout)
|
||||
get_def.errback do |e|
|
||||
e = VCAP::Stager::TaskResultTimeoutError.new("Timed out fetching result") if e == nil
|
||||
e = VCAP::Stager::StagingTimeoutError.new("Timed out fetching result") if e == nil
|
||||
logger.error("Failed fetching result for key '#{key}': #{e}")
|
||||
logger.error(e)
|
||||
f.resume([false, e])
|
||||
|
|
|
@ -0,0 +1,58 @@
|
|||
require "uaa/token_coder"
|
||||
require "uaa/token_issuer"
|
||||
|
||||
class UaaToken
|
||||
|
||||
@uaa_token_coder ||= Cloudfoundry::Uaa::TokenCoder.new(AppConfig[:uaa][:resource_id],
|
||||
AppConfig[:uaa][:token_secret])
|
||||
|
||||
@token_issuer ||= Cloudfoundry::Uaa::TokenIssuer.new(AppConfig[:uaa][:url],
|
||||
AppConfig[:uaa][:resource_id],
|
||||
AppConfig[:uaa][:client_secret],
|
||||
"read write password",
|
||||
nil)
|
||||
|
||||
class << self
|
||||
|
||||
def is_uaa_token?(token)
|
||||
token.nil? || /\s+/.match(token.strip()).nil?? false : true
|
||||
end
|
||||
|
||||
def decode_token(auth_token)
|
||||
if (auth_token.nil?)
|
||||
return nil
|
||||
end
|
||||
|
||||
CloudController.logger.debug("uaa token coder #{@uaa_token_coder.inspect}")
|
||||
CloudController.logger.debug("Auth token is #{auth_token.inspect}")
|
||||
|
||||
token_information = nil
|
||||
begin
|
||||
token_information = @uaa_token_coder.decode(auth_token)
|
||||
CloudController.logger.info("Token received from the UAA #{token_information.inspect}")
|
||||
rescue => e
|
||||
CloudController.logger.error("Invalid bearer token Message: #{e.message}")
|
||||
end
|
||||
token_information[:email] if token_information
|
||||
end
|
||||
|
||||
def expired?(access_token)
|
||||
expiry = Cloudfoundry::Uaa::TokenCoder.decode(access_token.split()[1], AppConfig[:uaa][:token_secret])[:expires_at]
|
||||
expiry.is_a?(Integer) && expiry <= Time.now.to_i
|
||||
end
|
||||
|
||||
def access_token
|
||||
if @access_token.nil? || expired?(@access_token)
|
||||
#Get a new one
|
||||
@token_issuer.async = true
|
||||
@token_issuer.trace = true
|
||||
@token_issuer.logger = CloudController.logger
|
||||
@access_token = @token_issuer.client_credentials_grant()
|
||||
end
|
||||
CloudController.logger.debug("access_token #{@access_token}")
|
||||
@access_token
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
end
|
|
@ -0,0 +1,5 @@
|
|||
EM.next_tick do
|
||||
NATS.subscribe('cloudcontroller.bulk.credentials') do |_, reply|
|
||||
NATS.publish(reply, AppConfig[:bulk_api][:auth].to_json)
|
||||
end
|
||||
end
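
A minimal sketch (assumed, not part of the commit) of the consumer side of this initializer: any NATS-connected component can discover the generated bulk API credentials by sending a request on the same subject. The subject and payload keys come from the code above; the NATS URI and the use of Yajl for parsing are assumptions.

    # Hypothetical credential discovery over NATS.
    require 'nats/client'
    require 'yajl'

    NATS.start(:uri => ENV['NATS_URI'] || 'nats://localhost:4222') do
      NATS.request('cloudcontroller.bulk.credentials') do |response|
        creds = Yajl::Parser.parse(response, :symbolize_keys => true)
        puts "bulk api user=#{creds[:user]} password=#{creds[:password]}"
        NATS.stop
      end
    end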
|
|
@ -0,0 +1,16 @@
|
|||
|
||||
EM.next_tick do
|
||||
|
||||
NATS.subscribe('dea.advertise') do |msg|
|
||||
begin
|
||||
payload = Yajl::Parser.parse(msg, :symbolize_keys => true)
|
||||
CloudController::UTILITY_FIBER_POOL.spawn do
|
||||
DEAPool.process_advertise_message(payload)
|
||||
end
|
||||
rescue => e
|
||||
CloudController.logger.error("Exception processing dea advertisement: '#{msg}'")
|
||||
CloudController.logger.error(e)
|
||||
end
|
||||
end
|
||||
NATS.publish('dea.locate')
|
||||
end
|
|
@ -4,6 +4,7 @@
|
|||
# we are starting in production mode.
|
||||
require 'vcap/common'
|
||||
require 'vcap/staging/plugin/common'
|
||||
require 'openssl'
|
||||
|
||||
config_file = ENV['CLOUD_CONTROLLER_CONFIG'] || File.expand_path('../cloud_controller.yml', __FILE__)
|
||||
begin
|
||||
|
@ -53,7 +54,12 @@ required = { :external_uri => 'api.vcap.me',
|
|||
:default_account_capacity => { :memory => 2048,
|
||||
:app_uris => 4,
|
||||
:services => 16,
|
||||
:apps => 20 } }
|
||||
:apps => 20 },
|
||||
:uaa => { :enabled => 'true',
|
||||
:url => 'http://uaa.vcap.me',
|
||||
:resource_id => 'cloud_controller',
|
||||
:token_secret => 'tokensecret',
|
||||
:client_secret => 'cloudcontrollerclientsecret'}}
|
||||
|
||||
# Does the given hash have at least the keys contained in the default?
|
||||
required_keys = Proc.new do |candidate, default|
|
||||
|
@ -89,6 +95,18 @@ env_overrides.each do |cfg_key, env_key|
|
|||
end
|
||||
end
|
||||
|
||||
unless AppConfig.key? :new_initial_placement
|
||||
AppConfig[:new_initial_placement] = false
|
||||
end
|
||||
|
||||
#generate bulk api credentials unless they've been explicitly specified (not that they should)
|
||||
unless AppConfig.key? :bulk_api
|
||||
AppConfig[:bulk_api] = { :auth =>
|
||||
{ :user => 'bulk_api',
|
||||
:password => VCAP.secure_uuid }}
|
||||
end
|
||||
|
||||
|
||||
# Check on new style app_uris and map old into new style.
|
||||
unless AppConfig.key? :app_uris
|
||||
AppConfig[:app_uris] = {
|
||||
|
|
|
@ -5,9 +5,6 @@ require "active_record/railtie"
|
|||
require "action_controller/railtie"
|
||||
require "rails/test_unit/railtie"
|
||||
|
||||
common_lib_dir = File.expand_path('../../../lib', __FILE__)
|
||||
$LOAD_PATH << common_lib_dir unless $LOAD_PATH.include?(common_lib_dir)
|
||||
|
||||
# If you have a Gemfile, require the gems listed there, including any gems
|
||||
# you've limited to :test, :development, or :production.
|
||||
Bundler.require(:default, Rails.env) if defined?(Bundler)
|
||||
|
|
|
@ -92,6 +92,14 @@ database_environment: # replaces database.yml
|
|||
database: db/test.sqlite3
|
||||
encoding: utf8
|
||||
|
||||
# Enable the use of the uaa
|
||||
uaa:
|
||||
enabled: true
|
||||
url: http://uaa.vcap.me
|
||||
resource_id: cloud_controller
|
||||
token_secret: tokensecret
|
||||
client_secret: cloudcontrollerclientsecret
|
||||
|
||||
# App staging parameters
|
||||
staging:
|
||||
max_concurrent_stagers: 10
|
||||
|
@ -133,14 +141,25 @@ builtin_services:
|
|||
token: "0xdeadbeef"
|
||||
atmos:
|
||||
token: "0xdeadbeef"
|
||||
filesystem:
|
||||
token: "0xdeadbeef"
|
||||
vblob:
|
||||
token: "0xdeadbeef"
|
||||
|
||||
# Service broker
|
||||
service_broker:
|
||||
token: "0xdeadbeef"
|
||||
|
||||
# lifecycle
|
||||
service_lifecycle:
|
||||
max_upload_size: 1 # in MB
|
||||
|
||||
# Enable/disable starting apps in debug modes.
|
||||
allow_debug: true
|
||||
|
||||
# Enable new initial placement protocol.
|
||||
new_initial_placement: false
|
||||
|
||||
# Supported runtime versions and debug modes.
|
||||
# Used for /info/runtimes endpoint (served unfiltered as JSON)
|
||||
runtimes:
|
||||
|
@ -150,6 +169,14 @@ runtimes:
|
|||
version: 1.9.2
|
||||
node:
|
||||
version: 0.4.12
|
||||
debug_modes:
|
||||
- run
|
||||
- suspend
|
||||
node06:
|
||||
version: 0.6.8
|
||||
debug_modes:
|
||||
- run
|
||||
- suspend
|
||||
java:
|
||||
version: 1.6.0
|
||||
debug_modes:
|
||||
|
@ -159,7 +186,7 @@ runtimes:
|
|||
version: 5.3.[2-6]
|
||||
erlangR14B02:
|
||||
version: ".* 5.8.3"
|
||||
python26:
|
||||
python2:
|
||||
version: 2.6.5
|
||||
|
||||
# Used for /healthz and /vars endpoints. If not provided random
|
||||
|
|
|
@ -27,6 +27,12 @@ CloudController::Application.routes.draw do
|
|||
get 'apps/:name/update' => 'apps#check_update'
|
||||
put 'apps/:name/update' => 'apps#start_update'
|
||||
|
||||
#bulk APIs for health manager v.2 and billing
|
||||
#retrieving batches of items. An opaque token is returned with every request to resume the retrieval
|
||||
#from where the last request left off.
|
||||
get 'bulk/apps' => 'bulk#apps', :as => :bulk_apps
|
||||
get 'bulk/users' => 'bulk#users', :as => :bulk_users
|
||||
|
||||
# Stagers interact with the CC via these urls
|
||||
post 'staging/droplet/:id/:upload_id' => 'staging#upload_droplet', :as => :upload_droplet
|
||||
get 'staging/app/:id' => 'staging#download_app', :as => :download_unstaged_app
|
||||
|
@ -46,6 +52,17 @@ CloudController::Application.routes.draw do
|
|||
# Brokered Services
|
||||
get 'brokered_services/poc/offerings' => 'services#list_brokered_services', :as => :service_list_brokered_services
|
||||
|
||||
# Service life cycle apis
|
||||
post 'services/v1/configurations/:id/snapshots' => 'services#create_snapshot', :as => :service_create_snapshot, :id => /[^\/]+/
|
||||
get 'services/v1/configurations/:id/snapshots' => 'services#enum_snapshots', :as => :service_enum_snapshots, :id => /[^\/]+/
|
||||
get 'services/v1/configurations/:id/snapshots/:sid' => 'services#snapshot_details', :as => :service_snapshot_details, :id => /[^\/]+/, :sid => /[^\/]+/
|
||||
put 'services/v1/configurations/:id/snapshots/:sid' => 'services#rollback_snapshot', :as => :service_rollback_snapshot, :id => /[^\/]+/, :sid => /[^\/]+/
|
||||
get 'services/v1/configurations/:id/serialized/url' => 'services#serialized_url', :as => :service_serialized_url, :id => /[^\/]+/
|
||||
put 'services/v1/configurations/:id/serialized/url' => 'services#import_from_url', :as => :service_import_from_url, :id => /[^\/]+/
|
||||
put 'services/v1/configurations/:id/serialized/data' => 'services#import_from_data', :as => :service_import_from_data, :id => /[^\/]+/
|
||||
get 'services/v1/configurations/:id/jobs/:job_id' => 'services#job_info', :as => :service_job_info, :id => /[^\/]+/, :job_id => /[^\/]+/
|
||||
|
||||
|
||||
# Legacy services implementation (for old vmc)
|
||||
get 'services' => 'legacy_services#list', :as => :legacy_service_list
|
||||
post 'services' => 'legacy_services#provision', :as => :legacy_service_provision
|
||||
|
|
|
@ -16,6 +16,7 @@ class CloudError < StandardError
|
|||
HTTP_FORBIDDEN = 403
|
||||
HTTP_NOT_FOUND = 404
|
||||
HTTP_INTERNAL_SERVER_ERROR = 500
|
||||
HTTP_NOT_IMPLEMENTED = 501
|
||||
HTTP_BAD_GATEWAY = 502
|
||||
|
||||
# HTTP / JSON errors
|
||||
|
@ -53,6 +54,7 @@ class CloudError < StandardError
|
|||
TOKEN_NOT_FOUND = [502, HTTP_NOT_FOUND, "Token not found"]
|
||||
SERVICE_GATEWAY_ERROR = [503, HTTP_BAD_GATEWAY, "Unexpected response from service gateway"]
|
||||
ACCOUNT_TOO_MANY_SERVICES = [504, HTTP_FORBIDDEN, "Too many Services provisioned: %s, you're allowed: %s"]
|
||||
EXTENSION_NOT_IMPL = [505, HTTP_NOT_IMPLEMENTED, "Service extension %s is not implemented."]
|
||||
|
||||
# Account Capacity
|
||||
ACCOUNT_NOT_ENOUGH_MEMORY = [600, HTTP_FORBIDDEN, "Not enough memory capacity, you're allowed: %s"]
|
||||
|
|
|
@ -21,7 +21,6 @@ class StagingTaskManager
|
|||
# @return VCAP::Stager::TaskResult
|
||||
def run_staging_task(app, dl_uri, ul_uri)
|
||||
inbox = "cc.staging." + VCAP.secure_uuid
|
||||
nonce = VCAP.secure_uuid
|
||||
f = Fiber.current
|
||||
|
||||
# Wait for notification from the stager
|
||||
|
|
|
@ -18,7 +18,10 @@ args = ARGV.dup
|
|||
|
||||
if args.length > 2
|
||||
begin
|
||||
args[2] = Yajl::Parser.parse(args[2], :symbolize_keys => true)
|
||||
File.open(args[2], "r") { |f|
|
||||
env_json = f.read
|
||||
args[2] = Yajl::Parser.parse(env_json, :symbolize_keys => true)
|
||||
}
|
||||
rescue => e
|
||||
puts "ERROR DECODING ENVIRONMENT: #{e}"
|
||||
exit 1
|
||||
|
|
|
@ -100,8 +100,8 @@ describe ServicesController do
|
|||
|
||||
it 'should update existing offerings' do
|
||||
acls = {
|
||||
'users' => ['foo@bar.com'],
|
||||
'wildcards' => ['*@foo.com'],
|
||||
'plans' => {'free' => {'users' => ['a@b.com']}}
|
||||
}
|
||||
svc = Service.create(
|
||||
:label => 'foo-bar',
|
||||
|
@ -129,8 +129,8 @@ describe ServicesController do
|
|||
|
||||
it 'should support reverting existing offerings to nil' do
|
||||
acls = {
|
||||
'users' => ['foo@bar.com'],
|
||||
'wildcards' => ['*@foo.com'],
|
||||
'plans' => {'free' => {'users' => ['aaa@bbb.com']}}
|
||||
}
|
||||
svc = Service.create(
|
||||
:label => 'foo-bar',
|
||||
|
@ -384,6 +384,7 @@ describe ServicesController do
|
|||
svc.label = "foo-bar"
|
||||
svc.url = "http://localhost:56789"
|
||||
svc.token = 'foobar'
|
||||
svc.plans = ['free', 'nonfree']
|
||||
svc.save
|
||||
svc.should be_valid
|
||||
@svc = svc
|
||||
|
@ -659,6 +660,157 @@ describe ServicesController do
|
|||
end
|
||||
end
|
||||
|
||||
describe "#lifecycle_extension" do
|
||||
it 'should return not implemented error when lifecycle is disabled' do
|
||||
begin
|
||||
origin = AppConfig.delete :service_lifecycle
|
||||
%w(create_snapshot enum_snapshots serialized_url import_from_url import_from_data).each do |api|
|
||||
post api.to_sym, :id => 'xxx'
|
||||
response.status.should == 501
|
||||
resp = Yajl::Parser.parse(response.body)
|
||||
resp['description'].include?("not implemented").should == true
|
||||
end
|
||||
|
||||
%w(snapshot_details rollback_snapshot).each do |api|
|
||||
post api.to_sym, :id => 'xxx', :sid => '1'
|
||||
response.status.should == 501
|
||||
resp = Yajl::Parser.parse(response.body)
|
||||
resp['description'].include?("not implemented").should == true
|
||||
end
|
||||
|
||||
get :job_info, :id => 'xxx', :job_id => '1'
|
||||
response.status.should == 501
|
||||
resp = Yajl::Parser.parse(response.body)
|
||||
resp['description'].include?("not implemented").should == true
|
||||
ensure
|
||||
AppConfig[:service_lifecycle] = origin
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#create_snapshot" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
post :create_snapshot, :id => 'xxx'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
post :create_snapshot, :id => 'xxx'
|
||||
response.status.should == 404
|
||||
end
|
||||
end
|
||||
|
||||
describe "#enum_snapshots" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
get :enum_snapshots, :id => 'xxx'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
get :enum_snapshots, :id => 'xxx'
|
||||
response.status.should == 404
|
||||
end
|
||||
end
|
||||
|
||||
describe "#snapshot_details" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
get :snapshot_details, :id => 'xxx' , :sid => 'yyy'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
get :snapshot_details, :id => 'xxx', :sid => 'yyy'
|
||||
response.status.should == 404
|
||||
end
|
||||
end
|
||||
|
||||
describe "#rollback_snapshot" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
put :rollback_snapshot, :id => 'xxx', :sid => 'yyy'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
put :snapshot_details, :id => 'xxx' , :sid => 'yyy'
|
||||
response.status.should == 404
|
||||
end
|
||||
end
|
||||
|
||||
describe "#serialized_url" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
get :serialized_url, :id => 'xxx'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
get :serialized_url, :id => 'xxx'
|
||||
response.status.should == 404
|
||||
end
|
||||
end
|
||||
|
||||
describe "#import_from_url" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
put :import_from_url, :id => 'xxx'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
put_msg :import_from_url, :id => 'xxx' do
|
||||
VCAP::Services::Api::SerializedURL.new(:url => 'http://api.vcap.me')
|
||||
end
|
||||
response.status.should == 404
|
||||
end
|
||||
|
||||
it 'should return bad request for malformed request' do
|
||||
put_msg :import_from_url, :id => 'xxx' do
|
||||
# supply wrong request
|
||||
VCAP::Services::Api::SerializedData.new(:data => "raw_data")
|
||||
end
|
||||
response.status.should == 400
|
||||
end
|
||||
end
|
||||
|
||||
describe "#import_from_data" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
put :import_from_data, :id => 'xxx'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
put_msg :import_from_data, :id => 'xxx' do
|
||||
VCAP::Services::Api::SerializedData.new(:data => 'raw_data')
|
||||
end
|
||||
response.status.should == 404
|
||||
end
|
||||
end
|
||||
|
||||
describe "#job_info" do
|
||||
|
||||
it 'should return not authorized for unknown users' do
|
||||
request.env['HTTP_AUTHORIZATION'] = UserToken.create('bar@foo.com').encode
|
||||
get :job_info, :id => 'xxx', :job_id => 'yyy'
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return not found for unknown ids' do
|
||||
get :job_info, :id => 'xxx' , :job_id => 'yyy'
|
||||
response.status.should == 404
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def start_gateway(svc, shim)
|
||||
|
@ -700,6 +852,12 @@ describe ServicesController do
|
|||
post(*args)
|
||||
end
|
||||
|
||||
def put_msg(*args, &blk)
|
||||
msg = yield
|
||||
request.env['RAW_POST_DATA'] = msg.encode
|
||||
put(*args)
|
||||
end
|
||||
|
||||
def delete_msg(*args, &blk)
|
||||
msg = yield
|
||||
request.env['RAW_POST_DATA'] = msg.encode
|
||||
|
|
|
@ -1,144 +1,148 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe UsersController do
|
||||
before :each do
|
||||
build_admin_and_user
|
||||
@user_headers = headers_for(@user.email, nil)
|
||||
@admin_headers = headers_for(@admin.email, nil)
|
||||
request.env["HTTP_AUTHORIZATION"] = ""
|
||||
end
|
||||
|
||||
describe "#info" do
|
||||
it 'should return user info as a user requesting for himself' do
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
@user.admin?.should be_false
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => @user.email}
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['email'].should == @user.email
|
||||
json['admin'].should == @user.admin?
|
||||
shared_examples_for "any request to the users controller" do
|
||||
before :each do
|
||||
build_admin_and_user
|
||||
@user_headers = headers_for(@user.email, nil)
|
||||
@admin_headers = headers_for(@admin.email, nil)
|
||||
request.env["HTTP_AUTHORIZATION"] = ""
|
||||
end
|
||||
|
||||
it 'should return user info as an admin requesting for an existing user' do
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => @user.email}
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['email'].should == @user.email
|
||||
json['admin'].should == @user.admin?
|
||||
end
|
||||
|
||||
it 'should return an error as an admin requesting for a non-existent user' do
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => 'non-existent@example.com'}
|
||||
response.status.should == 403
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['code'].should == 201
|
||||
json['description'].should == 'User not found'
|
||||
end
|
||||
|
||||
it 'should return an error as a user requesting for another user' do
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
@user.admin?.should be_false
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => @admin.email}
|
||||
response.status.should == 403
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['code'].should == 200
|
||||
json['description'].should == 'Operation not permitted'
|
||||
end
|
||||
end
|
||||
|
||||
describe '#create' do
|
||||
it 'should return 403 if the user is not an admin and registration is disabled' do
|
||||
AppConfig[:allow_registration] = false
|
||||
post_with_body :create do
|
||||
{ :email => 'foo@bar.com',
|
||||
:password => 'testpass',
|
||||
}
|
||||
describe "#info" do
|
||||
it 'should return user info as a user requesting for himself' do
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
@user.admin?.should be_false
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => @user.email}
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['email'].should == @user.email
|
||||
json['admin'].should == @user.admin?
|
||||
end
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should create users if the user is an admin and registration is disabled' do
|
||||
AppConfig[:allow_registration] = false
|
||||
User.find_by_email('foo@bar.com').should be_nil
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
post_with_body :create do
|
||||
{ :email => 'foo@bar.com',
|
||||
:password => 'testpass',
|
||||
}
|
||||
it 'should return user info as an admin requesting for an existing user' do
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => @user.email}
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['email'].should == @user.email
|
||||
json['admin'].should == @user.admin?
|
||||
end
|
||||
response.status.should == 204
|
||||
User.find_by_email('foo@bar.com').should_not be_nil
|
||||
end
|
||||
|
||||
it 'should create users if the user is not an admin and registration is allowed' do
|
||||
AppConfig[:allow_registration] = true
|
||||
User.find_by_email('foo@bar.com').should be_nil
|
||||
post_with_body :create do
|
||||
{ :email => 'foo@bar.com',
|
||||
:password => 'testpass',
|
||||
}
|
||||
it 'should return an error as an admin requesting for a non-existent user' do
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => 'non-existent@example.com'}
|
||||
response.status.should == 403
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['code'].should == 201
|
||||
json['description'].should == 'User not found'
|
||||
end
|
||||
|
||||
it 'should return an error as a user requesting for another user' do
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
@user.admin?.should be_false
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
get :info, {:email => @admin.email}
|
||||
response.status.should == 403
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Hash)
|
||||
json['code'].should == 200
|
||||
json['description'].should == 'Operation not permitted'
|
||||
end
|
||||
response.status.should == 204
|
||||
User.find_by_email('foo@bar.com').should_not be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe "#list" do
|
||||
it 'should return 200 as an admin' do
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
get :list
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Array)
|
||||
json.count.should >= 2
|
||||
end
|
||||
|
||||
it 'should return 403 as a user' do
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
get :list
|
||||
response.status.should == 403
|
||||
describe '#create' do
|
||||
it 'should return 403 if the user is not an admin and registration is disabled' do
|
||||
AppConfig[:allow_registration] = false
|
||||
post_with_body :create do
|
||||
{ :email => 'foo@bar.com',
|
||||
:password => 'testpass',
|
||||
}
|
||||
end
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should create users if the user is an admin and registration is disabled' do
|
||||
AppConfig[:allow_registration] = false
|
||||
User.find_by_email('foo@bar.com').should be_nil
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
post_with_body :create do
|
||||
{ :email => 'foo@bar.com',
|
||||
:password => 'testpass',
|
||||
}
|
||||
end
|
||||
response.status.should == 204
|
||||
User.find_by_email('foo@bar.com').should_not be_nil
|
||||
end
|
||||
|
||||
it 'should create users if the user is not an admin and registration is allowed' do
|
||||
AppConfig[:allow_registration] = true
|
||||
User.find_by_email('foo@bar.com').should be_nil
|
||||
post_with_body :create do
|
||||
{ :email => 'foo@bar.com',
|
||||
:password => 'testpass',
|
||||
}
|
||||
end
|
||||
response.status.should == 204
|
||||
User.find_by_email('foo@bar.com').should_not be_nil
|
||||
end
|
||||
end
|
||||
|
||||
it 'should return 403 without authentication' do
|
||||
get :list
|
||||
response.status.should == 403
|
||||
end
|
||||
end
|
||||
describe "#list" do
|
||||
it 'should return 200 as an admin' do
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
get :list
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should be_kind_of(Array)
|
||||
json.count.should >= 2
|
||||
end
|
||||
|
||||
describe "#delete" do
|
||||
it 'should return 204 as an admin' do
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
delete :delete, {:email => @user.email}
|
||||
response.status.should == 204
|
||||
User.find_by_email(@user.email).should be_nil
|
||||
User.find_by_email(@admin.email).should_not be_nil
|
||||
it 'should return 403 as a user' do
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
get :list
|
||||
response.status.should == 403
|
||||
end
|
||||
|
||||
it 'should return 403 without authentication' do
|
||||
get :list
|
||||
response.status.should == 403
|
||||
end
|
||||
end
|
||||
|
||||
it 'should return 403 as a user' do
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
delete :delete, {:email => @user.email}
|
||||
response.status.should == 403
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
end
|
||||
describe "#delete" do
|
||||
it 'should return 204 as an admin' do
|
||||
@admin.admin?.should be_true
|
||||
@admin_headers.each {|key, value| request.env[key] = value}
|
||||
delete :delete, {:email => @user.email}
|
||||
response.status.should == 204
|
||||
User.find_by_email(@user.email).should be_nil
|
||||
User.find_by_email(@admin.email).should_not be_nil
|
||||
end
|
||||
|
||||
it 'should return 403 without authentication' do
|
||||
delete :delete, {:email => @user.email}
|
||||
response.status.should == 403
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
it 'should return 403 as a user' do
|
||||
@user_headers.each {|key, value| request.env[key] = value}
|
||||
delete :delete, {:email => @user.email}
|
||||
response.status.should == 403
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
end
|
||||
|
||||
it 'should return 403 without authentication' do
|
||||
delete :delete, {:email => @user.email}
|
||||
response.status.should == 403
|
||||
User.find_by_email(@user.email).should_not be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -147,4 +151,17 @@ describe UsersController do
|
|||
request.env['RAW_POST_DATA'] = Yajl::Encoder.encode(body)
|
||||
post(*args)
|
||||
end
|
||||
|
||||
context "using conventional tokens" do
|
||||
it_should_behave_like "any request to the users controller"
|
||||
end
|
||||
|
||||
context "using jwt tokens" do
|
||||
before :all do
|
||||
CloudSpecHelpers.use_jwt_token = true
|
||||
end
|
||||
|
||||
it_should_behave_like "any request to the users controller"
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -194,14 +194,14 @@ describe AppPackage do
|
|||
|
||||
def create_zip(zip_name, file_count, file_size=1024)
|
||||
total_size = file_count * file_size
|
||||
file_paths = []
|
||||
files = []
|
||||
file_count.times do |ii|
|
||||
tf = Tempfile.new("ziptest_#{ii}")
|
||||
file_paths << tf.path
|
||||
files << tf
|
||||
tf.write("A" * file_size)
|
||||
tf.close
|
||||
end
|
||||
system("zip #{zip_name} #{file_paths.join(' ')}").should be_true
|
||||
system("zip #{zip_name} #{files.map(&:path).join(' ')}").should be_true
|
||||
total_size
|
||||
end
|
||||
end
|
||||
|
|
|
@ -56,6 +56,29 @@ describe App do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#update_staged_package' do
|
||||
let(:app) { App.new }
|
||||
|
||||
before :each do
|
||||
@tmpdir = Dir.mktmpdir
|
||||
AppPackage.stubs(:package_dir).returns(@tmpdir)
|
||||
end
|
||||
|
||||
after :each do
|
||||
FileUtils.rm_rf(@tmpdir)
|
||||
end
|
||||
|
||||
it 'should remove the old package' do
|
||||
old_package = create_test_package(@tmpdir)
|
||||
app.staged_package_hash = old_package[:name]
|
||||
|
||||
new_package = create_test_package(@tmpdir)
|
||||
app.update_staged_package(new_package[:path])
|
||||
|
||||
File.exist?(old_package[:path]).should be_false
|
||||
end
|
||||
end
|
||||
|
||||
def create_user(email, pw)
|
||||
u = User.new(:email => email)
|
||||
u.set_and_encrypt_password(pw)
|
||||
|
@ -63,4 +86,16 @@ describe App do
|
|||
u.should be_valid
|
||||
u
|
||||
end
|
||||
|
||||
def create_test_package(base_dir)
|
||||
name = "test_package#{Time.now.to_f}#{Process.pid}"
|
||||
ret = {
|
||||
:name => name,
|
||||
:path => File.join(base_dir, name),
|
||||
:contents => name,
|
||||
}
|
||||
File.open(ret[:path], 'w+') {|f| f.write(ret[:contents]) }
|
||||
|
||||
ret
|
||||
end
|
||||
end
|
||||
|
|
|
@ -74,8 +74,13 @@ describe Service do
|
|||
@user_b.set_and_encrypt_password('foo')
|
||||
@user_b.should be_valid
|
||||
|
||||
@user_c = User.new(:email => 'c@foo.com')
|
||||
@user_c.set_and_encrypt_password('foo')
|
||||
@user_c.should be_valid
|
||||
|
||||
@svc = make_service(
|
||||
:url => 'http://www.foo.com',
|
||||
:plans => ['plan_a', 'plan_b', 'plan_c'],
|
||||
:label => 'foo-bar',
|
||||
:token => 'foobar'
|
||||
)
|
||||
|
@ -83,6 +88,7 @@ describe Service do
|
|||
|
||||
@user_acl_svc = make_service(
|
||||
:url => 'http://www.foo.com',
|
||||
:plans => ['plan_a', 'plan_b', 'plan_c'],
|
||||
:label => 'foo-bar1',
|
||||
:token => 'foobar',
|
||||
:acls => {'users' => ['a@bar.com'], 'wildcards' => []}
|
||||
|
@ -91,14 +97,43 @@ describe Service do
|
|||
|
||||
@wc_acl_svc = make_service(
|
||||
:url => 'http://www.foo.com',
|
||||
:plans => ['plan_a', 'plan_b', 'plan_c'],
|
||||
:label => 'foo-bar2',
|
||||
:token => 'foobar',
|
||||
:acls => {'users' => [], 'wildcards' => ['*@bar.com']}
|
||||
)
|
||||
@wc_acl_svc.should be_valid
|
||||
|
||||
@p_acl_svc = make_service(
|
||||
:url => 'http://www.foo.com',
|
||||
:plans => ['plan_a', 'plan_b', 'plan_c'],
|
||||
:label => 'foo-bar3',
|
||||
:token => 'foobar',
|
||||
:acls => {
|
||||
'plans' => {
|
||||
'plan_a' => {'wildcards' => ['*@bar.com']}
|
||||
}
|
||||
}
|
||||
)
|
||||
@p_acl_svc.should be_valid
|
||||
|
||||
@combo_acl_svc = make_service(
|
||||
:url => 'http://www.foo.com',
|
||||
:plans => ['plan_a', 'plan_b', 'plan_c'],
|
||||
:label => 'foo-bar4',
|
||||
:token => 'foobar',
|
||||
:acls => {
|
||||
'wildcards' => ['*@bar.com'],
|
||||
'plans' => {
|
||||
'plan_a' => {'users' => ['a@bar.com']}
|
||||
}
|
||||
}
|
||||
)
|
||||
@combo_acl_svc.should be_valid
|
||||
end
|
||||
|
||||
it "should return true for services with no acls" do
|
||||
@svc.visible_to_user?(@user_a, 'plan_a').should be_true
|
||||
@svc.visible_to_user?(@user_a).should be_true
|
||||
end
|
||||
|
||||
|
@ -110,6 +145,27 @@ describe Service do
|
|||
it "should correctly validate users in the wildcard acl" do
|
||||
@wc_acl_svc.visible_to_user?(@user_a).should be_true
|
||||
@wc_acl_svc.visible_to_user?(@user_b).should be_true
|
||||
@wc_acl_svc.visible_to_user?(@user_c).should be_false
|
||||
end
|
||||
|
||||
it "should correctly validate user in the plan acls" do
|
||||
@p_acl_svc.visible_to_user?(@user_a).should be_true # can see plan_a, plan_b, plan_c
|
||||
@p_acl_svc.visible_to_user?(@user_b).should be_true # can see plan_a, plan_b, plan_c
|
||||
@p_acl_svc.visible_to_user?(@user_c).should be_true # can see plan_b, plan_c
|
||||
|
||||
@p_acl_svc.visible_to_user?(@user_a, "plan_a").should be_true
|
||||
@p_acl_svc.visible_to_user?(@user_b, "plan_a").should be_true
|
||||
@p_acl_svc.visible_to_user?(@user_c, "plan_a").should be_false
|
||||
end
|
||||
|
||||
it "should correctly validate user in the service acls and the plan acls" do
|
||||
@combo_acl_svc.visible_to_user?(@user_a).should be_true # can see plan_a, plan_b, plan_c
|
||||
@combo_acl_svc.visible_to_user?(@user_b).should be_true # can see plan_b, plan_c
|
||||
@combo_acl_svc.visible_to_user?(@user_c).should be_false # can not see service
|
||||
|
||||
@combo_acl_svc.visible_to_user?(@user_a, "plan_a").should be_true
|
||||
@combo_acl_svc.visible_to_user?(@user_b, "plan_a").should be_false
|
||||
@combo_acl_svc.visible_to_user?(@user_c, "plan_a").should be_false
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -22,26 +22,55 @@ describe StagingTaskLog do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#fetch' do
|
||||
describe '#fetch_fibered' do
|
||||
before :each do
|
||||
@redis_mock = mock()
|
||||
@deferrable_mock = EM::DefaultDeferrable.new()
|
||||
@deferrable_mock.stubs(:timeout)
|
||||
@redis_mock.expects(:get).with(@task_key).returns(@deferrable_mock)
|
||||
end
|
||||
|
||||
it 'should fetch and decode an existing task result' do
|
||||
redis_mock = mock()
|
||||
redis_mock.expects(:get).with(@task_key).returns(@task_log.task_log)
|
||||
res = StagingTaskLog.fetch(@task_id, redis_mock)
|
||||
res.should be_instance_of(StagingTaskLog)
|
||||
Fiber.new do
|
||||
res = StagingTaskLog.fetch_fibered(@task_id, @redis_mock)
|
||||
res.should be_instance_of(StagingTaskLog)
|
||||
end.resume
|
||||
@deferrable_mock.succeed(@task_log.task_log)
|
||||
end
|
||||
|
||||
it 'should return nil if no key exists' do
|
||||
redis_mock = mock()
|
||||
redis_mock.expects(:get).with(@task_key).returns(nil)
|
||||
res = StagingTaskLog.fetch(@task_id, redis_mock)
|
||||
res.should be_nil
|
||||
Fiber.new do
|
||||
res = StagingTaskLog.fetch_fibered(@task_id, @redis_mock)
|
||||
res.should be_nil
|
||||
end.resume
|
||||
@deferrable_mock.succeed(nil)
|
||||
end
|
||||
|
||||
it 'should use the static instance of redis if none is provided' do
|
||||
redis_mock = mock()
|
||||
redis_mock.expects(:get).with(@task_key).returns(nil)
|
||||
StagingTaskLog.redis = redis_mock
|
||||
res = StagingTaskLog.fetch(@task_id, redis_mock)
|
||||
Fiber.new do
|
||||
StagingTaskLog.redis = @redis_mock
|
||||
res = StagingTaskLog.fetch_fibered(@task_id)
|
||||
end.resume
|
||||
@deferrable_mock.succeed(nil)
|
||||
end
|
||||
|
||||
it 'should raise TimeoutError when timed out fetching result' do
|
||||
Fiber.new do
|
||||
expect do
|
||||
res = StagingTaskLog.fetch_fibered(@task_id, @redis_mock)
|
||||
end.to raise_error(VCAP::Stager::TaskError)
|
||||
end.resume
|
||||
@deferrable_mock.fail(nil)
|
||||
end
|
||||
|
||||
it 'should raise error when redis fetching fails' do
|
||||
Fiber.new do
|
||||
expect do
|
||||
res = StagingTaskLog.fetch_fibered(@task_id, @redis_mock)
|
||||
end.to raise_error
|
||||
end.resume
|
||||
@deferrable_mock.fail(RuntimeError.new("Mock Runtime Error from EM::Hiredis"))
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -1,17 +1,33 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe "Creating a new App" do
|
||||
before do
|
||||
build_admin_and_user
|
||||
|
||||
shared_examples_for "any request to create a new app" do
|
||||
before do
|
||||
build_admin_and_user
|
||||
end
|
||||
|
||||
it "is successful when given a unique name" do
|
||||
data = { 'name' => random_name, 'staging' => {'model' => 'sinatra', 'stack' => 'ruby18' }}
|
||||
lambda do
|
||||
post app_create_path, nil, headers_for(@user.email, nil, data)
|
||||
response.should redirect_to(app_get_url(data['name']))
|
||||
end.should change(App, :count).by(1)
|
||||
end
|
||||
|
||||
it "fails when given a duplicate name"
|
||||
end
|
||||
|
||||
it "is successful when given a unique name" do
|
||||
data = { 'name' => random_name, 'staging' => {'model' => 'sinatra', 'stack' => 'ruby18' }}
|
||||
lambda do
|
||||
post app_create_path, nil, headers_for(@user.email, nil, data)
|
||||
response.should redirect_to(app_get_url(data['name']))
|
||||
end.should change(App, :count).by(1)
|
||||
context "using conventional tokens" do
|
||||
it_should_behave_like "any request to create a new app"
|
||||
end
|
||||
|
||||
context "using jwt tokens" do
|
||||
before :all do
|
||||
CloudSpecHelpers.use_jwt_token = true
|
||||
end
|
||||
|
||||
it_should_behave_like "any request to create a new app"
|
||||
end
|
||||
|
||||
it "fails when given a duplicate name"
|
||||
end
|
||||
|
|
|
@ -0,0 +1,201 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe "bulk_api" do
|
||||
|
||||
shared_examples_for "any request to the bulk api" do
|
||||
before :all do
|
||||
build_admin_and_user
|
||||
make_a_bunch_of_apps
|
||||
@bulk_user = AppConfig[:bulk_api][:auth][:user]
|
||||
@bulk_password = AppConfig[:bulk_api][:auth][:password]
|
||||
|
||||
@auth_header = {"HTTP_AUTHORIZATION" => ActionController::HttpAuthentication::Basic.encode_credentials(@bulk_user, @bulk_password) }
|
||||
end
|
||||
|
||||
after :all do
|
||||
App.delete_all
|
||||
User.delete_all
|
||||
end
|
||||
|
||||
describe "credential discovery" do
|
||||
it 'should see credentials in AppConfig' do
|
||||
@bulk_password.should_not be_nil
|
||||
@bulk_password.size.should > 20
|
||||
end
|
||||
|
||||
pending 'should be able to discover credentials through NATS' do
|
||||
#TODO: this stuff should be moved to functional tests, when the NATS is up
|
||||
EM.run do
|
||||
EM.add_timer(5) do
|
||||
EM.stop
|
||||
fail 'failed to complete within the timeout'
|
||||
end
|
||||
|
||||
NATS.request('cloudcontroller.bulk.credentials') do |response|
|
||||
@password = response
|
||||
EM.stop
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'bulk#users' do
|
||||
|
||||
before :all do
|
||||
make_a_bunch_of_users(200)
|
||||
end
|
||||
|
||||
it 'requires authentication' do
|
||||
get bulk_users_url
|
||||
response.status.should == 401
|
||||
end
|
||||
|
||||
it 'accepts request without parameters' do
|
||||
get_users
|
||||
response.status.should == 200
|
||||
results.size.should > 1
|
||||
end
|
||||
|
||||
it 'returns batches according to the token supplied' do
|
||||
get_users :batch_size => 50
|
||||
results.size.should == 50
|
||||
token.should_not be_nil
|
||||
|
||||
saved_results = results
|
||||
|
||||
get_users({:bulk_token => token, :batch_size => 100})
|
||||
|
||||
results.size.should == 100
|
||||
|
||||
Hash.should === saved_results
|
||||
Hash.should === results
|
||||
|
||||
saved_results.merge(results).size.should == 150 #no intersection
|
||||
|
||||
get_users({:bulk_token => token, :batch_size => 100})
|
||||
results.size.should == 52 #all remaining users returned, for the total of 202 created
|
||||
end
|
||||
|
||||
it "doesn't allow dangerous manipulation of the token" do
|
||||
get_users :batch_size => 50
|
||||
results.size.should == 50
|
||||
token.should_not be_nil
|
||||
|
||||
tampered_token = token
|
||||
|
||||
Hash.should === tampered_token
|
||||
tampered_token['foo foo'] = 42
|
||||
get_users :bulk_token => tampered_token
|
||||
|
||||
response.status.should == 400
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
describe 'bulk#apps' do
|
||||
it 'requires authentication' do
|
||||
get bulk_apps_url
|
||||
response.status.should == 401
|
||||
end
|
||||
|
||||
it "accepts request without token" do
|
||||
#this is a helper method that includes the authorization header
|
||||
get_apps
|
||||
response.status.should == 200
|
||||
end
|
||||
|
||||
it "returns bulk_token with the intial request" do
|
||||
get_apps
|
||||
token.should_not be_nil
|
||||
end
|
||||
|
||||
it "returns results in the response.body" do
|
||||
get_apps
|
||||
results.should_not be_nil
|
||||
end
|
||||
|
||||
it "respects the batch_size parameter" do
|
||||
[3,5].each { |size|
|
||||
get_apps :batch_size=>size
|
||||
results.size.should == size
|
||||
}
|
||||
end
|
||||
|
||||
it "returns non-intersecting results when token is supplied" do
|
||||
size = 2
|
||||
get_apps :batch_size => size
|
||||
saved_results = results
|
||||
saved_results.size.should == size
|
||||
|
||||
get_apps({:bulk_token => token, :batch_size=>size})
|
||||
results.size.should == size
|
||||
saved_results.each {|saved_result| results.should_not include(saved_result) }
|
||||
end
|
||||
|
||||
it "should eventually return entire collection, batch after batch" do
|
||||
|
||||
args = {:batch_size => 2}
|
||||
apps = {}
|
||||
|
||||
total_size = App.count
|
||||
|
||||
while apps.size < total_size do
|
||||
get_apps(args)
|
||||
apps.merge! results
|
||||
args[:bulk_token] = token
|
||||
end
|
||||
|
||||
apps.size.should == total_size
|
||||
get_apps(args)
|
||||
results.size.should == 0
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
context "using conventional tokens" do
|
||||
it_should_behave_like "any request to the bulk api"
|
||||
end
|
||||
|
||||
context "using jwt tokens" do
|
||||
before :all do
|
||||
CloudSpecHelpers.use_jwt_token = true
|
||||
end
|
||||
|
||||
it_should_behave_like "any request to the bulk api"
|
||||
end
|
||||
|
||||
def get_users(args=nil)
|
||||
get(bulk_users_url, args, @auth_header)
|
||||
end
|
||||
|
||||
def get_apps(args=nil)
|
||||
get(bulk_apps_url, args, @auth_header)
|
||||
end
|
||||
|
||||
def token
|
||||
body['bulk_token']
|
||||
end
|
||||
|
||||
def results
|
||||
body['results']
|
||||
end
|
||||
|
||||
def body
|
||||
Yajl::Parser.parse(response.body)
|
||||
end
|
||||
|
||||
def make_a_bunch_of_apps(n=10)
|
||||
n.times do
|
||||
data = { 'name' => random_name, 'staging' => {'model' => 'sinatra', 'stack' => 'ruby18' }}
|
||||
lambda do
|
||||
post app_create_path, nil, headers_for(@user.email, nil, data)
|
||||
response.should redirect_to(app_get_url(data['name']))
|
||||
end.should change(App, :count).by(1)
|
||||
end
|
||||
end
|
||||
|
||||
def make_a_bunch_of_users(n=100)
|
||||
n.times { build_user("#{random_name}@example.com") }
|
||||
end
|
||||
end
|
|
@ -10,48 +10,64 @@ describe "A GET request to /info" do
|
|||
response.status.should == 200
|
||||
end
|
||||
|
||||
# This code tests https enforcement in a variety of scenarios defined in cloud_spec_helpers
|
||||
CloudSpecHelpers::HTTPS_ENFORCEMENT_SCENARIOS.each do |scenario_vars|
|
||||
describe "#{scenario_vars[:appconfig_enabled].empty? ? '' : 'with ' + (scenario_vars[:appconfig_enabled].map{|x| x.to_s}.join(', ')) + ' enabled'} using #{scenario_vars[:protocol]}" do
|
||||
before do
|
||||
# Back to defaults (false)
|
||||
AppConfig[:https_required] = false
|
||||
AppConfig[:https_required_for_admins] = false
|
||||
shared_examples_for "any request" do
|
||||
# This code tests https enforcement in a variety of scenarios defined in cloud_spec_helpers
|
||||
CloudSpecHelpers::HTTPS_ENFORCEMENT_SCENARIOS.each do |scenario_vars|
|
||||
describe "#{scenario_vars[:appconfig_enabled].empty? ? '' : 'with ' + (scenario_vars[:appconfig_enabled].map{|x| x.to_s}.join(', ')) + ' enabled'} using #{scenario_vars[:protocol]}" do
|
||||
before do
|
||||
# Back to defaults (false)
|
||||
AppConfig[:https_required] = false
|
||||
AppConfig[:https_required_for_admins] = false
|
||||
|
||||
scenario_vars[:appconfig_enabled].each do |v|
|
||||
AppConfig[v] = true
|
||||
scenario_vars[:appconfig_enabled].each do |v|
|
||||
AppConfig[v] = true
|
||||
end
|
||||
|
||||
@current_user = instance_variable_get("@#{scenario_vars[:user]}")
|
||||
@current_headers = headers_for(@current_user, nil, nil, (scenario_vars[:protocol]=="https"))
|
||||
end
|
||||
|
||||
@current_user = instance_variable_get("@#{scenario_vars[:user]}")
|
||||
@current_headers = headers_for(@current_user, nil, nil, (scenario_vars[:protocol]=="https"))
|
||||
end
|
||||
after do
|
||||
# Back to defaults (false)
|
||||
AppConfig[:https_required] = false
|
||||
AppConfig[:https_required_for_admins] = false
|
||||
end
|
||||
|
||||
after do
|
||||
# Back to defaults (false)
|
||||
AppConfig[:https_required] = false
|
||||
AppConfig[:https_required_for_admins] = false
|
||||
end
|
||||
|
||||
# These should work in EVERY config scenario
|
||||
it "with invalid authorization header for #{scenario_vars[:user]}" do
|
||||
headers = @current_headers
|
||||
headers['HTTP_AUTHORIZATION'].reverse!
|
||||
get cloud_info_url, nil, headers
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should_not have_key('user')
|
||||
end
|
||||
|
||||
|
||||
it "with a valid authorization header for #{scenario_vars[:user]}" do
|
||||
get cloud_info_url, nil, @current_headers
|
||||
response.status.should == (scenario_vars[:success] ? 200 : 403)
|
||||
if scenario_vars[:success]
|
||||
# These should work in EVERY config scenario
|
||||
it "with invalid authorization header for #{scenario_vars[:user]}" do
|
||||
headers = @current_headers
|
||||
headers['HTTP_AUTHORIZATION'].reverse!
|
||||
get cloud_info_url, nil, headers
|
||||
response.status.should == 200
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should have_key('user')
|
||||
json.should_not have_key('user')
|
||||
end
|
||||
|
||||
|
||||
it "with a valid authorization header for #{scenario_vars[:user]}" do
|
||||
get cloud_info_url, nil, @current_headers
|
||||
response.status.should == (scenario_vars[:success] ? 200 : 403)
|
||||
if scenario_vars[:success]
|
||||
json = Yajl::Parser.parse(response.body)
|
||||
json.should have_key('user')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context "using conventional tokens" do
|
||||
it_should_behave_like "any request"
|
||||
end
|
||||
|
||||
context "using jwt tokens" do
|
||||
before :all do
|
||||
CloudSpecHelpers.use_jwt_token = true
|
||||
end
|
||||
|
||||
it_should_behave_like "any request"
|
||||
end
|
||||
|
||||
|
||||
end
|
||||
|
||||
|
|
|
@ -1,29 +1,45 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe "Specifying a proxy user" do
|
||||
|
||||
before do
|
||||
build_admin_and_user
|
||||
end
|
||||
|
||||
describe "as an authorized admin" do
|
||||
it "performs the request as that user" do
|
||||
get cloud_info_url, nil, headers_for(@admin, @user)
|
||||
response.status.should == 200
|
||||
Yajl::Parser.parse(response.body)['user'].should == @user.email
|
||||
shared_examples_for "any request to test a proxy user" do
|
||||
describe "as an authorized admin" do
|
||||
it "performs the request as that user" do
|
||||
get cloud_info_url, nil, headers_for(@admin, @user)
|
||||
response.status.should == 200
|
||||
Yajl::Parser.parse(response.body)['user'].should == @user.email
|
||||
end
|
||||
end
|
||||
|
||||
describe "as a regular user" do
|
||||
it "responds with a 403 status" do
|
||||
get cloud_info_url, nil, headers_for(@user, @admin)
|
||||
response.status.should == 403
|
||||
end
|
||||
end
|
||||
|
||||
describe "as an anonymous badguy" do
|
||||
it "responds with a 403 status" do
|
||||
get cloud_info_url, nil, headers_for(nil, @user)
|
||||
response.status.should == 403
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "as a regular user" do
|
||||
it "responds with a 403 status" do
|
||||
get cloud_info_url, nil, headers_for(@user, @admin)
|
||||
response.status.should == 403
|
||||
end
|
||||
context "using conventional tokens" do
|
||||
it_should_behave_like "any request to test a proxy user"
|
||||
end
|
||||
|
||||
describe "as an anonymous badguy" do
|
||||
it "responds with a 403 status" do
|
||||
get cloud_info_url, nil, headers_for(nil, @user)
|
||||
response.status.should == 403
|
||||
context "using jwt tokens" do
|
||||
before :all do
|
||||
CloudSpecHelpers.use_jwt_token = true
|
||||
end
|
||||
|
||||
it_should_behave_like "any request to test a proxy user"
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe "Requesting a new user token" do
|
||||
|
||||
before do
|
||||
build_admin_and_user
|
||||
|
||||
|
@ -8,15 +9,29 @@ describe "Requesting a new user token" do
|
|||
@user_token_path = create_token_path('email' => @user.email)
|
||||
end
|
||||
|
||||
it "returns a 400 response when given invalid JSON" do
|
||||
bad_data = '{{{}}}'
|
||||
post @admin_token_path, nil, headers_for(@admin.email, nil, bad_data)
|
||||
response.status.should == 400
|
||||
shared_examples_for "any request for a new user token" do
|
||||
it "returns a 400 response when given invalid JSON" do
|
||||
bad_data = '{{{}}}'
|
||||
post @admin_token_path, nil, headers_for(@admin.email, nil, bad_data)
|
||||
response.status.should == 400
|
||||
end
|
||||
|
||||
it "always returns a 200 response when admin requests" do
|
||||
post @user_token_path, nil, headers_for(@admin.email, nil, '{}')
|
||||
response.status.should == 200
|
||||
end
|
||||
end
|
||||
|
||||
it "always returns a 200 response when admin requests" do
|
||||
post @user_token_path, nil, headers_for(@admin.email, nil, '{}')
|
||||
response.status.should == 200
|
||||
context "using conventional tokens" do
|
||||
it_should_behave_like "any request for a new user token"
|
||||
end
|
||||
|
||||
context "using jwt tokens" do
|
||||
before :all do
|
||||
CloudSpecHelpers.use_jwt_token = true
|
||||
end
|
||||
|
||||
it_should_behave_like "any request for a new user token"
|
||||
end
|
||||
|
||||
context "When user_expire is specified" do
|
||||
|
|
|
@ -17,6 +17,16 @@ module CloudSpecHelpers
|
|||
{:protocol => "https", :appconfig_enabled => [:https_required_for_admins], :user => "user", :success => true},
|
||||
{:protocol => "https", :appconfig_enabled => [:https_required_for_admins], :user => "admin", :success => true}]
|
||||
|
||||
@@use_jwt_token = false
|
||||
|
||||
def self.use_jwt_token
|
||||
@@use_jwt_token
|
||||
end
|
||||
|
||||
def self.use_jwt_token=(use_jwt_token)
|
||||
@@use_jwt_token = use_jwt_token
|
||||
end
|
||||
|
||||
# Generate a handy header Hash.
|
||||
# At minimum it requires a User or email as the first argument.
|
||||
# Optionally, you can pass a second User or email which will be the 'proxy user'.
|
||||
|
@ -26,7 +36,17 @@ module CloudSpecHelpers
|
|||
headers = {}
|
||||
if user
|
||||
email = User === user ? user.email : user.to_s
|
||||
headers['HTTP_AUTHORIZATION'] = UserToken.create(email).encode
|
||||
if @@use_jwt_token
|
||||
token_body = {"resource_ids" => ["cloud_controller"], "foo" => "bar", "email" => email}
|
||||
token_coder = Cloudfoundry::Uaa::TokenCoder.new(AppConfig[:uaa][:resource_id],
|
||||
AppConfig[:uaa][:token_secret])
|
||||
token = token_coder.encode(token_body)
|
||||
AppConfig[:uaa][:enabled] = true
|
||||
headers['HTTP_AUTHORIZATION'] = "bearer #{token}"
|
||||
else
|
||||
AppConfig[:uaa][:enabled] = false
|
||||
headers['HTTP_AUTHORIZATION'] = UserToken.create(email).encode
|
||||
end
|
||||
end
|
||||
if proxy_user
|
||||
proxy_email = User === proxy_user ? proxy_user.email : proxy_user.to_s
|
||||
|
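A minimal sketch of exercising both token styles with the helper above; the email is a placeholder and AppConfig[:uaa] is assumed to be populated as in the test config:

# Hypothetical usage inside a spec:
#   CloudSpecHelpers.use_jwt_token = true
#   headers_for('a@bar.com', nil)   # => { 'HTTP_AUTHORIZATION' => "bearer <token signed with AppConfig[:uaa][:token_secret]>" }
#   CloudSpecHelpers.use_jwt_token = false
#   headers_for('a@bar.com', nil)   # => { 'HTTP_AUTHORIZATION' => "<legacy UserToken>" }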
@ -63,6 +83,6 @@ module CloudSpecHelpers
|
|||
# Only 'hard-code' names in the specs that are meaningful.
|
||||
# If the name doesn't matter in real life, use a random one to indicate that.
|
||||
def random_name(length = 7)
|
||||
Digest::SHA1.hexdigest("#{Time.now}-#{rand(1_000)}").slice(0,length)
|
||||
Digest::SHA1.hexdigest("#{Time.now.nsec}-#{rand(1_000_000)}").slice(0,length)
|
||||
end
|
||||
end
|
||||
|
|
Binary files changed but not shown, including vendored gems under cloud_controller/vendor/cache:
cloud_controller/vendor/cache/vcap_stager-0.1.8.gem → cloud_controller/vendor/cache/vcap_stager-0.1.14.gem (vendored)
cloud_controller/vendor/cache/vcap_staging-0.1.32.gem → cloud_controller/vendor/cache/vcap_staging-0.1.50.gem (vendored)
|
@ -1,10 +1,9 @@
|
|||
PATH
|
||||
remote: .
|
||||
specs:
|
||||
vcap_common (1.0.3)
|
||||
vcap_common (1.0.10)
|
||||
eventmachine (~> 0.12.11.cloudfoundry.3)
|
||||
logging (>= 1.5.0)
|
||||
nats (~> 0.4.22.beta.4)
|
||||
nats (~> 0.4.22.beta.8)
|
||||
posix-spawn (~> 0.3.6)
|
||||
thin (~> 1.3.1)
|
||||
yajl-ruby (~> 0.8.3)
|
||||
|
@ -13,7 +12,7 @@ GEM
|
|||
remote: http://rubygems.org/
|
||||
specs:
|
||||
addressable (2.2.6)
|
||||
daemons (1.1.5)
|
||||
daemons (1.1.8)
|
||||
diff-lcs (1.1.3)
|
||||
em-http-request (0.3.0)
|
||||
addressable (>= 2.0.0)
|
||||
|
@ -21,17 +20,14 @@ GEM
|
|||
eventmachine (>= 0.12.9)
|
||||
escape_utils (0.2.4)
|
||||
eventmachine (0.12.11.cloudfoundry.3)
|
||||
json_pure (1.6.4)
|
||||
little-plugger (1.1.3)
|
||||
logging (1.6.1)
|
||||
little-plugger (>= 1.1.2)
|
||||
nats (0.4.22.beta.4)
|
||||
json_pure (1.6.5)
|
||||
nats (0.4.22.beta.8)
|
||||
daemons (>= 1.1.4)
|
||||
eventmachine (>= 0.12.10)
|
||||
json_pure (>= 1.6.1)
|
||||
thin (>= 1.3.1)
|
||||
posix-spawn (0.3.6)
|
||||
rack (1.4.0)
|
||||
rack (1.4.1)
|
||||
rake (0.9.2.2)
|
||||
rspec (2.7.0)
|
||||
rspec-core (~> 2.7.0)
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
#!/usr/bin/env ruby
|
||||
|
||||
require 'bundler'
|
||||
require 'fileutils'
|
||||
require 'logger'
|
||||
require 'rubygems'
|
||||
|
||||
# Provides functionality similar to `bundle package`, but without needing
|
||||
# to install any gems.
|
||||
|
||||
unless ARGV.length == 3
|
||||
puts "Usage: fetch_gems [/path/to/Gemfile] [/path/to/Gemfile.lock] [/path/to/store]"
|
||||
exit 1
|
||||
end
|
||||
|
||||
gemfile_path, lockfile_path, gem_store_dir = ARGV.map {|path| File.expand_path(path) }
|
||||
|
||||
ENV['BUNDLE_GEMFILE'] = gemfile_path
|
||||
|
||||
lockfile_contents = File.read(lockfile_path)
|
||||
parser = Bundler::LockfileParser.new(lockfile_contents)
|
||||
|
||||
logger = Logger.new(STDOUT)
|
||||
|
||||
if parser.specs.empty?
|
||||
logger.info("No gems found")
|
||||
exit 0
|
||||
end
|
||||
|
||||
FileUtils.mkdir_p(gem_store_dir)
|
||||
|
||||
to_fetch = []
|
||||
parser.specs.each do |spec|
|
||||
gem_basename = "#{spec.name}-#{spec.version}.gem"
|
||||
dst_path = File.join(gem_store_dir, gem_basename)
|
||||
if spec.source.kind_of?(Bundler::Source::Path) && (spec.source.path.to_s == '.')
|
||||
logger.info("Skipping '#{gem_basename}', Gemfile.lock appears to belong to it")
|
||||
elsif File.exist?(dst_path)
|
||||
logger.info("Skipping '#{gem_basename}', found in '#{gem_store_dir}'")
|
||||
else
|
||||
logger.info("Need to download '#{gem_basename}'")
|
||||
to_fetch << gem_basename
|
||||
end
|
||||
end
|
||||
|
||||
unless to_fetch.empty?
|
||||
urls = to_fetch.map do |gem_basename|
|
||||
"http://production.s3.rubygems.org/gems/#{gem_basename}"
|
||||
end.join(' ')
|
||||
|
||||
cmd = "wget --quiet --retry-connrefused --connect-timeout=5"
|
||||
cmd += " --no-check-certificate --directory-prefix #{gem_store_dir} #{urls}"
|
||||
|
||||
logger.info("Fetching #{to_fetch.join(', ')} from rubygems")
|
||||
logger.info("Executing '#{cmd}'")
|
||||
|
||||
unless system(cmd)
|
||||
logger.error("#{cmd} failed with status #{$?}!")
|
||||
exit $?.exitstatus
|
||||
end
|
||||
end
|
||||
|
||||
logger.info("Done")
|
|
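A hedged example of invoking the script above; the paths are illustrative only:

# ruby fetch_gems cloud_controller/Gemfile cloud_controller/Gemfile.lock /var/vcap/data/gem_cache
# Gems already present in the store directory are skipped; the rest are fetched from rubygems via wget.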
@ -1,5 +1,10 @@
|
|||
#!/usr/bin/env ruby
|
||||
$:.unshift(File.join(File.dirname(__FILE__), '../lib/vcap/user_pools'))
|
||||
|
||||
ENV["BUNDLE_GEMFILE"] ||= File.join(File.dirname(__FILE__), '../Gemfile')
|
||||
require 'bundler'
|
||||
Bundler.setup
|
||||
|
||||
require 'optparse'
|
||||
require 'user_pool_util'
|
||||
|
||||
|
|
|
@ -43,6 +43,46 @@ class VCAP::Services::Api::ServiceGatewayClient
|
|||
perform_request(Net::HTTP::Delete, "/gateway/v1/configurations/#{args[:service_id]}")
|
||||
end
|
||||
|
||||
def create_snapshot(args)
|
||||
resp = perform_request(Net::HTTP::Post, "/gateway/v1/configurations/#{args[:service_id]}/snapshots")
|
||||
VCAP::Services::Api::Job.decode(resp.body)
|
||||
end
|
||||
|
||||
def enum_snapshots(args)
|
||||
resp = perform_request(Net::HTTP::Get, "/gateway/v1/configurations/#{args[:service_id]}/snapshots")
|
||||
VCAP::Services::Api::SnapshotList.decode(resp.body)
|
||||
end
|
||||
|
||||
def snapshot_details(args)
|
||||
resp = perform_request(Net::HTTP::Get, "/gateway/v1/configurations/#{args[:service_id]}/snapshots/#{args[:snapshot_id]}")
|
||||
VCAP::Services::Api::Snapshot.decode(resp.body)
|
||||
end
|
||||
|
||||
def rollback_snapshot(args)
|
||||
resp = perform_request(Net::HTTP::Put, "/gateway/v1/configurations/#{args[:service_id]}/snapshots/#{args[:snapshot_id]}")
|
||||
VCAP::Services::Api::Job.decode(resp.body)
|
||||
end
|
||||
|
||||
def serialized_url(args)
|
||||
resp = perform_request(Net::HTTP::Get, "/gateway/v1/configurations/#{args[:service_id]}/serialized/url")
|
||||
VCAP::Services::Api::Job.decode(resp.body)
|
||||
end
|
||||
|
||||
def import_from_url(args)
|
||||
resp = perform_request(Net::HTTP::Put, "/gateway/v1/configurations/#{args[:service_id]}/serialized/url", args[:msg])
|
||||
VCAP::Services::Api::Job.decode(resp.body)
|
||||
end
|
||||
|
||||
def import_from_data(args)
|
||||
resp = perform_request(Net::HTTP::Put, "/gateway/v1/configurations/#{args[:service_id]}/serialized/data", args[:msg])
|
||||
VCAP::Services::Api::Job.decode(resp.body)
|
||||
end
|
||||
|
||||
def job_info(args)
|
||||
resp = perform_request(Net::HTTP::Get, "/gateway/v1/configurations/#{args[:service_id]}/jobs/#{args[:job_id]}")
|
||||
VCAP::Services::Api::Job.decode(resp.body)
|
||||
end
|
||||
|
||||
def bind(args)
|
||||
msg = VCAP::Services::Api::GatewayBindRequest.new(args)
|
||||
resp = perform_request(Net::HTTP::Post, "/gateway/v1/configurations/#{msg.service_id}/handles", msg)
|
|
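A rough sketch of calling the new snapshot endpoints added above; the service and job ids are placeholders, and an already-constructed gateway client is assumed:

# Hypothetical usage:
#   job  = client.create_snapshot(:service_id => 'svc-id')                     # decodes into VCAP::Services::Api::Job
#   list = client.enum_snapshots(:service_id => 'svc-id')                      # decodes into SnapshotList
#   info = client.job_info(:service_id => 'svc-id', :job_id => job.job_id)     # poll job status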
@ -26,15 +26,14 @@ module VCAP
|
|||
optional :plans, [String]
|
||||
optional :plan_options
|
||||
optional :binding_options
|
||||
optional :acls, {'users' => [String], 'wildcards' => [String]}
|
||||
optional :acls
|
||||
optional :active
|
||||
optional :timeout, Integer
|
||||
end
|
||||
|
||||
class BrokeredServiceOfferingRequest < JsonMessage
|
||||
required :label, SERVICE_LABEL_REGEX
|
||||
required :options, [{"name" => String, "acls" => {"users" => [String], "wildcards" => [String] } , "credentials" => Hash}]
|
||||
#required :options, [::JsonSchema::WILDCARD]
|
||||
required :options, [{"name" => String, "credentials" => Hash}]
|
||||
optional :description, String
|
||||
end
|
||||
|
||||
|
@ -123,6 +122,32 @@ module VCAP
|
|||
required :binding_options
|
||||
end
|
||||
|
||||
class Snapshot < JsonMessage
|
||||
required :snapshot_id, String
|
||||
required :date, String
|
||||
required :size, Integer
|
||||
end
|
||||
|
||||
class SnapshotList < JsonMessage
|
||||
required :snapshots, [::JsonSchema::WILDCARD]
|
||||
end
|
||||
|
||||
class Job < JsonMessage
|
||||
required :job_id, String
|
||||
required :status, String
|
||||
required :start_time, String
|
||||
optional :description, String
|
||||
optional :complete_time, String
|
||||
optional :result, ::JsonSchema::WILDCARD
|
||||
end
|
||||
|
||||
class SerializedURL < JsonMessage
|
||||
required :url, URI::regexp(%w(http https))
|
||||
end
|
||||
|
||||
class SerializedData < JsonMessage
|
||||
required :data, String
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,6 +1,5 @@
|
|||
# Copyright (c) 2009-2011 VMware, Inc.
|
||||
require 'fileutils'
|
||||
require 'logging'
|
||||
require 'socket'
|
||||
|
||||
# VMware's Cloud Application Platform
|
||||
|
@ -122,27 +121,6 @@ module VCAP
|
|||
end
|
||||
end
|
||||
|
||||
# Helper for creating logs based on common config options. Returns a logger appending to standard out
|
||||
# by default. This is somewhat kitchen-sink, but better than duplicating this code in each component.
|
||||
#
|
||||
# @param String name Unique ID for this logger
|
||||
# @param Hash opts :log_file => Filename where log messages should go
|
||||
# :log_rotation_interval => If supplied, rotate logs on this interval
|
||||
def self.create_logger(name, opts={})
|
||||
log = ::Logging.logger[name]
|
||||
layout = ::Logging.layouts.pattern(:pattern => '[%.1l, [%d] %5l -- %c: %m\n')
|
||||
appender = ::Logging.appenders.stdout(:layout => layout)
|
||||
if opts[:log_file]
|
||||
if opts[:log_rotation_interval]
|
||||
appender = ::Logging.appenders.rolling_file(opts[:log_file], :age => opts[:log_rotation_interval], :layout => layout)
|
||||
else
|
||||
appender = ::Logging.appenders.file(opts[:log_file], :layout => layout)
|
||||
end
|
||||
end
|
||||
log.appenders = [appender]
|
||||
log
|
||||
end
|
||||
|
||||
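A one-line example of the logging helper documented above; the log file path is a placeholder:

# log = VCAP.create_logger('router', :log_file => '/tmp/router.log', :log_rotation_interval => 'daily')
# log.info('component started')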
# Helper class to atomically create/update pidfiles and ensure that only one instance of a given
|
||||
# process is running at all times.
|
||||
#
|
||||
|
|
|
@ -12,16 +12,32 @@ module VCAP
|
|||
RACK_TEXT_HDR = { 'Content-Type' => 'text/plaintext' }
|
||||
|
||||
class Varz
|
||||
def initialize(logger)
|
||||
@logger = logger
|
||||
end
|
||||
|
||||
def call(env)
|
||||
@logger.debug "varz access"
|
||||
varz = Yajl::Encoder.encode(Component.updated_varz, :pretty => true, :terminator => "\n")
|
||||
[200, { 'Content-Type' => 'application/json' }, varz]
|
||||
[200, { 'Content-Type' => 'application/json', 'Content-Length' => varz.length.to_s }, varz]
|
||||
rescue => e
|
||||
@logger.error "varz error #{e.inspect} #{e.backtrace.join("\n")}"
|
||||
raise e
|
||||
end
|
||||
end
|
||||
|
||||
class Healthz
|
||||
def initialize(logger)
|
||||
@logger = logger
|
||||
end
|
||||
|
||||
def call(env)
|
||||
@logger.debug "healthz access"
|
||||
healthz = Component.updated_healthz
|
||||
[200, { 'Content-Type' => 'application/json' }, healthz]
|
||||
[200, { 'Content-Type' => 'application/json', 'Content-Length' => healthz.length.to_s }, healthz]
|
||||
rescue => e
|
||||
@logger.error "healthz error #{e.inspect} #{e.backtrace.join("\n")}"
|
||||
raise e
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -62,17 +78,17 @@ module VCAP
|
|||
healthz
|
||||
end
|
||||
|
||||
def start_http_server(host, port, auth)
|
||||
def start_http_server(host, port, auth, logger)
|
||||
http_server = Thin::Server.new(host, port, :signals => false) do
|
||||
Thin::Logging.silent = true
|
||||
use Rack::Auth::Basic do |username, password|
|
||||
[username, password] == auth
|
||||
end
|
||||
map '/healthz' do
|
||||
run Healthz.new
|
||||
run Healthz.new(logger)
|
||||
end
|
||||
map '/varz' do
|
||||
run Varz.new
|
||||
run Varz.new(logger)
|
||||
end
|
||||
end
|
||||
http_server.start!
|
||||
|
@ -91,6 +107,7 @@ module VCAP
|
|||
port = opts[:port] || VCAP.grab_ephemeral_port
|
||||
nats = opts[:nats] || NATS
|
||||
auth = [opts[:user] || VCAP.secure_uuid, opts[:password] || VCAP.secure_uuid]
|
||||
logger = opts[:logger] || Logger.new(nil)
|
||||
|
||||
# Discover message limited
|
||||
@discover = {
|
||||
|
@ -113,7 +130,7 @@ module VCAP
|
|||
raise "EventMachine reactor needs to be running" if !EventMachine.reactor_running?
|
||||
|
||||
# Startup the http endpoint for /varz and /healthz
|
||||
start_http_server(host, port, auth)
|
||||
start_http_server(host, port, auth, logger)
|
||||
|
||||
# Listen for discovery requests
|
||||
nats.subscribe('vcap.component.discover') do |msg, reply|
|
||||
|
|
|
@ -1,61 +0,0 @@
|
|||
require 'logger'
|
||||
require 'fiber'
|
||||
require 'fileutils'
|
||||
|
||||
module VCAP end
|
||||
module VCAP::EMRun
|
||||
class << self
|
||||
CLOSE_FDS_PATH = File.expand_path("../close_fds", __FILE__)
|
||||
|
||||
def init(logger = nil)
|
||||
@logger = logger || Logger.new(STDOUT)
|
||||
end
|
||||
|
||||
def run_restricted(run_dir, user, base_cmd)
|
||||
user_name = user[:user_name]
|
||||
uid = user[:uid]
|
||||
|
||||
close_fds_dst = File.join(run_dir, 'close_fds')
|
||||
FileUtils.cp(CLOSE_FDS_PATH, close_fds_dst)
|
||||
FileUtils.chmod(0500, close_fds_dst)
|
||||
FileUtils.chown(uid, nil, close_fds_dst)
|
||||
|
||||
#XXX resource limits would be nice.
|
||||
|
||||
run_action = proc do |process|
|
||||
process.send_data("cd #{run_dir}\n")
|
||||
process.send_data("ruby ./close_fds true #{base_cmd} 2>&1\n")
|
||||
process.send_data("exit\n")
|
||||
end
|
||||
|
||||
f = Fiber.current
|
||||
exit_action = proc do |output, status|
|
||||
if status.exitstatus != 0
|
||||
@logger.debug("EM.system failed with: #{output}")
|
||||
else
|
||||
@logger.debug("completed with: #{output}")
|
||||
end
|
||||
f.resume([output, status.exitstatus])
|
||||
end
|
||||
|
||||
sh_command = "env -i su -s /bin/sh #{user_name}"
|
||||
EM.system(sh_command, run_action, exit_action)
|
||||
Fiber.yield
|
||||
end
|
||||
|
||||
def run(cmd, expected_exit_status = 0)
|
||||
f = Fiber.current
|
||||
EM.system("#{cmd} 2>&1") { |output, status|
|
||||
if status.exitstatus != expected_exit_status
|
||||
@logger.error("run (#{cmd}) expected #{expected_exit_status} saw #{status.exitstatus}")
|
||||
@logger.error("run output: #{output}")
|
||||
end
|
||||
f.resume([output, status.exitstatus])
|
||||
}
|
||||
Fiber.yield
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -1,5 +1,8 @@
|
|||
#simple priority queue implementation using binary max-heap on top of a ruby array.
|
||||
#this implementation is not meant to be high-performance, just decent, with two goals:
|
||||
#a priority queue with the added twist of FIFO behavior for elements with equal priorities
|
||||
#implementation using binary max-heap on top of a ruby array.
|
||||
#the FIFO behavior is implemented by storing a FIFO bucket of same-priority values
|
||||
|
||||
#The implementation is not meant to be high-performance, just decent, with two goals:
|
||||
#1. clean interface
|
||||
#2. proper time/space complexity of a binary heap
|
||||
#3. no silly memory leaks (Ah, three weapons of the Spanish Inquisition)
|
||||
|
@ -18,46 +21,80 @@
|
|||
#See spec/unit/priority_queue_set for
|
||||
#other examples
|
||||
|
||||
|
||||
|
||||
require 'set'
|
||||
require 'pp'
|
||||
|
||||
module VCAP
|
||||
class PriorityQueue
|
||||
class PriorityQueueFIFO
|
||||
|
||||
attr_reader :size
|
||||
|
||||
def initialize
|
||||
@heap_arr = []
|
||||
end
|
||||
|
||||
def size
|
||||
@heap_arr.size
|
||||
@p2b = {} #hash mapping priorities to buckets
|
||||
@size = 0
|
||||
end
|
||||
|
||||
def empty?
|
||||
@heap_arr.empty?
|
||||
size == 0
|
||||
end
|
||||
|
||||
def insert(item, priority = 0)
|
||||
raise ArgumentError, "priority can not be negative: #{priority}" if priority < 0
|
||||
@heap_arr.push [item, priority]
|
||||
shift_up
|
||||
|
||||
unless append_to_existing_priority_bucket(item, priority)
|
||||
add_bucket_at_the_end_and_shift_up(item, priority)
|
||||
end
|
||||
@size += 1
|
||||
end
|
||||
|
||||
def remove
|
||||
return nil if empty?
|
||||
elem = @heap_arr[0][0]
|
||||
if size > 1
|
||||
bucket = top_bucket
|
||||
priority = top_priority
|
||||
elem = bucket.shift
|
||||
@size -= 1
|
||||
if empty?
|
||||
@heap_arr.clear
|
||||
@p2b.clear
|
||||
elsif bucket.empty?
|
||||
@heap_arr[0] = @heap_arr.pop
|
||||
@p2b.delete(priority)
|
||||
shift_down
|
||||
else
|
||||
@heap_arr.clear
|
||||
#do nothing, we just shifted a value from a bucket and it still isn't empty, so no rearrangement is needed
|
||||
end
|
||||
elem
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def add_bucket_at_the_end_and_shift_up(item, priority)
|
||||
bucket = [item]
|
||||
@p2b[priority] = bucket
|
||||
|
||||
#normal binary heap operation
|
||||
@heap_arr.push priority
|
||||
shift_up
|
||||
end
|
||||
|
||||
def append_to_existing_priority_bucket(item, priority)
|
||||
return false unless @p2b[priority]
|
||||
@p2b[priority] << item
|
||||
return true
|
||||
end
|
||||
|
||||
def top_bucket
|
||||
@p2b[top_priority]
|
||||
end
|
||||
|
||||
def top_priority
|
||||
priority_at(0)
|
||||
end
|
||||
|
||||
def priority_at(index)
|
||||
return -1 if index >= @heap_arr.size
|
||||
@heap_arr[index][1]
|
||||
@heap_arr[index]
|
||||
end
|
||||
|
||||
def parent_index(index)
|
||||
|
@ -108,7 +145,7 @@ module VCAP
|
|||
end
|
||||
|
||||
|
||||
class PrioritySet < PriorityQueue
|
||||
class PrioritySet < PriorityQueueFIFO
|
||||
def initialize
|
||||
super
|
||||
@set = Set.new #the set is used to check for duplicates
|
||||
|
|
|
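A small sketch of the FIFO-within-priority behavior described in the header comment of this queue; purely illustrative:

# q = VCAP::PriorityQueueFIFO.new
# q.insert(:low_a, 1)
# q.insert(:high,  5)
# q.insert(:low_b, 1)
# q.remove  # => :high   (highest priority first)
# q.remove  # => :low_a  (FIFO among equal priorities)
# q.remove  # => :low_b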
@ -2,7 +2,7 @@ $:.unshift(File.join(File.dirname(__FILE__),'..'))
|
|||
$:.unshift(File.dirname(__FILE__))
|
||||
require 'user_pool_util'
|
||||
require 'user_ops'
|
||||
require 'em_run'
|
||||
require 'subprocess'
|
||||
|
||||
module VCAP
|
||||
class UserPool
|
||||
|
@ -12,7 +12,6 @@ module VCAP
|
|||
def initialize(name, logger = nil)
|
||||
@logger = logger || Logger.new(STDOUT)
|
||||
UserPoolUtil.init
|
||||
EMRun.init
|
||||
@free_users = UserPoolUtil.open_pool(name)
|
||||
@busy_users = Hash.new
|
||||
@logger.debug("Initialized user pool #{name} with #{@free_users.size} users.")
|
||||
|
@ -32,7 +31,7 @@ module VCAP
|
|||
def free_user(user)
|
||||
user_name = user[:user_name]
|
||||
if @busy_users.has_key?(user_name)
|
||||
VCAP::EMRun.run("pkill -9 -u #{user_name}", 1)
|
||||
VCAP::Subprocess.run("pkill -9 -u #{user_name}", 1)
|
||||
@busy_users.delete(user_name)
|
||||
@free_users[user_name] = user
|
||||
@logger.debug "free()'d user #{user_name}"
|
||||
|
|
|
@ -18,7 +18,7 @@ describe VCAP::Component do
|
|||
end
|
||||
|
||||
it "should publish an announcement" do
|
||||
em(:timeout => 1) do
|
||||
em(:timeout => 2) do
|
||||
nats.subscribe("vcap.component.announce") do |msg|
|
||||
body = Yajl::Parser.parse(msg, :symbolize_keys => true)
|
||||
body[:type].should == "type"
|
||||
|
@ -120,20 +120,17 @@ describe VCAP::Component do
|
|||
|
||||
describe "http endpoint" do
|
||||
let(:host) { VCAP::Component.varz[:host] }
|
||||
let(:http) { ::EM::HttpRequest.new("http://#{host}/varz") }
|
||||
let(:http2) { ::EM::HttpRequest.new("http://#{host}/varz") }
|
||||
let(:authorization) { { :head => { "authorization" => VCAP::Component.varz[:credentials] } } }
|
||||
|
||||
it "should let you specify the port" do
|
||||
em do
|
||||
options = default_options
|
||||
options[:port] = 18123
|
||||
port = 18123
|
||||
options = default_options.merge(:port => port)
|
||||
|
||||
VCAP::Component.register(options)
|
||||
VCAP::Component.varz[:host].split(':').last.to_i.should == port
|
||||
|
||||
http.opts.port.should == 18123
|
||||
|
||||
request = http.get authorization.merge(:path => "/varz")
|
||||
request = make_em_httprequest(:get, host, "/varz", authorization)
|
||||
request.callback do
|
||||
request.response_header.status.should == 200
|
||||
done
|
||||
|
@ -142,19 +139,19 @@ describe VCAP::Component do
|
|||
end
|
||||
|
||||
it "should not truncate varz on second request" do
|
||||
em do
|
||||
em(:timeout => 2) do
|
||||
options = default_options
|
||||
|
||||
VCAP::Component.register(options)
|
||||
|
||||
request = http.get authorization.merge(:path => "/varz")
|
||||
request = make_em_httprequest(:get, host, "/varz", authorization)
|
||||
request.callback do
|
||||
request.response_header.status.should == 200
|
||||
content_length = request.response_header['CONTENT_LENGTH'].to_i
|
||||
|
||||
VCAP::Component.varz[:var] = 'var'
|
||||
|
||||
request2 = http2.get authorization.merge(:path => "/varz")
|
||||
request2 = make_em_httprequest(:get, host, "/varz", authorization)
|
||||
request2.callback do
|
||||
request2.response_header.status.should == 200
|
||||
content_length2 = request2.response_header['CONTENT_LENGTH'].to_i
|
||||
|
@ -172,13 +169,13 @@ describe VCAP::Component do
|
|||
|
||||
VCAP::Component.register(options)
|
||||
|
||||
request = http.get authorization.merge(:path => "/healthz")
|
||||
request = make_em_httprequest(:get, host, "/healthz", authorization)
|
||||
request.callback do
|
||||
request.response_header.status.should == 200
|
||||
|
||||
VCAP::Component.healthz = 'healthz'
|
||||
|
||||
request2 = http2.get authorization.merge(:path => "/healthz")
|
||||
request2 = make_em_httprequest(:get, host, "/healthz", authorization)
|
||||
request2.callback do
|
||||
request2.response_header.status.should == 200
|
||||
content_length2 = request2.response_header['CONTENT_LENGTH'].to_i
|
||||
|
@ -200,7 +197,7 @@ describe VCAP::Component do
|
|||
|
||||
VCAP::Component.varz[:credentials].should == ["foo", "bar"]
|
||||
|
||||
request = http.get authorization.merge(:path => "/varz")
|
||||
request = make_em_httprequest(:get, host, "/varz", authorization)
|
||||
request.callback do
|
||||
request.response_header.status.should == 200
|
||||
done
|
||||
|
@ -208,48 +205,11 @@ describe VCAP::Component do
|
|||
end
|
||||
end
|
||||
|
||||
it "should skip keep-alive by default" do
|
||||
em do
|
||||
VCAP::Component.register(default_options)
|
||||
|
||||
request = http.get authorization
|
||||
request.callback do
|
||||
request.response_header.should_not be_keepalive
|
||||
|
||||
request = http.get authorization
|
||||
request.callback { raise "second request shouldn't succeed" }
|
||||
request.errback { done }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it "should support keep-alive" do
|
||||
em do
|
||||
VCAP::Component.register(default_options)
|
||||
|
||||
first_peername = nil
|
||||
request = http.get authorization.merge(:path => "/varz", :keepalive => true)
|
||||
request.callback do
|
||||
request.response_header.should be_keepalive
|
||||
first_peername = http.get_peername
|
||||
first_peername.should be
|
||||
|
||||
request = http.get authorization.merge(:path => "/varz", :keepalive => true)
|
||||
request.callback do
|
||||
request.response_header.should be_keepalive
|
||||
second_peername = http.get_peername
|
||||
second_peername.should eql first_peername
|
||||
done
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it "should return 401 on unauthorized requests" do
|
||||
em do
|
||||
VCAP::Component.register(default_options)
|
||||
|
||||
request = http.get :path => "/varz"
|
||||
request = make_em_httprequest(:get, host, "/varz")
|
||||
request.callback do
|
||||
request.response_header.status.should == 401
|
||||
done
|
||||
|
@ -261,7 +221,7 @@ describe VCAP::Component do
|
|||
em do
|
||||
VCAP::Component.register(default_options)
|
||||
|
||||
request = http.get :path => "/varz", :head => { "authorization" => "foo" }
|
||||
request = make_em_httprequest(:get, host, "/varz", :head => { "authorization" => "foo" })
|
||||
request.callback do
|
||||
request.response_header.status.should == 400
|
||||
done
|
||||
|
@ -269,4 +229,8 @@ describe VCAP::Component do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
def make_em_httprequest(method, host, path, opts={})
|
||||
::EM::HttpRequest.new("http://#{host}#{path}").send(method, opts)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -13,6 +13,17 @@ require "vcap/process_utils"
require "vcap/config"
require "vcap/priority_queue"
require 'vcap/quota'
require 'benchmark'

RSpec::Matchers.define :take_less_than do |n|
chain :seconds do; end
match do |block|
@elapsed = Benchmark.realtime do
block.call
end
@elapsed <= n
end
end

RSpec.configure do |c|
# declare an exclusion filter

@ -1,50 +0,0 @@
require 'spec_helper'

require 'fileutils'
require 'logging'
require 'tmpdir'

describe 'VCAP#create_logger' do
before :all do
@tmpdir = Dir.mktmpdir
Dir.exists?(@tmpdir).should be_true
end

after :all do
FileUtils.rm_rf(@tmpdir)
Dir.exists?(@tmpdir).should be_false
end

it 'should create a stdout logger by default' do
log = VCAP.create_logger('test')
appenders = log.instance_variable_get '@appenders'
appenders.length.should == 1
appenders[0].class.should == Logging::Appenders::Stdout
end

it 'should create a file logger if logfile given, but no rotation interval supplied' do
logfilename = File.join(@tmpdir, 'no_rotate.log')
log = VCAP.create_logger('test', :log_file => logfilename)
appenders = log.instance_variable_get '@appenders'
appenders.length.should == 1
appenders[0].class.should == Logging::Appenders::File
end

it 'should create a rolling file logger if logfile given and rotation interval supplied' do
logfilename = File.join(@tmpdir, 'no_rotate.log')
log = VCAP.create_logger('test', :log_file => logfilename, :log_rotation_interval => 'daily')
appenders = log.instance_variable_get '@appenders'
appenders.length.should == 1
appenders[0].class.should == Logging::Appenders::RollingFile
end

it 'should correctly rotate files by age' do
logfilename = File.join(@tmpdir, 'rotate_by_age.log')
log = VCAP.create_logger('tst', :log_file => logfilename, :log_rotation_interval => 1)
log.info "TEST LINE 1"
sleep(2)
log.info "TEST_LINE 2"
logs = Dir.glob(File.join(@tmpdir, 'rotate_by_age.*log'))
logs.count.should == 2
end
end

@ -1,9 +1,9 @@
require 'spec_helper'

describe VCAP::PriorityQueue do
describe VCAP::PriorityQueueFIFO do
before :each do
@q.should be_nil
@q = VCAP::PrioritySet.new
@q = VCAP::PriorityQueueFIFO.new
end

describe '.new' do

@ -38,18 +38,41 @@ describe VCAP::PriorityQueue do

describe 'high volume' do
it 'should be able to accept a bunch of random values and return them in non-increasing order' do
n = 100_000
n = (ENV['VCAP_TEST_PQUEUE_LOAD'] || 100_000).to_i
n.times { val = rand ; @q.insert val,val }
prev = @q.remove
until @q.empty? do
val = @q.remove
val.should be < prev
prev = val
n-=1
end

expect do
until @q.empty? do
val = @q.remove
val.should be < prev
prev = val
n-=1
end
end.to take_less_than(6).seconds

n.should == 1
end
end

describe 'high volume FIFO' do
it 'should have decent FIFO perfomance for same-priority values' do
n = 100_000
n.times { |i| @q.insert i }

prev = @q.remove
expect do
until @q.empty? do
val = @q.remove
val.should == prev + 1
prev = val
end
end.to take_less_than(1).seconds

prev.should == n - 1
end
end
end

describe VCAP::PrioritySet do

@ -127,4 +150,55 @@ describe VCAP::PrioritySet do
@qs.size.should == 5
end
end

describe 'equal priorities' do
describe 'FIFO behavior' do
it 'should FIFO for simplest case' do
@qs.insert 'first', 1
@qs.insert 'second', 1
@qs.insert 'third', 1

@qs.remove.should == 'first'
@qs.remove.should == 'second'
@qs.remove.should == 'third'
end

it 'should FIFO for lower and higher priority items interpersed' do
50.times {|i|
val, pri = 0,0
if rand > 0.2
val, pri = 1000 - i , 2000000000
else
val, pri = 100 - i, 100 - i
end
@qs.insert(val, pri)
}
prev = @qs.remove
until @qs.empty?
v = @qs.remove
v.should < prev
prev = v
end
end

it 'should retain FIFO ordering when higher priority items are interspersed' do
@qs.insert 1
@qs.insert 2
@qs.insert 'high', 2
@qs.insert 3
@qs.insert 4
@qs.remove.should == 'high'
@qs.insert 5
@qs.insert 6
@qs.remove.should == 1
@qs.remove.should == 2
@qs.remove.should == 3
@qs.remove.should == 4
@qs.remove.should == 5
@qs.remove.should == 6
end
end
end
end

@ -2,18 +2,17 @@ $:.unshift(File.dirname(__FILE__),'..')
require 'spec_helper'
require 'user_pool_util'
require 'user_pool'
require 'em_fiber_wrap'

describe VCAP::UserPool, :needs_root => true do
before(:all) do
VCAP::UserPoolUtil.init
em_fiber_wrap{ VCAP::UserPoolUtil.install_pool('test_pool', 5)}
VCAP::UserPoolUtil.install_pool('test_pool', 5)
@up = VCAP::UserPool.new('test_pool')
@in_use = []
end

after(:all) do
em_fiber_wrap{ VCAP::UserPoolUtil.remove_pool('test_pool')}
VCAP::UserPoolUtil.remove_pool('test_pool')
end

it "allocates some users" do

@ -23,9 +22,9 @@ describe VCAP::UserPool, :needs_root => true do
end

it "free's some users" do
em_fiber_wrap { @up.free_user @in_use.pop }
em_fiber_wrap { @up.free_user @in_use.pop }
em_fiber_wrap { @up.free_user @in_use.pop }
@up.free_user @in_use.pop
@up.free_user @in_use.pop
@up.free_user @in_use.pop
end

end

@ -1,6 +1,6 @@
spec = Gem::Specification.new do |s|
s.name = 'vcap_common'
s.version = '1.0.3'
s.version = '1.0.10'
s.date = '2011-02-09'
s.summary = 'vcap common'
s.homepage = "http://github.com/vmware-ac/core"

@ -12,8 +12,7 @@ spec = Gem::Specification.new do |s|
s.add_dependency('eventmachine', '~> 0.12.11.cloudfoundry.3')
s.add_dependency('thin', '~> 1.3.1')
s.add_dependency('yajl-ruby', '~> 0.8.3')
s.add_dependency('nats', '~> 0.4.22.beta.4')
s.add_dependency('logging', '>= 1.5.0')
s.add_dependency('nats', '~> 0.4.22.beta.8')
s.add_dependency('posix-spawn', '~> 0.3.6')
s.add_development_dependency('rake', '~> 0.9.2')

@ -9,9 +9,8 @@ gem 'rack', :require => ["rack/utils", "rack/mime"]
gem 'rake'
gem 'thin'
gem 'yajl-ruby', :require => ['yajl', 'yajl/json_gem']
gem 'logging', '>= 1.5.0'

gem 'vcap_common', '~> 1.0.3'
gem 'vcap_common', '~> 1.0.8'
gem 'vcap_logging', :require => ['vcap/logging']

group :test do

@ -5,7 +5,7 @@ GEM
builder (3.0.0)
ci_reporter (1.6.4)
builder (>= 2.1.2)
daemons (1.1.5)
daemons (1.1.8)
diff-lcs (1.1.2)
em-http-request (1.0.0.beta.3)
addressable (>= 2.2.3)

@ -16,17 +16,14 @@ GEM
eventmachine
eventmachine (0.12.11.cloudfoundry.3)
http_parser.rb (0.5.1)
json_pure (1.6.4)
little-plugger (1.1.3)
logging (1.6.1)
little-plugger (>= 1.1.2)
nats (0.4.22.beta.4)
json_pure (1.6.5)
nats (0.4.22.beta.8)
daemons (>= 1.1.4)
eventmachine (>= 0.12.10)
json_pure (>= 1.6.1)
thin (>= 1.3.1)
posix-spawn (0.3.6)
rack (1.4.0)
rack (1.4.1)
rake (0.8.7)
rcov (0.9.9)
rspec (2.5.0)

@ -41,14 +38,14 @@ GEM
daemons (>= 1.0.9)
eventmachine (>= 0.12.6)
rack (>= 1.0.0)
vcap_common (1.0.3)
vcap_common (1.0.8)
eventmachine (~> 0.12.11.cloudfoundry.3)
logging (>= 1.5.0)
nats (~> 0.4.22.beta.4)
nats (~> 0.4.22.beta.8)
posix-spawn (~> 0.3.6)
thin (~> 1.3.1)
yajl-ruby (~> 0.8.3)
vcap_logging (0.1.3)
vcap_logging (1.0.0)
rake
yajl-ruby (0.8.3)

PLATFORMS

@ -59,13 +56,12 @@ DEPENDENCIES
ci_reporter
em-http-request (~> 1.0.0.beta.3)
eventmachine
logging (>= 1.5.0)
nats
rack
rake
rcov
rspec
thin
vcap_common (~> 1.0.3)
vcap_common (~> 1.0.8)
vcap_logging
yajl-ruby

54
dea/Rakefile

@ -1,47 +1,17 @@
require 'rake'
require 'rspec/core/rake_task'
require 'ci/reporter/rake/rspec'

desc "Run specs"
task "spec" => ["bundler:install:test", "test:spec"]

desc "Run specs using RCov"
task "spec:rcov" => ["bundler:install:test", "test:spec:rcov"]

desc "Run specs producing results for CI"
task "ci" => ["ci:spec"]

namespace "bundler" do
desc "Install gems"
task "install" do
sh("bundle install")
end

desc "Install gems for test"
task "install:test" do
sh("bundle install --without development production")
end

desc "Install gems for production"
task "install:production" do
sh("bundle install --without development test")
end

desc "Install gems for development"
task "install:development" do
sh("bundle install --without test production")
end
end

namespace "test" do
task "spec" do |t|
sh("cd spec && rake spec")
end

task "spec:rcov" do |t|
sh("cd spec && rake spec:rcov")
end
end

namespace :ci do
desc "Run specs producing results for CI"
task "spec" => ["ci:setup:rspec", "^spec"]
end

reports_dir = File.expand_path("spec_reports")

ENV['CI_REPORTS'] = reports_dir

RSpec::Core::RakeTask.new do |t|
t.pattern = "spec/**/*_spec.rb"
t.rspec_opts = ["--format", "documentation", "--colour"]
end

@ -2,6 +2,11 @@
# Base directory where all applications are staged and hosted
base_dir: /var/vcap.local/dea

# The DEA will no longer respond to discover/start requests once
# this usage threshold (in percent) has been exceeded on the filesystem housing
# the base_dir.
droplet_fs_percent_used_threshold: 95

# Local_route is the IP address of a well known server on your network, it
# is used to choose the right ip address (think of hosts that have multiple nics
# and IP addresses assigned to them) of the host running the DEA. Default

@ -16,6 +21,7 @@ mbus: nats://localhost:4222/
intervals:
# Time interval between heartbeats sent to the Health Manager
heartbeat: 10
advertise: 5
logging:
level: debug

@ -58,6 +64,21 @@ runtimes:
version: 0.4.12
version_flag: '-v'
environment:
debug_env:
run:
- NODE_ARGS="--debug=$VCAP_DEBUG_PORT"
suspend:
- NODE_ARGS="--debug-brk=$VCAP_DEBUG_PORT"
node06:
executable: node
version: 0.6.8
version_flag: '-v'
environment:
debug_env:
run:
- NODE_ARGS="--debug=$VCAP_DEBUG_PORT"
suspend:
- NODE_ARGS="--debug-brk=$VCAP_DEBUG_PORT"
java:
executable: java
version: 1.6.0

@ -78,7 +99,7 @@ runtimes:
version: ".* 5.8.3"
version_flag: '-version'
environment:
python26:
python2:
executable: python
version: 2.6.5
version_flag: '--version'

@ -5,6 +5,7 @@ filer_port: 12346
mbus: nats://localhost:4222/
intervals:
heartbeat: 10
advertise: 5
logging:
level: debug
multi_tenant: true

@ -10,7 +10,6 @@ end
require 'fcntl'
require 'logger'
require 'logging'
require 'pp'
require 'set'
require 'socket'

@ -73,6 +72,10 @@ module DEA
# How long to wait in between logging the structure of the apps directory in the event that a du takes excessively long
APPS_DUMP_INTERVAL = 30*60

DROPLET_FS_PERCENT_USED_THRESHOLD = 95
DROPLET_FS_PERCENT_USED_UPDATE_INTERVAL = 2

def initialize(config)
VCAP::Logging.setup_from_config(config['logging'])
@logger = VCAP::Logging.logger('dea')

@ -117,6 +120,13 @@ module DEA
@db_dir = File.join(@droplet_dir, 'db')
@app_state_file = File.join(@db_dir, APP_STATE_FILE)

# The DEA will no longer respond to discover/start requests once this usage
# threshold (in percent) has been exceeded on the filesystem housing the
# base_dir.
@droplet_fs_percent_used_threshold =
config['droplet_fs_percent_used_threshold'] || DROPLET_FS_PERCENT_USED_THRESHOLD
@dropet_fs_percent_used = 0

#prevent use of shared directory for droplets even if available.
@force_http_sharing = config['force_http_sharing']

@ -130,7 +140,8 @@ module DEA
end

@nats_uri = config['mbus']
@heartbeat_interval = config['intervals']['heartbeat']
@heartbeat_interval = config['intervals']['heartbeat'] || 10
@advertise_interval = config['intervals']['advertise'] || 5

# XXX(mjp) - Ugh, this is needed for VCAP::Component.register(). Find a better solution when time permits.
@config = config.dup()

@ -205,6 +216,7 @@ module DEA
['TERM', 'INT', 'QUIT'].each { |s| trap(s) { shutdown() } }
trap('USR2') { evacuate_apps_then_quit() }

NATS.on_error do |e|
@logger.error("EXITING! NATS error: #{e}")
@logger.error(e)

@ -219,8 +231,11 @@ module DEA
@logger.error(e)
end

NATS.start(:uri => @nats_uri) do
# Calculate how much disk is available before we respond to any messages
update_droplet_fs_usage(:blocking => true)
@logger.info("Initial usage of droplet fs is: #{@droplet_fs_percent_used}%")

NATS.start(:uri => @nats_uri) do
# Register ourselves with the system
status_config = @config['status'] || {}
VCAP::Component.register(:type => 'DEA',

@ -247,23 +262,27 @@ module DEA
NATS.subscribe("dea.#{uuid}.start") { |msg| process_dea_start(msg) }
NATS.subscribe('router.start') { |msg| process_router_start(msg) }
NATS.subscribe('healthmanager.start') { |msg| process_healthmanager_start(msg) }
NATS.subscribe('dea.locate') { |msg| process_dea_locate(msg) }

# Recover existing application state.
recover_existing_droplets
delete_untracked_instance_dirs

EM.add_periodic_timer(@heartbeat_interval) { send_heartbeat }
EM.add_periodic_timer(@advertise_interval) { send_advertise }
EM.add_timer(MONITOR_INTERVAL) { monitor_apps }
EM.add_periodic_timer(CRASHES_REAPER_INTERVAL) { crashes_reaper }
EM.add_periodic_timer(VARZ_UPDATE_INTERVAL) { snapshot_varz }
EM.add_periodic_timer(DROPLET_FS_PERCENT_USED_UPDATE_INTERVAL) { update_droplet_fs_usage }

NATS.publish('dea.start', @hello_message_json)
send_advertise
end
end

def send_heartbeat
return if @droplets.empty? || @shutting_down
heartbeat = {:droplets => []}
heartbeat = {:droplets => [], :dea => VCAP::Component.uuid }
@droplets.each_value do |instances|
instances.each_value do |instance|
heartbeat[:droplets] << generate_heartbeat(instance)

@ -272,8 +291,27 @@ module DEA
NATS.publish('dea.heartbeat', heartbeat.to_json)
end

def process_dea_locate(msg)
send_advertise
end

def space_available?
@num_clients < @max_clients && @reserved_mem < @max_memory && !droplet_fs_usage_threshold_exceeded?
end

def send_advertise
return if !space_available? || @shutting_down

advertise_message = { :id => VCAP::Component.uuid,
:available_memory => @max_memory - @reserved_mem,
:runtimes => @runtimes.keys}

NATS.publish('dea.advertise', advertise_message.to_json)
end

def send_single_heartbeat(instance)
heartbeat = {:droplets => [generate_heartbeat(instance)]}
heartbeat = {:droplets => [generate_heartbeat(instance)], :dea => VCAP::Component.uuid }
NATS.publish('dea.heartbeat', heartbeat.to_json)
end

@ -343,6 +381,8 @@ module DEA
@logger.debug('Ignoring request, shutting down.')
elsif @num_clients >= @max_clients || @reserved_mem > @max_memory
@logger.debug('Ignoring request, not enough resources.')
elsif droplet_fs_usage_threshold_exceeded?
@logger.warn("Droplet FS has exceeded usage threshold, ignoring request")
else
# Check that we properly support the runtime requested
unless runtime_supported? message_json['runtime']

@ -408,7 +448,9 @@ module DEA
:credentials => @file_auth,
:staged => instance[:staged],
:debug_ip => instance[:debug_ip],
:debug_port => instance[:debug_port]
:debug_port => instance[:debug_port],
:console_ip => instance[:console_ip],
:console_port => instance[:console_port]
}
if include_stats && instance[:state] == :RUNNING
response[:stats] = {

@ -504,6 +546,7 @@ module DEA
runtime = message_json['runtime']
framework = message_json['framework']
debug = message_json['debug']
console = message_json['console']

# Limits processing
mem = DEFAULT_APP_MEM

@ -524,6 +567,9 @@ module DEA
elsif @reserved_mem + mem > @max_memory || @num_clients >= @max_clients
@logger.info('Do not have room for this client application')
return
elsif droplet_fs_usage_threshold_exceeded?
@logger.warn("Droplet FS usage has exceeded the threshold")
return
end

if (!sha1 || !bits_file || !bits_uri)

@ -571,16 +617,18 @@ module DEA

start_operation = lambda do
@logger.debug('Completed download')

port = grab_port
if port
instance[:port] = port
if not instance[:uris].empty?
port = grab_port
if port
instance[:port] = port
else
@logger.warn("Unable to allocate port for instance#{instance[:log_id]}")
stop_droplet(instance)
return
end
else
@logger.warn("Unable to allocate port for instance#{instance[:log_id]}")
stop_droplet(instance)
return
@logger.info("No URIs found for application. Not assigning a port")
end

if debug
debug_port = grab_port
if debug_port

@ -594,8 +642,21 @@ module DEA
end
end

if console
console_port = grab_port
if console_port
instance[:console_ip] = VCAP.local_ip
instance[:console_port] = console_port
else
@logger.warn("Unable to allocate console port for instance#{instance[:log_id]}")
stop_droplet(instance)
return
end
end

@logger.info("Starting up instance #{instance[:log_id]} on port:#{instance[:port]} " +
"#{"debuger:" if instance[:debug_port]}#{instance[:debug_port]}")
"#{"debuger:" if instance[:debug_port]}#{instance[:debug_port]}" +
"#{"console:" if instance[:console_port]}#{instance[:console_port]}")
@logger.debug("Clients: #{@num_clients}")
@logger.debug("Reserved Memory Usage: #{@reserved_mem} MB of #{@max_memory} MB TOTAL")

@ -644,6 +705,7 @@ module DEA
exec_operation = proc do |process|
process.send_data("cd #{instance_dir}\n")
if @secure || @enforce_ulimit
process.send_data("renice 0 $$\n")
process.send_data("ulimit -m #{mem_kbytes} 2> /dev/null\n") # ulimit -m takes kb, soft enforce
process.send_data("ulimit -v 3000000 2> /dev/null\n") # virtual memory at 3G, this will be enforced
process.send_data("ulimit -n #{num_fds} 2> /dev/null\n")

@ -652,7 +714,11 @@ module DEA
process.send_data("umask 077\n")
end
app_env.each { |env| process.send_data("export #{env}\n") }
process.send_data("./startup -p #{instance[:port]}\n")
if instance[:port]
process.send_data("./startup -p #{instance[:port]}\n")
else
process.send_data("./startup\n")
end
process.send_data("exit\n")
end

@ -840,6 +906,7 @@ module DEA
instance[:resources_tracked] = true
@reserved_mem += instance_mem_usage_in_mb(instance)
@num_clients += 1
send_advertise
end

def remove_instance_resources(instance)

@ -847,6 +914,7 @@ module DEA
instance[:resources_tracked] = false
@reserved_mem -= instance_mem_usage_in_mb(instance)
@num_clients -= 1
send_advertise
end

def instance_mem_usage_in_mb(instance)

@ -862,8 +930,10 @@ module DEA
if state_file
state_file = File.join(instance[:dir], state_file)
detect_state_ready(instance, state_file, &block)
else
elsif instance[:port]
detect_port_ready(instance, &block)
else
block.call(true)
end
end

@ -1088,6 +1158,8 @@ module DEA
env << "VCAP_APP_PORT='#{instance[:port]}'"
env << "VCAP_DEBUG_IP='#{instance[:debug_ip]}'"
env << "VCAP_DEBUG_PORT='#{instance[:debug_port]}'"
env << "VCAP_CONSOLE_IP='#{instance[:console_ip]}'"
env << "VCAP_CONSOLE_PORT='#{instance[:console_port]}'"

if vars = debug_env(instance)
@logger.info("Debugger environment variables: #{vars.inspect}")

@ -1122,7 +1194,6 @@ module DEA
env << "#{k}=#{v}"
end
end

return env
end

@ -1187,12 +1258,17 @@ module DEA
if instance[:pid] || [:STARTING, :RUNNING].include?(instance[:state])
instance[:state] = :STOPPED unless instance[:state] == :CRASHED
instance[:state_timestamp] = Time.now.to_i
stop_cmd = File.join(instance[:dir], 'stop')
stop_cmd = "su -c #{stop_cmd} #{username}" if @secure
stop_cmd = "#{stop_cmd} #{instance[:pid]} 2> /dev/null"
stop_script = File.join(instance[:dir], 'stop')
insecure_stop_cmd = "#{stop_script} #{instance[:pid]} 2> /dev/null"
stop_cmd =
if @secure
"su -c \"#{insecure_stop_cmd}\" #{username}"
else
insecure_stop_cmd
end

unless (RUBY_PLATFORM =~ /darwin/ and @secure)
@logger.debug("Executing stop script: '#{stop_cmd}'")
@logger.debug("Executing stop script: '#{stop_cmd}', instance state is #{instance[:state]}")
# We can't make 'stop_cmd' into EM.system because of a race with
# 'cleanup_droplet'
@logger.debug("Stopping instance PID:#{instance[:pid]}")

@ -1618,7 +1694,7 @@ module DEA
expanded_exec.strip!

# java prints to stderr, so munch them both..
version_check = `#{expanded_exec} #{version_flag} 2>&1`.strip!
version_check = `env -i HOME=$HOME #{expanded_exec} #{version_flag} 2>&1`.strip!
unless $? == 0
@logger.info(" #{pname} FAILED, executable '#{runtime['executable']}' not found")
next

@ -1630,7 +1706,7 @@ module DEA
if /#{runtime['version']}/ =~ version_check
# Additional checks should return true
if runtime['additional_checks']
additional_check = `#{runtime['executable']} #{runtime['additional_checks']} 2>&1`
additional_check = `env -i HOME=$HOME #{runtime['executable']} #{runtime['additional_checks']} 2>&1`
unless additional_check =~ /true/i
@logger.info(" #{pname} FAILED, additional checks failed")
end

@ -1664,7 +1740,6 @@ module DEA
FileUtils.rm_f(test_file)
end

def run_command(cmd)
outdir = Dir.mktmpdir
stderr_path = File.join(outdir, 'stderr.log')

@ -1674,6 +1749,41 @@ module DEA
[$?, stdout, stderr]
end

def droplet_fs_usage_threshold_exceeded?
@droplet_fs_percent_used > @droplet_fs_percent_used_threshold
end

def update_droplet_fs_usage(opts={})
df_cmd = "df #{@droplet_dir}"

cont = proc do |output, status|
raise "Failed executing #{df_cmd}" unless status.success?

percent_used = parse_df_percent_used(output)
raise "Failed parsing df output: #{output}" unless percent_used

@droplet_fs_percent_used = percent_used
end

if opts[:blocking]
output = `#{df_cmd}`
cont.call(output, $?)
else
EM.system(df_cmd, cont)
end
end

def parse_df_percent_used(output)
fields = output.strip.split(/\s+/)
return nil unless fields.count == 13

if fields[11] =~ /^(\d+)%$/
Integer($1)
else
nil
end
end

end

end

@ -1,41 +0,0 @@
require 'tempfile'

require 'rubygems'
require 'bundler/setup'
Bundler.require(:default, :test)

require 'rake'
require 'rspec'
require 'rspec/core/rake_task'
require 'ci/reporter/rake/rspec'

coverage_dir = File.expand_path(File.join(File.dirname(__FILE__), "..", "spec_coverage"))
reports_dir = File.expand_path(File.join(File.dirname(__FILE__), "..", "spec_reports"))
dump_file = File.join(Dir.tmpdir, "dea.rcov")
ignore_pattern = 'spec,[.]bundle,[/]gems[/]'

ENV['CI_REPORTS'] = reports_dir

desc "Run specs using RCov"
task "spec:rcov" => ["ci:setup:rspec", "spec:rcov_internal", "convert_rcov_to_clover"]

RSpec::Core::RakeTask.new do |t|
t.pattern = "**/*_spec.rb"
t.rspec_opts = ["--format", "documentation", "--colour"]
end

desc "Run specs using RCov (internal, use spec:rcov instead)"
RSpec::Core::RakeTask.new("spec:rcov_internal") do |t|
sh("rm -f #{dump_file}")
t.pattern = "**/*_spec.rb"
t.rspec_opts = ["--format", "progress", "--colour"]
t.rcov = true
t.rcov_opts = %W{--exclude osx\/objc,gems\/,spec\/,features\/ -o "#{coverage_dir}"}
end

task "convert_rcov_to_clover" do |t|
analyzer = File.join(File.dirname(__FILE__), "..", "..", "tests", "common", "rcov_analyzer.rb")
clover_output = File.join(coverage_dir, "clover.xml")
sh("ruby #{analyzer} #{dump_file} #{ignore_pattern} > #{clover_output}")
sh("rm -f #{dump_file}")
end

@ -2,6 +2,7 @@
require File.join(File.dirname(__FILE__), 'spec_helper')

require 'digest/sha1'
require 'erb'
require 'fileutils'
require 'nats/client'
require 'uri'

@ -63,12 +64,13 @@ describe 'DEA Agent' do
'pid' => File.join(@run_dir, 'dea.pid'),
'runtimes' => {
'ruby18' => {
'executable' => '/usr/bin/ruby',
'executable' => ENV["VCAP_TEST_DEA_RUBY18"] || '/usr/bin/ruby1.8',
'version' => '1.8.7',
'version_flag' => "-e 'puts RUBY_VERSION'"
}
},
'disable_dir_cleanup' => true,
'droplet_fs_percent_used_threshold' => 100, # don't fail if a developer's machine is almost full
}
@dea_config_file = File.join(@run_dir, 'dea.config')
File.open(@dea_config_file, 'w') {|f| YAML.dump(@dea_cfg, f) }

@ -138,6 +140,11 @@ describe 'DEA Agent' do
send_request(@nats_server.uri, 'dea.discover', disc_msg).should_not be_nil
end

it 'should respond to a locate message' do
send_request(@nats_server.uri, 'dea.locate', {})
receive_message(@nats_server.uri, 'dea.advertise').should_not be_nil
end

it 'should start a droplet when requested' do
droplet_info = start_droplet(@nats_server.uri, @droplet)
droplet_info.should_not be_nil

@ -204,7 +211,7 @@ describe 'DEA Agent' do
ret = nil
em_run_with_timeout do
NATS.start(:uri => uri) do
NATS.subscribe('dea.heartbeat') do |msg|
NATS.subscribe(subj) do |msg|
ret = msg
EM.stop
end

@ -263,21 +270,30 @@ describe 'DEA Agent' do
staging_dir = File.join(base_dir, 'staging')
FileUtils.mkdir(staging_dir)
File.exists?(staging_dir).should be_true
{'start_tcpserver.rb' => 'startup',
'stop_tcpserver.rb' => 'stop',
{'start_tcpserver.erb' => 'startup',
}.each do |src, dst|
src = File.join(File.dirname(__FILE__), src)
dst = File.join(staging_dir, dst)
FileUtils.cp(src, dst)
render_control_script(src, dst)
FileUtils.chmod(0755, dst)
File.exists?(dst).should be_true
end
system("tar czf #{bundlename} -C #{staging_dir} startup -C #{staging_dir} stop")
system("tar czf #{bundlename} -C #{staging_dir} startup")
File.exists?(bundlename).should be_true
FileUtils.rm_rf(staging_dir)
File.exists?(staging_dir).should be_false
end

def render_control_script(template_path, dst_path)
dea_ruby_path = ENV['VCAP_TEST_DEA_RUBY18']
template_contents = File.read(template_path)
template = ERB.new(template_contents)
rendered = template.result(binding())
File.open(dst_path, 'w+') do |f|
f.write(rendered)
end
end

def droplet_for_bundle(bundle_filename)
{
'sha1' => Digest::SHA1.hexdigest(File.read(bundle_filename)),

@ -1,6 +1,7 @@
#!/usr/bin/env ruby
#!<%= dea_ruby_path || '/usr/bin/env ruby'%>
# Copyright (c) 2009-2011 VMware, Inc.
require 'optparse'
require 'fileutils'
require 'socket'

port = nil

@ -13,11 +14,18 @@ parser.parse(ARGV)

fail 'Must supply port' unless port

PID_FILE = '/tmp/dea_agent_test_tcpserver.pid'
stop_contents =<<-EOT
#!/bin/sh
echo Killing pid #{Process.pid}
kill -9 #{Process.pid}
EOT

File.open(PID_FILE, 'w+') do |f|
f.write("%d\n" % (Process.pid()))

stop_path = File.expand_path('../stop', __FILE__)
File.open(stop_path, 'w+') do |f|
f.write(stop_contents)
end
FileUtils.chmod(0755, stop_path)

server = TCPServer.new('127.0.0.1', port)
while true do

@ -1,21 +0,0 @@
#!/usr/bin/env ruby
# Copyright (c) 2009-2011 VMware, Inc.
puts "STOP SCRIPT!"

def process_running?(pid)
return false unless pid && (pid > 0)
output = %x[ps -o rss= -p #{pid}]
return true if ($? == 0 && !output.empty?)
# fail otherwise..
return false
end

PID_FILE = '/tmp/dea_agent_test_tcpserver.pid'

exit 1 unless File.exists? PID_FILE

pid = File.read(PID_FILE).to_i
FileUtils.rm_f(PID_FILE)

exit 0 unless process_running?(pid)
Process.kill('TERM', pid)
Some files were not shown because too many files changed in this diff.