Merge pull request #132 from edx/bbeggs/mongodb-3.0-PLAT-730

Update Mongoid to 5.x release (MongoDB 3.x compatibility)

Commit 26fc3a60ff

AUTHORS (1 changed line)
@@ -22,3 +22,4 @@ Alan Boudreault <alan@alanb.ca>
 Matjaz Gregoric <mtyaka@gmail.com>
 Ben McMorran <ben.mcmorran@gmail.com>
 Bill DeRusha <bill@edx.org>
+Brian Beggs <macdiesel@gmail.com>
Gemfile (30 changed lines)
@@ -14,25 +14,21 @@ gem 'sinatra'
 gem 'yajl-ruby'

 gem 'ampex'

 gem 'mongo'
-gem 'moped', "1.5.1"
-gem 'mongoid', "3.0.15"
+gem 'mongoid', "~>5.0"
+gem 'bson', '~>3.1'
 gem 'bson_ext'
+gem 'protected_attributes'

 gem 'delayed_job'
-gem 'delayed_job_mongoid', :git => 'https://github.com/dementrock/delayed_job_mongoid.git'
+gem 'delayed_job_mongoid'

-gem "enumerize", "~>0.8.0"
-gem 'mongoid-tree', :git => 'https://github.com/dementrock/mongoid-tree.git'
-gem 'voteable_mongo', :git => 'https://github.com/dementrock/voteable_mongo.git'
-gem 'mongoid_magic_counter_cache', :git => 'https://github.com/dementrock/mongoid-magic-counter-cache.git'
-gem 'kaminari', :require => 'kaminari/sinatra', :git => 'https://github.com/dementrock/kaminari.git'
+gem "enumerize"
+gem 'mongoid-tree', :git => 'https://github.com/macdiesel/mongoid-tree'
+gem 'rs_voteable_mongo', :git => 'https://github.com/navneet35371/voteable_mongo.git'
+gem 'mongoid_magic_counter_cache'

 gem 'faker'
-gem 'will_paginate_mongoid'
+gem 'will_paginate_mongoid', "~>2.0"
 gem 'rdiscount'
 gem 'nokogiri'
@@ -49,12 +45,14 @@ group :test do
   gem 'guard'
   gem 'guard-unicorn'
   gem 'simplecov', :require => false
-  gem 'database_cleaner'
+  # database_cleaner 1.5.1 which is compatible with Mongoid 5 has not been released
+  # to rubygems yet, so pull it from github.
+  gem 'database_cleaner', :git => 'https://github.com/DatabaseCleaner/database_cleaner', :ref => 'b87f00320f8aa0f7e499d183128f05ce29cedc33'
 end

 gem 'newrelic_rpm'
-gem 'newrelic_moped'
 gem 'unicorn'
-gem "rack-timeout", "0.1.0beta3"
+gem "rack-timeout"
 gem "i18n"
 gem "rack-contrib", :git => 'https://github.com/rack/rack-contrib.git', :ref => '6ff3ca2b2d988911ca52a2712f6a7da5e064aa27'
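Editor's note: the dependency swap above replaces the Moped-based stack (moped, mongoid 3.0.15) with Mongoid 5, which talks to MongoDB through the official mongo 2.x driver. A minimal sketch of what that driver exposes, assuming a local MongoDB; the collection and field names are illustrative, not part of the commit:

# Illustration only: Mongoid 5 delegates to the mongo 2.x driver, so the
# low-level handle is a Mongo::Client rather than a Moped::Session.
require 'mongo'

client = Mongo::Client.new(['localhost:27017'], database: 'cs_comments_service_development')
client[:contents].find('_type' => 'CommentThread').limit(5).each do |doc|
  puts doc['_id']
end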
Gemfile.lock
@@ -1,39 +1,23 @@
 GIT
-  remote: https://github.com/dementrock/delayed_job_mongoid.git
-  revision: 48b1420d59bc01e0b1aba1c2ad66bda4a5e04b9a
+  remote: https://github.com/DatabaseCleaner/database_cleaner
+  revision: b87f00320f8aa0f7e499d183128f05ce29cedc33
+  ref: b87f00320f8aa0f7e499d183128f05ce29cedc33
   specs:
-    delayed_job_mongoid (1.0.8)
-      delayed_job (~> 3.0.0)
-      mongoid (>= 3.0.0.rc)
+    database_cleaner (1.5.1)

 GIT
-  remote: https://github.com/dementrock/kaminari.git
-  revision: 82a38e07db1ca1598c8daf073a8f6be22ae714d6
+  remote: https://github.com/macdiesel/mongoid-tree
+  revision: b381dd56f1b3b061df8f4b4181d5440dea1602d1
   specs:
-    kaminari (0.13.0)
-      actionpack (>= 3.0.0)
-      activesupport (>= 3.0.0)
+    mongoid-tree (2.0.0)
+      mongoid (>= 4.0, <= 5.0)

 GIT
-  remote: https://github.com/dementrock/mongoid-magic-counter-cache.git
-  revision: 28bc5e617cab19187b323e7d97d49fe73a7de68a
+  remote: https://github.com/navneet35371/voteable_mongo.git
+  revision: 55fcfe76705ab5da1c9e5670594331b33954c545
   specs:
-    mongoid_magic_counter_cache (0.1.1)
-      mongoid (>= 3.0)
-      rake

-GIT
-  remote: https://github.com/dementrock/mongoid-tree.git
-  revision: 5aa7a4ee16cd90dbbcac3ad702446d2119e971df
-  specs:
-    mongoid-tree (1.0.0)
-      mongoid (>= 3.0, <= 4.0)

-GIT
-  remote: https://github.com/dementrock/voteable_mongo.git
-  revision: 538e86856daa1c180ba80b7c6f2805e531ba420c
-  specs:
-    voteable_mongo (0.9.3)
+    rs_voteable_mongo (1.0.2)
+      mongoid (>= 3.0, <= 5.0)

 GIT
   remote: https://github.com/rack/rack-contrib.git
@@ -46,39 +30,32 @@ GIT
 GEM
   remote: https://rubygems.org/
   specs:
-    actionpack (3.2.8)
-      activemodel (= 3.2.8)
-      activesupport (= 3.2.8)
-      builder (~> 3.0.0)
-      erubis (~> 2.7.0)
-      journey (~> 1.0.4)
-      rack (~> 1.4.0)
-      rack-cache (~> 1.2)
-      rack-test (~> 0.6.1)
-      sprockets (~> 2.1.3)
-    activemodel (3.2.8)
-      activesupport (= 3.2.8)
-      builder (~> 3.0.0)
-    activesupport (3.2.8)
-      i18n (~> 0.6)
-      multi_json (~> 1.0)
     ampex (2.0.0)
       blankslate
-    ansi (1.4.3)
     blankslate (2.1.2.4)
-    bson (1.6.4)
-    bson_ext (1.6.4)
-      bson (~> 1.6.4)
-    builder (3.0.4)
+    activemodel (4.2.4)
+      activesupport (= 4.2.4)
+      builder (~> 3.1)
+    activesupport (4.2.4)
+      i18n (~> 0.7)
+      json (~> 1.7, >= 1.7.7)
+      minitest (~> 5.1)
+      thread_safe (~> 0.3, >= 0.3.4)
+      tzinfo (~> 1.1)
+    ansi (1.5.0)
+    bson (3.2.4)
+    bson_ext (1.5.1)
+    builder (3.2.2)
     coderay (1.0.7)
     dalli (2.1.0)
-    database_cleaner (1.2.0)
-    delayed_job (3.0.3)
-      activesupport (~> 3.0)
+    delayed_job (4.1.1)
+      activesupport (>= 3.0, < 5.0)
+    delayed_job_mongoid (2.2.0)
+      delayed_job (>= 3.0, < 5)
+      mongoid (>= 3.0, < 6)
+      mongoid-compatibility
     diff-lcs (1.1.3)
-    enumerize (0.8.0)
+    domain_name (0.5.24)
+      unf (>= 0.0.5, < 1.0.0)
+    enumerize (0.11.0)
+      activesupport (>= 3.2)
-    erubis (2.7.0)
     faker (1.0.1)
       i18n (~> 0.4)
     guard (1.3.2)
@@ -87,47 +64,54 @@ GEM
     guard-unicorn (0.0.7)
       guard (>= 1.1)
     hashr (0.0.22)
-    hike (1.2.1)
-    i18n (0.6.9)
-    journey (1.0.4)
-    kgio (2.7.4)
+    http-cookie (1.0.2)
+      domain_name (~> 0.5)
+    i18n (0.7.0)
+    json (1.8.3)
+    kgio (2.10.0)
     listen (0.5.0)
     method_source (0.8)
-    mime-types (2.2)
-    mongo (1.6.4)
-      bson (~> 1.6.4)
-    mongoid (3.0.15)
-      activemodel (~> 3.1)
-      moped (~> 1.1)
-      origin (~> 1.0)
-      tzinfo (~> 0.3.22)
-    moped (1.5.1)
-    multi_json (1.10.0)
-    newrelic_moped (1.0.0)
-      moped
-      newrelic_rpm (>= 3.7)
-    newrelic_rpm (3.11.2.286)
+    mime-types (2.6.1)
+    minitest (5.8.1)
+    mongo (2.1.1)
+      bson (~> 3.0)
+    mongoid (5.0.0)
+      activemodel (~> 4.0)
+      mongo (~> 2.1)
+      origin (~> 2.1)
+      tzinfo (>= 0.3.37)
+    mongoid-compatibility (0.3.1)
+      activesupport
+      mongoid (>= 2.0)
+    mongoid_magic_counter_cache (1.1.1)
+      mongoid
+      rake
+    multi_json (1.11.2)
+    netrc (0.10.3)
+    newrelic_rpm (3.13.2.302)
     nokogiri (1.5.5)
-    origin (1.1.0)
+    origin (2.1.1)
+    protected_attributes (1.1.3)
+      activemodel (>= 4.0.1, < 5.0)
     pry (0.9.10)
       coderay (~> 1.0.5)
       method_source (~> 0.8)
       slop (~> 3.3.1)
     pry-nav (0.2.2)
       pry (~> 0.9.10)
-    rack (1.4.1)
-    rack-cache (1.2)
-      rack (>= 0.4)
+    rack (1.6.4)
     rack-protection (1.2.0)
       rack
-    rack-test (0.6.1)
+    rack-test (0.6.3)
+      rack (>= 1.0)
-    rack-timeout (0.1.0beta3)
-    raindrops (0.10.0)
-    rake (10.3.1)
+    rack-timeout (0.3.2)
+    raindrops (0.15.0)
+    rake (10.4.2)
     rdiscount (1.6.8)
-    rest-client (1.6.7)
-      mime-types (>= 1.16)
+    rest-client (1.8.0)
+      http-cookie (>= 1.0.2, < 2.0)
+      mime-types (>= 1.16, < 3.0)
+      netrc (~> 0.7)
     rspec (2.11.0)
       rspec-core (~> 2.11.0)
       rspec-expectations (~> 2.11.0)
@@ -145,11 +129,8 @@ GEM
       rack-protection (~> 1.2)
       tilt (~> 1.3, >= 1.3.3)
     slop (3.3.2)
-    sprockets (2.1.3)
-      hike (~> 1.2)
-      rack (~> 1.0)
-      tilt (~> 1.1, != 1.3.0)
     thor (0.16.0)
+    thread_safe (0.3.5)
     tilt (1.3.3)
     tire (0.6.2)
       activemodel (>= 3.0)
@@ -161,56 +142,57 @@ GEM
       rest-client (~> 1.6)
     tire-contrib (0.1.1)
       tire
-    tzinfo (0.3.38)
-    unicorn (4.3.1)
+    tzinfo (1.2.2)
+      thread_safe (~> 0.1)
+    unf (0.1.4)
+      unf_ext
+    unf_ext (0.0.7.1)
+    unicorn (4.9.0)
       kgio (~> 2.6)
       rack
       raindrops (~> 0.7)
-    will_paginate (3.0.4)
-    will_paginate_mongoid (1.1.0)
-      mongoid (>= 2.4)
+    will_paginate (3.0.7)
+    will_paginate_mongoid (2.0.1)
+      mongoid
+      will_paginate (~> 3.0)
-    yajl-ruby (1.1.0)
+    yajl-ruby (1.2.1)

 PLATFORMS
   ruby

 DEPENDENCIES
   ampex
+  bson (~> 3.1)
   bson_ext
   bundler
   dalli
-  database_cleaner
+  database_cleaner!
   delayed_job
-  delayed_job_mongoid!
-  enumerize (~> 0.8.0)
+  delayed_job_mongoid
+  enumerize
   faker
   guard
   guard-unicorn
   i18n
-  kaminari!
   mongo
-  mongoid (= 3.0.15)
+  mongoid (~> 5.0)
   mongoid-tree!
-  mongoid_magic_counter_cache!
-  moped (= 1.5.1)
-  newrelic_moped
+  mongoid_magic_counter_cache
   newrelic_rpm
   nokogiri
+  protected_attributes
   pry
   pry-nav
   rack-contrib!
   rack-test
-  rack-timeout (= 0.1.0beta3)
+  rack-timeout
   rake
   rdiscount
   rest-client
+  rs_voteable_mongo!
   rspec
   simplecov
   sinatra
   tire (= 0.6.2)
   tire-contrib
   unicorn
-  voteable_mongo!
-  will_paginate_mongoid
+  will_paginate_mongoid (~> 2.0)
   yajl-ruby
Rakefile (4 changed lines)
@@ -159,7 +159,7 @@ namespace :db do
   task :seed_fast => :environment do
     ADDITIONAL_COMMENTS_PER_THREAD = 20

-    config = YAML.load_file("config/mongoid.yml")[Sinatra::Base.environment]["sessions"]["default"]
+    config = YAML.load_file("config/mongoid.yml")[Sinatra::Base.environment]["clients"]["default"]
     connnection = Mongo::Connection.new(config["hosts"][0].split(":")[0], config["hosts"][0].split(":")[1])
     db = Mongo::Connection.new.db(config["database"])
     coll = db.collection("contents")

@@ -248,7 +248,6 @@ namespace :search do
   end

   def import_from_cursor(cursor, index, opts)
-    Mongoid.identity_map_enabled = true
     tot = cursor.count
     cnt = 0
     t = Time.now

@@ -259,7 +258,6 @@ namespace :search do
         LOG.info "#{index.name}: imported #{cnt} of #{tot} (#{pct_complete}% complete after #{elapsed_secs} seconds)"
       end
       cnt += documents.length
-      Mongoid::IdentityMap.clear
       sleep opts[:sleep_time]
       documents
     end
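Editor's note: the seed task above now reads the renamed "clients" block. For context, a sketch of how that block can be consumed with the mongo 2.x driver API (the retained Mongo::Connection call in the diff is the legacy 1.x API); file path and environment follow the Rakefile, everything else is illustrative:

# Sketch: read the Mongoid 5 "clients" config and open a 2.x driver connection.
require 'yaml'
require 'mongo'

config = YAML.load_file('config/mongoid.yml')['development']['clients']['default']
client = Mongo::Client.new(config['hosts'], database: config['database'])
puts client[:contents].count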
@@ -8,7 +8,11 @@ put "#{APIPREFIX}/comments/:comment_id" do |comment_id|
   if params.has_key?("endorsed")
     new_endorsed_val = Boolean.mongoize(params["endorsed"])
     if new_endorsed_val != comment.endorsed
-      endorsement = {:user_id => params["endorsement_user_id"], :time => DateTime.now}
+      if params["endorsement_user_id"].nil?
+        endorsement = nil
+      else
+        endorsement = {:user_id => params["endorsement_user_id"], :time => DateTime.now}
+      end
       updated_content["endorsement"] = new_endorsed_val ? endorsement : nil
     end
   end
@@ -1,5 +1,3 @@
-require 'new_relic/agent/method_tracer'
-
 get "#{APIPREFIX}/search/threads" do
   local_params = params # Necessary for params to be available inside blocks
   group_ids = get_group_ids_from_params(local_params)
@@ -1,5 +1,3 @@
-require 'new_relic/agent/method_tracer'
-
 post "#{APIPREFIX}/users" do
   user = User.new(external_id: params["id"])
   user.username = params["username"]
app.rb (80 changed lines)
@@ -19,29 +19,6 @@ module CommentService
   API_PREFIX = "/api/#{API_VERSION}"
 end

-if ["staging", "production", "loadtest", "edgestage","edgeprod"].include? environment
-  require 'newrelic_rpm'
-  require 'new_relic/agent/method_tracer'
-  Moped::Session.class_eval do
-    include NewRelic::Agent::MethodTracer
-    add_method_tracer :new
-    add_method_tracer :use
-    add_method_tracer :login
-  end
-  Moped::Cluster.class_eval do
-    include NewRelic::Agent::MethodTracer
-    add_method_tracer :with_primary
-    add_method_tracer :nodes
-  end
-  Moped::Node.class_eval do
-    include NewRelic::Agent::MethodTracer
-    add_method_tracer :command
-    add_method_tracer :connect
-    add_method_tracer :flush
-    add_method_tracer :refresh
-  end
-end
-
 if ENV["ENABLE_GC_PROFILER"]
   GC::Profiler.enable
 end
@@ -56,11 +33,12 @@ end

 Mongoid.load!("config/mongoid.yml", environment)
 Mongoid.logger.level = Logger::INFO
-Moped.logger.level = ENV["ENABLE_MOPED_DEBUGGING"] ? Logger::DEBUG : Logger::INFO
+Mongo::Logger.logger.level = ENV["ENABLE_MONGO_DEBUGGING"] ? Logger::DEBUG : Logger::INFO

 # set up i18n
 I18n.load_path += Dir[File.join(File.dirname(__FILE__), 'locale', '*.yml').to_s]
 I18n.default_locale = CommentService.config[:default_locale]
 I18n.enforce_available_locales = false
 I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
 use Rack::Locale
@@ -97,27 +75,6 @@ before do
   content_type "application/json"
 end

-if ENV["ENABLE_IDMAP_LOGGING"]
-
-  after do
-    idmap = Mongoid::Threaded.identity_map
-    vals = {
-      "pid" => Process.pid,
-      "dyno" => ENV["DYNO"],
-      "request_id" => params[:request_id]
-    }
-    idmap.each {|k, v| vals["idmap_count_#{k.to_s}"] = v.size }
-    logger.info vals.map{|e| e.join("=") }.join(" ")
-  end
-
-end
-
-# Enable the identity map. The middleware ensures that the identity map is
-# cleared for every request.
-Mongoid.identity_map_enabled = true
-use Rack::Mongoid::Middleware::IdentityMap
-
-
 # use yajl implementation for to_json.
 # https://github.com/brianmario/yajl-ruby#json-gem-compatibility-api
 #
@@ -128,16 +85,27 @@ require 'yajl/json_gem'

 # patch json serialization of ObjectIds to work properly with yajl.
 # See https://groups.google.com/forum/#!topic/mongoid/MaXFVw7D_4s
-module Moped
-  module BSON
-    class ObjectId
-      def to_json
-        self.to_s.to_json
-      end
-    end
-  end
-end
+# Note that BSON was moved from Moped::BSON::ObjectId to BSON::ObjectId
+module BSON
+  class ObjectId
+    def to_json
+      self.to_s.to_json
+    end
+  end
+end
+
+# Patch json serialization of Time Objects
+class Time
+  # Returns a hash, that will be turned into a JSON object and represent this
+  # object.
+  # Note that this was done to prevent milliseconds from showing up in the JSON response thus breaking
+  # API compatibility for downstream clients.
+  def to_json(*)
+    '"' + utc().strftime("%Y-%m-%dT%H:%M:%SZ") + '"'
+  end
+end

 # these files must be required in order
 require './api/search'
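Editor's note: a small usage illustration of the Time patch above; the example timestamp is arbitrary:

# With the Time#to_json patch loaded, timestamps serialize without milliseconds.
Time.utc(2015, 10, 27, 12, 0, 0).to_json
# => "\"2015-10-27T12:00:00Z\""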
@@ -158,7 +126,7 @@ if RACK_ENV.to_s == "development"
   end
 end

-error Moped::Errors::InvalidObjectId do
+error Mongo::Error::InvalidDocument do
   error 400, [t(:requested_object_not_found)].to_json
 end
@@ -170,10 +138,10 @@ error ArgumentError do
   error 400, [env['sinatra.error'].message].to_json
 end

-CommentService.blocked_hashes = Content.mongo_session[:blocked_hash].find.select(hash: 1).each.map {|d| d["hash"]}
+CommentService.blocked_hashes = Content.mongo_client[:blocked_hash].find(nil, projection: {hash: 1}).map {|d| d["hash"]}

 def get_db_is_master
-  Mongoid::Sessions.default.command(isMaster: 1)
+  Mongoid::Clients.default.command(isMaster: 1)
 end

 def get_es_status
@@ -186,7 +154,7 @@ get '/heartbeat' do
   db_ok = false
   begin
     res = get_db_is_master
-    db_ok = ( res["ismaster"] == true and Integer(res["ok"]) == 1 )
+    db_ok = res.ok? && res.documents.first['ismaster'] == true
   rescue
   end
   error 500, JSON.generate({"OK" => false, "check" => "db"}) unless db_ok
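Editor's note: assuming Mongoid is loaded and configured as above, the new heartbeat check works roughly like this (sketch, not part of the commit):

# With the mongo 2.x driver, Database#command returns a result object
# instead of a plain hash, hence the ok?/documents calls above.
res = Mongoid::Clients.default.command(isMaster: 1)
res.ok?                          # => true when the command succeeded
res.documents.first['ismaster']  # => true when connected to a primary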
@@ -221,4 +189,4 @@ get '/selftest' do
       "#{ex.backtrace.first}: #{ex.message} (#{ex.class})\n\t#{ex.backtrace[1..-1].join("\n\t")}"
     ]
   end
-end
+end
@@ -14,5 +14,8 @@ require "rack-timeout"
 use Rack::Timeout # Call as early as possible so rack-timeout runs before other middleware.
 Rack::Timeout.timeout = 20

+require "mongoid"
+use Mongoid::QueryCache::Middleware
+
 require './app'
 run Sinatra::Application
config/mongoid.yml
@@ -1,51 +1,62 @@
-development:
-  sessions:
-    default:
-      database: cs_comments_service_development
-      hosts:
-        - localhost:27017
-test:
-  sessions:
-    default:
-      database: cs_comments_service_test
-      hosts:
-        - localhost:27017

-common: &default_session
-  uri: <%= ENV['MONGOHQ_URL'] %>
+common: &default_client
   options:
-    skip_version_check: true
-    safe: true
-    consistency: strong
+    write:
+      w: 1
+    read:
+      mode: :primary
     max_retries: <%= ENV['MONGOID_MAX_RETRIES'] || 1 %>
     retry_interval: <%= ENV['MONGOID_RETRY_INTERVAL'] || 0 %>
     timeout: <%= ENV['MONGOID_TIMEOUT'] || 0.5 %>
     ssl: <%= ENV['MONGOID_USE_SSL'] || false %>

-production:
-  sessions:
+common_uri: &default_uri
+  uri: <%= ENV['MONGOHQ_URL'] %>

+development:
+  clients:
+    default:
-      <<: *default_session
+      <<: *default_client
+      database: cs_comments_service_development
+      hosts:
+        - localhost:27017

+test:
+  clients:
+    default:
+      <<: *default_client
+      database: cs_comments_service_test
+      hosts:
+        - localhost:27017

+production:
+  clients:
+    default:
+      <<: *default_uri
+      <<: *default_client

 edgeprod:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client

 edgestage:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client

 staging:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client

 loadtest:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client

 defaults: &defaults
   use_utc: false
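Editor's note: a minimal sketch of how this file is consumed; the path and environment match the Mongoid.load! call in app.rb above, the rest is illustrative:

# Mongoid 5 reads the "clients" key (Mongoid 3 read "sessions"); the default
# client it builds is a Mongo::Client from the 2.x driver.
require 'mongoid'

Mongoid.load!('config/mongoid.yml', :development)
Mongoid::Clients.default.database.name  # => "cs_comments_service_development"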
@@ -13,5 +13,5 @@ after_fork do |server, worker|
   Signal.trap 'TERM' do
     puts 'Unicorn worker intercepting TERM and doing nothing. Waiting for master to send QUIT'
   end
-  ::Mongoid.default_session.disconnect
+  ::Mongoid.default_client.close
 end

@@ -7,5 +7,5 @@ listen "unix:#{data_dir}/forum.sock", :backlog => 512
 pid "#{data_dir}/forum_unicorn.pid"

 after_fork do |server, worker|
-  ::Mongoid.default_session.disconnect
+  ::Mongoid.default_client.close
 end

@@ -11,5 +11,5 @@ data_dir = ENV['DATA_DIR'] || Dir.tmpdir
 pid "#{data_dir}/forum_unicorn.pid"

 after_fork do |server, worker|
-  ::Mongoid.default_session.disconnect
+  ::Mongoid.default_client.close
 end
@@ -190,24 +190,21 @@ helpers do
       to_skip = (page - 1) * per_page
       has_more = false
       # batch_size is used to cap the number of documents we might load into memory at any given time
-      # TODO: starting with Mongoid 3.1, you can just do comment_threads.batch_size(size).each()
-      comment_threads.query.batch_size(CommentService.config["manual_pagination_batch_size"].to_i)
-      Mongoid.unit_of_work(disable: :current) do # this is to prevent Mongoid from memoizing every document we look at
-        comment_threads.each do |thread|
-          thread_key = thread._id.to_s
-          if !read_dates.has_key?(thread_key) || read_dates[thread_key] < thread.last_activity_at
-            if skipped >= to_skip
-              if threads.length == per_page
-                has_more = true
-                break
-              end
-              threads << thread
-            else
-              skipped += 1
+      comment_threads.batch_size(CommentService.config["manual_pagination_batch_size"].to_i).each do |thread|
+        thread_key = thread._id.to_s
+        if !read_dates.has_key?(thread_key) || read_dates[thread_key] < thread.last_activity_at
+          if skipped >= to_skip
+            if threads.length == per_page
+              has_more = true
+              break
+            end
+            threads << thread
+          else
+            skipped += 1
           end
         end
       end

       # The following trick makes frontend pagers work without recalculating
       # the number of all unread threads per user on every request (since the number
       # of threads in a course could be tens or hundreds of thousands). It has the
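Editor's note: the pagination rewrite above relies on the criteria-level batch_size that Mongoid 5 exposes. A hedged sketch of the pattern; the model and field names follow this diff, the batch size and course id are illustrative:

# Sketch: batch_size caps how many documents the driver fetches per round trip,
# so iterating a large result set does not load everything into memory at once.
CommentThread.where(course_id: "demo_course").batch_size(500).each do |thread|
  # process each thread as it streams in
end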
@@ -219,7 +216,7 @@ helpers do
       # let the installed paginator library handle pagination
       num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max
       page = [1, page].max
-      threads = comment_threads.page(page).per(per_page).to_a
+      threads = comment_threads.paginate(:page => page, :per_page => per_page).to_a
     end

     if threads.length == 0
@@ -368,7 +365,7 @@ helpers do
     rescue
       # body was nil, or the hash function failed somehow - never mind
       return
-    end
+    end
     if CommentService.blocked_hashes.include? hash then
       msg = t(:blocked_content_with_body_hash, :hash => hash)
       logger.warn msg
@@ -5,7 +5,8 @@ class Comment < Content
   include Mongoid::Tree
   include Mongoid::Timestamps
   include Mongoid::MagicCounterCache
+  include ActiveModel::MassAssignmentSecurity

   voteable self, :up => +1, :down => -1

   field :course_id, type: String
@@ -14,6 +15,7 @@ class Comment < Content
   field :endorsement, type: Hash
   field :anonymous, type: Boolean, default: false
   field :anonymous_to_peers, type: Boolean, default: false
+  field :commentable_id, type: String
   field :at_position_list, type: Array, default: []

   index({author_id: 1, course_id: 1})
@@ -101,10 +103,10 @@ class Comment < Content
       .merge("user_id" => author_id)
       .merge("username" => author_username)
       .merge("depth" => depth)
-      .merge("closed" => comment_thread.nil? ? false : comment_thread.closed) # ditto
+      .merge("closed" => comment_thread.nil? ? false : comment_thread.closed)
       .merge("thread_id" => comment_thread_id)
       .merge("parent_id" => parent_ids[-1])
-      .merge("commentable_id" => comment_thread.nil? ? nil : comment_thread.commentable_id) # ditto
+      .merge("commentable_id" => comment_thread.nil? ? nil : comment_thread.commentable_id)
      .merge("votes" => votes.slice(*%w[count up_count down_count point]))
      .merge("abuse_flaggers" => abuse_flaggers)
      .merge("type" => "comment")
@@ -156,7 +158,7 @@ class Comment < Content
 private

   def set_thread_last_activity_at
-    self.comment_thread.update_attributes!(last_activity_at: Time.now.utc)
+    self.comment_thread.update_attribute(:last_activity_at, Time.now.utc)
   end

 end
@@ -5,6 +5,8 @@ require_relative 'content'

 class CommentThread < Content

   include Mongoid::Timestamps
+  include Mongoid::Attributes::Dynamic
+  include ActiveModel::MassAssignmentSecurity
   extend Enumerize

   voteable self, :up => +1, :down => -1
@@ -154,8 +156,8 @@ private
     # the last activity time on the thread. Therefore the callbacks would be mutually recursive and we end up with a
     # 'SystemStackError'. The 'set' method skips callbacks and therefore bypasses this issue.
     self.comments.each do |comment|
-      comment.set :endorsed, false
-      comment.set :endorsement, nil
+      comment.set(endorsed: false)
+      comment.set(endorsement: nil)
     end
   end
 end
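Editor's note: a usage sketch for the atomic #set call used above (assumes a persisted comment; combining fields in one call is an illustration, not what the commit does):

# In Mongoid 5, #set takes a hash and can update several fields in one atomic
# write, skipping validations and callbacks - which is why it is used here to
# avoid the mutually recursive callbacks described in the comment above.
comment.set(endorsed: false, endorsement: nil)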
@@ -1,6 +1,7 @@
 class Notification
   include Mongoid::Document
   include Mongoid::Timestamps
+  include ActiveModel::MassAssignmentSecurity

   field :notification_type, type: String
   field :info, type: Hash
@@ -12,7 +12,7 @@ class Subscription
   index({source_id: 1, source_type: 1}, {background: true})

   def to_hash
-    as_document.slice(*%w[subscriber_id source_id source_type])
+    as_document.slice(*%w[subscriber_id source_id source_type]).merge("id" => _id)
   end

   def subscriber
@@ -153,8 +153,9 @@ class ReadState
   field :last_read_times, type: Hash, default: {}
   embedded_in :user

-  validates :course_id, uniqueness: true, presence: true
+  validates_presence_of :course_id
+  validates_uniqueness_of :course_id

   def to_hash
     to_json
   end
@@ -5,10 +5,10 @@ module ThreadUtils
     # only threads which are endorsed will have entries, value will always be true.
     endorsed_threads = {}
     thread_ids = threads.collect {|t| t._id}
-    Comment.collection.aggregate(
+    Comment.collection.aggregate([
       {"$match" => {"comment_thread_id" => {"$in" => thread_ids}, "endorsed" => true}},
       {"$group" => {"_id" => "$comment_thread_id"}}
-    ).each do |res|
+    ]).each do |res|
       endorsed_threads[res["_id"].to_s] = true
     end
     endorsed_threads
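Editor's note: for reference, a standalone sketch of the same aggregation; with the mongo 2.x driver the pipeline stages are passed as a single array (field names as in the diff above, the match filter is simplified for illustration):

# Sketch: group endorsed comments by their thread id.
pipeline = [
  { "$match" => { "endorsed" => true } },
  { "$group" => { "_id" => "$comment_thread_id" } }
]
Comment.collection.aggregate(pipeline).each { |doc| puts doc["_id"] }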
@@ -26,7 +26,7 @@ module ThreadUtils
       thread_key = t._id.to_s
       if read_dates.has_key? thread_key
         is_read = read_dates[thread_key] >= t.updated_at
-        unread_comment_count = Comment.collection.where(
+        unread_comment_count = Comment.collection.find(
           :comment_thread_id => t._id,
           :author_id => {"$ne" => user.id},
           :updated_at => {"$gte" => read_dates[thread_key]}
@@ -43,9 +43,6 @@ describe "app" do
   describe "flag a comment as abusive" do
     it "create or update the abuse_flags on the comment" do
       comment = Comment.first
-
-      # We get the count rather than just keeping the array, because the array
-      # will update as the Comment updates since the IdentityMap is enabled.
       prev_abuse_flaggers_count = comment.abuse_flaggers.length
       create_comment_flag("#{comment.id}", User.first.id)
@@ -7,6 +7,7 @@ describe "app" do

   describe "comments" do
+    before(:each) { init_without_subscriptions }

     describe "GET /api/v1/comments/:comment_id" do
       it "returns JSON" do
         comment = Comment.first

@@ -25,10 +26,10 @@ describe "app" do
         retrieved["children"].should be_nil
         retrieved["votes"]["point"].should == comment.votes_point
         retrieved["depth"].should == comment.depth
-        retrieved["parent_id"].should == comment.parent_ids[-1]
+        retrieved["parent_id"].should == comment.parent_ids.map(&:to_s)[-1]
       end
       it "retrieve information of a single comment with its sub comments" do
-        comment = Comment.first
+        comment = Comment.order_by(_id: 'asc').first
         get "/api/v1/comments/#{comment.id}", recursive: true
         last_response.should be_ok
         retrieved = parse last_response.body

@@ -56,6 +57,7 @@ describe "app" do

       include_examples "unicode data"
     end

     describe "PUT /api/v1/comments/:comment_id" do
       def test_update_endorsed(true_val, false_val)
         comment = Comment.first

@@ -114,11 +116,13 @@ describe "app" do
         comment = Comment.first
         put "/api/v1/comments/#{comment.id}", body: text
         last_response.should be_ok
+        comment = Comment.find(comment.id)
         comment.body.should == text
       end

       include_examples "unicode data"
     end

     describe "POST /api/v1/comments/:comment_id" do
       it "create a sub comment to the comment" do
         comment = Comment.first.to_hash(recursive: true)

@@ -154,6 +158,7 @@ describe "app" do

       include_examples "unicode data"
     end

     describe "DELETE /api/v1/comments/:comment_id" do
       it "delete the comment and its sub comments" do
         comment = Comment.first
@@ -455,6 +455,8 @@ describe "app" do
       last_response.should be_ok
       response_thread = parse last_response.body
       response_thread["endorsed"].should == true
+      # re-request the thread from the database before checking it.
+      thread = CommentThread.find(thread.id)
       check_thread_result_json(nil, thread, response_thread)
     end

@@ -463,16 +465,11 @@ describe "app" do
     # regression in which we used User.only(:id, :read_states). This worked
     # before we included the identity map, but afterwards, the user was
     # missing the username and was not refetched.
+    # BBEGGS - Note 8/4/2015: Identify map has been removed during the mongoid 4.x upgrade.
+    # Should no longer be an issue.
     it "includes the username even if the thread is being marked as read for the thread author" do
       thread = CommentThread.first
       expected_username = thread.author.username
-
-      # We need to clear the IdentityMap after getting the expected data to
-      # ensure that this spec fails when it should. If we don't do this, then
-      # in the cases where the User is fetched without its username, the spec
-      # won't fail because the User will already be in the identity map.
-      Mongoid::IdentityMap.clear
-
       get "/api/v1/threads/#{thread.id}", {:user_id => thread.author_id, :mark_as_read => true}
       last_response.should be_ok
       response_thread = parse last_response.body

@@ -657,6 +654,7 @@ describe "app" do
       thread = CommentThread.first
       put "/api/v1/threads/#{thread.id}", body: text, title: text
       last_response.should be_ok
+      thread = CommentThread.find(thread.id)
       thread.body.should == text
       thread.title.should == text
     end
@@ -45,7 +45,7 @@ describe "app" do
       course1_threads.should_not == course2_threads
     end
     it "filters by group_id" do
-      group_thread = Commentable.find("question_1").comment_threads.first
+      group_thread = Commentable.find("question_1").comment_threads.sort(_id: 1).first
       threads = thread_result "question_1", group_id: 42
       threads.length.should == 2
       group_thread.group_id = 43

@@ -58,7 +58,7 @@ describe "app" do
       threads.length.should == 2
     end
     it "filters by group_ids" do
-      group_thread = Commentable.find("question_1").comment_threads.first
+      group_thread = Commentable.find("question_1").comment_threads.sort(_id: 1).first
       group_thread.group_id = 42
       group_thread.save!
       threads = thread_result "question_1", group_ids: "42,43"
@@ -17,7 +17,7 @@ describe "app" do
     random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
     thread = CommentThread.new(
       title: "Test title", body: "elephant otter", course_id: "1",
-      commentable_id: commentable.id, comments_text_dummy: random_string
+      commentable_id: commentable.id, body: random_string
     )
     thread.thread_type = :discussion
     thread.author = user

@@ -94,7 +94,8 @@ describe "app" do

     subscription = Subscription.create({:subscriber_id => user._id.to_s, :source_id => thread._id.to_s})

-    comment = Comment.new(body: "dummy body text", course_id: "1", commentable_id: commentable.id)
+    comment = Comment.new(body: "dummy body text", course_id: "1")
+    comment.commentable_id = commentable.id
     comment.author = user
     comment.comment_thread = thread
     comment.save!
@@ -42,7 +42,8 @@ describe "app" do

     sleep 3

-    comment = Comment.new(body: random_string, course_id: "1", commentable_id: commentable.id)
+    comment = Comment.new(body: random_string, course_id: "1")
+    comment.commentable_id = commentable.id
     comment.author = author
     comment.comment_thread = thread
     comment.save!
@@ -45,8 +45,11 @@ describe "app" do
   context "db check" do
     def test_db_check(response, is_success)
       db = double("db")
-      stub_const("Mongoid::Sessions", Class.new).stub(:default).and_return(db)
-      db.should_receive(:command).with({:isMaster => 1}).and_return(response)
+      stub_const("Mongoid::Clients", Class.new).stub(:default).and_return(db)
+      result = double('result')
+      result.stub(:ok?).and_return(response['ok'] == 1)
+      result.stub(:documents).and_return([response])
+      db.should_receive(:command).with({:isMaster => 1}).and_return(result)
       get "/heartbeat"
       if is_success
         last_response.status.should == 200

@@ -75,7 +78,7 @@ describe "app" do

     it "reports failure when db command raises an error" do
       db = double("db")
-      stub_const("Mongoid::Sessions", Class.new).stub(:default).and_return(db)
+      stub_const("Mongoid::Clients", Class.new).stub(:default).and_return(db)
       db.should_receive(:command).with({:isMaster => 1}).and_raise(StandardError)
       get "/heartbeat"
       last_response.status.should == 500

@@ -168,4 +171,4 @@ describe "app" do
     end
   end
-end
+end
@@ -15,6 +15,9 @@ set :run, false
 set :raise_errors, true
 set :logging, false

+Mongoid.logger.level = Logger::WARN
+Mongo::Logger.logger.level = ENV["ENABLE_MONGO_DEBUGGING"] ? Logger::DEBUG : Logger::WARN
+
 Delayed::Worker.delay_jobs = false

 def app

@@ -53,7 +56,6 @@ RSpec.configure do |config|
   config.filter_run focus: true
   config.run_all_when_everything_filtered = true
   config.before(:each) do
-    Mongoid::IdentityMap.clear
     DatabaseCleaner.clean
     delete_es_index
     create_es_index

@@ -73,12 +75,6 @@ def create_test_user(id)
 end

 def init_without_subscriptions
-
-  [Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
-  Content.mongo_session[:blocked_hash].drop
-  delete_es_index
-  create_es_index
-
   commentable = Commentable.new("question_1")

   users = (1..10).map{|id| create_test_user(id)}

@@ -158,18 +154,14 @@ def init_without_subscriptions
     users[2,9].each {|user| user.vote(c, [:up, :down].sample)}
   end

-  Content.mongo_session[:blocked_hash].insert(hash: Digest::MD5.hexdigest("blocked post"))
+  Content.mongo_client[:blocked_hash].insert_one(hash: Digest::MD5.hexdigest("blocked post"))
   # reload the global holding the blocked hashes
-  CommentService.blocked_hashes = Content.mongo_session[:blocked_hash].find.select(hash: 1).each.map {|d| d["hash"]}
-
+  CommentService.blocked_hashes = Content.mongo_client[:blocked_hash].find(nil, projection: {hash: 1}).map do |d|
+    d["hash"]
+  end
 end

 def init_with_subscriptions
-  [Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
-
-  delete_es_index
-  create_es_index
-
   user1 = create_test_user(1)
   user2 = create_test_user(2)