Add a default response size and a response size limit.
By default, we currently allow a user to retrieve every response in a thread, which is terrible from a performance standpoint. This change adds a default response page size of 100, chosen to match typical production pagination parameters, and a response size limit of 200. Because responses can include child comments, 200 is still a large number, but far less disastrous than, say, fetching 3000 responses along with all of their child comments. Both values are configurable and can be overridden via environment variables (THREAD_RESPONSE_DEFAULT_SIZE and THREAD_RESPONSE_SIZE_LIMIT).
Parent: fe3f57022d
Commit: 7281673648
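
For orientation before the diff, here is a standalone Ruby sketch that mirrors the behaviour this change introduces. It is illustrative only: resolve_resp_limit and DEFAULTS are made-up names, and the real route returns an HTTP 400 via the t(:param_exceeds_limit, ...) translation rather than raising.

    DEFAULTS = { "thread_response_default_size" => 100, "thread_response_size_limit" => 200 }

    def resolve_resp_limit(param, config = DEFAULTS)
      # No resp_limit given: fall back to the configured default page size.
      limit = param.nil? ? config["thread_response_default_size"] : Integer(param)
      raise ArgumentError, "resp_limit must be a number greater than zero" unless limit >= 1
      # Never allow a page larger than the configured hard limit.
      size_limit = config["thread_response_size_limit"]
      raise ArgumentError, "#{limit} exceeds limit: #{size_limit}" unless limit <= size_limit
      limit
    end

    resolve_resp_limit(nil)    # => 100, the default page size
    resolve_resp_limit("150")  # => 150, an explicit value under the cap
    resolve_resp_limit("500")  # raises; the API responds with HTTP 400 instead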
@@ -1,3 +1,4 @@
 get "#{APIPREFIX}/threads" do # retrieve threads by course
   threads = CommentThread.where({"course_id" => params["course_id"]})
   if params[:commentable_ids]
@@ -46,7 +47,11 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
       error 400, [t(:param_must_be_a_number_greater_than_zero, :param => 'resp_limit')].to_json
     end
   else
-    resp_limit = nil
+    resp_limit = CommentService.config["thread_response_default_size"]
   end
+  size_limit = CommentService.config["thread_response_size_limit"]
+  unless (resp_limit <= size_limit)
+    error 400, [t(:param_exceeds_limit, :param => resp_limit, :limit => size_limit)].to_json
+  end
   presenter.to_hash(bool_with_responses, resp_skip, resp_limit, bool_recursive).to_json
 end
@@ -4,3 +4,5 @@ elasticsearch_server: <%= ENV['SEARCH_SERVER'] || 'http://localhost:9200' %>
 max_deep_search_comment_count: 5000
 default_locale: <%= ENV['SERVICE_LANGUAGE'] || 'en-US' %>
 manual_pagination_batch_size: <%= ENV['MANUAL_PAGINATION_BATCH_SIZE'] || 500 %>
+thread_response_default_size: <%= ENV['THREAD_RESPONSE_DEFAULT_SIZE'] || 100 %>
+thread_response_size_limit: <%= ENV['THREAD_RESPONSE_SIZE_LIMIT'] || 200 %>
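
The two new keys above use the same ERB-over-YAML pattern as the existing entries, so an environment variable overrides the literal default when the config file is loaded. A minimal sketch of that resolution, assuming a plain ERB + YAML loader (the service's actual config loading may differ):

    require 'erb'
    require 'yaml'

    yaml = <<~CONFIG
      thread_response_default_size: <%= ENV['THREAD_RESPONSE_DEFAULT_SIZE'] || 100 %>
      thread_response_size_limit: <%= ENV['THREAD_RESPONSE_SIZE_LIMIT'] || 200 %>
    CONFIG

    config = YAML.safe_load(ERB.new(yaml).result)
    config["thread_response_default_size"]  # => 100 unless THREAD_RESPONSE_DEFAULT_SIZE is set
    config["thread_response_size_limit"]    # => 200 unless THREAD_RESPONSE_SIZE_LIMIT is set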
@@ -8,4 +8,5 @@ en-US:
   blocked_content_with_body_hash: "blocked content with body hash %{hash}"
   param_must_be_a_non_negative_number: "%{param} must be a non-negative number"
   param_must_be_a_number_greater_than_zero: "%{param} must be a number greater than zero"
+  param_exceeds_limit: "%{param} exceeds limit: %{limit}"
   cannot_specify_group_id_and_group_ids: "Cannot specify both group_id and group_ids as filters."
@@ -8,3 +8,5 @@ x-test:
   blocked_content_with_body_hash: "##x-test## blocked content with body hash %{hash}"
   param_must_be_a_non_negative_number: "##x-test## %{param} must be a non-negative number"
   param_must_be_a_number_greater_than_zero: "##x-test## %{param} must be a number greater than zero"
+  param_exceeds_limit: "##x-test## %{param} exceeds limit: %{limit}"
+  cannot_specify_group_id_and_group_ids: "##x-test## Cannot specify both group_id and group_ids as filters."
@@ -542,6 +542,17 @@ describe "app" do

     include_examples 'unicode data'

+    context 'error conditions' do
+      subject do
+        resp_limit = CommentService.config["thread_response_size_limit"]
+        get "/api/v1/threads/#{thread.id}", resp_limit: resp_limit+1
+      end
+
+      it "returns an error when the limit is exceeded" do
+        expect(subject.status).to eq 400
+      end
+    end
+
     context "response pagination" do
       before(:each) do
         User.all.delete
@@ -549,7 +560,7 @@ describe "app" do
         @user = create_test_user(999)
         @threads = {}
         @comments = {}
-        [20, 10, 3, 2, 1, 0].each do |n|
+        [201, 10, 3, 2, 1, 0].each do |n|
           thread_key = "t#{n}"
           thread = make_thread(@user, thread_key, DFLT_COURSE_ID, "pdq")
           @threads[n] = thread
@@ -557,7 +568,7 @@ describe "app" do
           # generate n responses in this thread
           comment_key = "#{thread_key} r#{i}"
           comment = make_comment(@user, thread, comment_key)
-          i.times do |j|
+          2.times do |j|
             subcomment_key = "#{comment_key} c#{j}"
             subcomment = make_comment(@user, comment, subcomment_key)
           end
@@ -572,7 +583,7 @@ describe "app" do
        parse(last_response.body)
      end

-      it "returns all responses when no skip/limit params given" do
+      it "limits responses when no skip/limit params given" do
        @threads.each do |n, thread|
          res = thread_result thread.id, {}
          check_thread_response_paging_json thread, res, 0, nil, false
@@ -3,6 +3,8 @@ require 'spec_helper'
 describe ThreadPresenter do

   context "#to_hash" do
+    let(:default_resp_limit) { CommentService.config["thread_response_default_size"] }
+
     shared_examples "to_hash arguments" do |thread_type, endorse_responses|
       before(:each) do
         User.all.delete
@@ -94,18 +96,18 @@ describe ThreadPresenter do
       it "handles with_responses=true and recursive=true" do
         @threads_with_num_comments.each do |thread, num_comments|
           is_endorsed = num_comments > 0 && endorse_responses
-          hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, nil, true)
+          hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, default_resp_limit, true)
           check_thread_result(@reader, thread, hash)
-          check_thread_response_paging(thread, hash, 0, nil, false, true)
+          check_thread_response_paging(thread, hash, 0, default_resp_limit, false, true)
         end
       end

       it "handles with_responses=true and recursive=false" do
         @threads_with_num_comments.each do |thread, num_comments|
           is_endorsed = num_comments > 0 && endorse_responses
-          hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, nil, false)
+          hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, default_resp_limit, false)
           check_thread_result(@reader, thread, hash)
-          check_thread_response_paging(thread, hash)
+          check_thread_response_paging(thread, hash, 0, default_resp_limit)
         end
       end
@@ -113,9 +115,9 @@ describe ThreadPresenter do
         @threads_with_num_comments.each do |thread, num_comments|
           is_endorsed = num_comments > 0 && endorse_responses
           [0, 1, 2, 9, 10, 11, 1000].each do |skip|
-            hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, skip, nil, true)
+            hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, skip, default_resp_limit, true)
             check_thread_result(@reader, thread, hash)
-            check_thread_response_paging(thread, hash, skip)
+            check_thread_response_paging(thread, hash, skip, default_resp_limit)
           end
         end
       end
@@ -265,6 +265,10 @@ end


 def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
+  if resp_limit.nil?
+    resp_limit = CommentService.config["thread_response_default_size"]
+  end
+
   all_responses = thread.root_comments.sort({"sk" => 1}).to_a
   total_responses = all_responses.length
   hash["resp_total"].should == total_responses
@@ -277,11 +281,7 @@ def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
     check_comment(expected_responses[i], response_hash, is_json, recursive)
   end
   hash["resp_skip"].to_i.should == resp_skip
-  if resp_limit.nil?
-    hash["resp_limit"].should be_nil
-  else
-    hash["resp_limit"].to_i.should == resp_limit
-  end
+  hash["resp_limit"].to_i.should == resp_limit
 end

 def check_question_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)