2014-01-23 07:25:21 +04:00
|
|
|
require_relative 'thread_utils'
|
2013-09-25 08:24:13 +04:00
|
|
|
require 'new_relic/agent/method_tracer'
|
|
|
|
|
|
|
|
class ThreadPresenter
|
|
|
|
|
2014-01-23 07:25:21 +04:00
|
|
|
def self.factory(thread, user)
  # Build a presenter for a single thread, for the given viewing user.
  # Use this when working with one thread at a time: it fetches the
  # extended / derived attributes (read state, unread count, endorsement)
  # from the db and explicitly initializes an instance with them.
  key = thread._id.to_s
  read_states = ThreadUtils.get_read_states([thread], user, thread.course_id)
  # Fall back to "unread, all comments unseen" when no read state is stored.
  is_read, unread_count = read_states.fetch(key, [false, thread.comment_count])
  is_endorsed = ThreadUtils.get_endorsed([thread]).fetch(key, false)
  new(thread, user, is_read, unread_count, is_endorsed)
end
|
|
|
|
|
2014-01-23 07:25:21 +04:00
|
|
|
def initialize(thread, user, is_read, unread_count, is_endorsed)
  # Generally not intended for direct use: instantiated by self.factory
  # or by thread list presenters, which supply the precomputed
  # read/endorsement state alongside the thread and user.
  @thread, @user = thread, user
  @is_read, @unread_count, @is_endorsed = is_read, unread_count, is_endorsed
end
|
|
|
|
|
2016-08-25 17:10:08 +03:00
|
|
|
def to_hash(with_responses=false, resp_skip=0, resp_limit=nil, recursive=true)
  # Serialize the thread to a hash, annotated with the viewing user's
  # read state and endorsement information.
  #
  # with_responses - when true, also fetch and include the responses.
  # resp_skip      - number of responses to skip (must be >= 0).
  # resp_limit     - max responses to return; nil means unlimited,
  #                  otherwise must be >= 1.
  # recursive      - when true, nest each response's comments under its
  #                  "children" key.
  #
  # Raises ArgumentError when the pagination arguments are out of range.
  raise ArgumentError if resp_skip < 0
  raise ArgumentError if !resp_limit.nil? && resp_limit < 1

  hash = @thread.to_hash
  hash["read"] = @is_read
  hash["unread_comments_count"] = @unread_count
  hash["endorsed"] = @is_endorsed || false
  return hash unless with_responses

  if @thread.thread_type.discussion? && resp_skip == 0 && resp_limit.nil?
    # Fast path: an unpaginated discussion thread needs only a single
    # ordered query rather than the paged lookup below.
    content =
      if recursive
        Comment.where(comment_thread_id: @thread._id).order_by({"sk" => 1})
      else
        Comment.where(comment_thread_id: @thread._id, "parent_ids" => []).order_by({"sk" => 1})
      end
    hash["children"] = merge_response_content(content)
    hash["resp_total"] = content.to_a.select{|d| d.depth == 0 }.length
  else
    responses = Content.where(comment_thread_id: @thread._id).exists(parent_id: false)
    case @thread.thread_type
    when "question"
      # Question threads split responses into endorsed (always returned
      # in full) and non-endorsed (paginated) groups.
      endorsed_info = get_paged_merged_responses(@thread._id, responses.where(endorsed: true), 0, nil, recursive)
      non_endorsed_info = get_paged_merged_responses(
        @thread._id,
        responses.where(endorsed: false),
        resp_skip,
        resp_limit,
        recursive
      )
      hash["endorsed_responses"] = endorsed_info["responses"]
      hash["non_endorsed_responses"] = non_endorsed_info["responses"]
      hash["non_endorsed_resp_total"] = non_endorsed_info["response_count"]
      hash["resp_total"] = non_endorsed_info["response_count"] + endorsed_info["response_count"]
    when "discussion"
      info = get_paged_merged_responses(@thread._id, responses, resp_skip, resp_limit, recursive)
      hash["children"] = info["responses"]
      hash["resp_total"] = info["response_count"]
    end
  end
  hash["resp_skip"] = resp_skip
  hash["resp_limit"] = resp_limit
  hash
end
|
|
|
|
|
2014-07-03 05:20:07 +04:00
|
|
|
# Given a Mongoid criteria object representing responses, apply pagination
# and return a hash containing:
#   "responses"      - an array of hashes for the requested page of
#                      responses (including children, if recursive is true)
#   "response_count" - the total (unpaginated) number of responses
def get_paged_merged_responses(thread_id, responses, skip, limit, recursive=false)
  # Fetch only the ids in display order, then slice out the page locally.
  all_ids = responses.only(:_id).sort({"sk" => 1}).to_a.map { |doc| doc["_id"] }
  page_ids = all_ids.drop(skip)
  page_ids = page_ids.take(limit) unless limit.nil?
  content =
    if recursive
      # Pull the paged responses together with all of their descendants.
      Comment.where(comment_thread_id: thread_id).
        or({:parent_id => {"$in" => page_ids}}, {:id => {"$in" => page_ids}}).
        sort({"sk" => 1})
    else
      # Top-level responses only (empty parent_ids), restricted to the page.
      Comment.where(comment_thread_id: thread_id, "parent_ids" => []).
        where({:id => {"$in" => page_ids}}).sort({"sk" => 1})
    end
  {"responses" => merge_response_content(content), "response_count" => all_ids.length}
end
|
|
|
|
|
|
|
|
# Takes content output from Mongoid in a depth-first traversal order and
# returns an array of first-level response hashes with content represented
# hierarchically: each comment's list of children lives under "children".
# Items whose parent cannot be found on the current ancestor path (invalid
# parent) are silently dropped.
def merge_response_content(content)
  roots = []  # first-level responses, in traversal order
  stack = []  # ancestor path from the current root down to the last item
  content.each do |item|
    # merge! (not merge) avoids duplicating the source hash per item.
    node = item.to_hash.merge!("children" => [])
    if item.parent_id.nil?
      roots << node
      stack = [node]
    else
      # Unwind to the nearest ancestor this item belongs under.
      stack.pop while !stack.empty? && item.parent_id != stack.last["id"]
      next if stack.empty? # invalid parent; ignore item
      stack.last["children"] << node
      stack << node
    end
  end
  roots
end
|
|
|
|
|
|
|
|
# New Relic instrumentation: record execution time of the presenter's
# hot paths (thread serialization and response-tree merging) in APM.
include ::NewRelic::Agent::MethodTracer
|
|
|
|
add_method_tracer :to_hash
|
2014-07-03 05:20:07 +04:00
|
|
|
add_method_tracer :merge_response_content
|
2013-09-25 08:24:13 +04:00
|
|
|
|
|
|
|
end
|