diff options
author | Francisco Javier López <fjlopez@gitlab.com> | 2019-04-25 04:19:07 +0000 |
---|---|---|
committer | Stan Hu <stanhu@gmail.com> | 2019-04-25 04:19:07 +0000 |
commit | dde69bfb2a595956c54ddb9c776759c11b3f2a3b (patch) | |
tree | 70629de9baf0f34a2bf2bc71e9045bc0366ec24c /spec/models | |
parent | 0f863c68bb8bc5054a22e0c553a933c83bea4df6 (diff) | |
download | gitlab-ce-dde69bfb2a595956c54ddb9c776759c11b3f2a3b.tar.gz |
Added list_pages method to avoid loading all wiki pages content
Inside a wiki, when we show the sidebar or browse to the `pages`,
all page contents are retrieved from Gitaly and that is a waste
of resources, since no content from those pages is going to be
shown.
This MR introduces the method `ProjectWiki#list_pages`,
which uses the new wiki_list_pages RPC call to retrieve
pages without content.
Also, in the `WikisController`, we're using the method to show
pages in the sidebar and on the `pages` page.
Diffstat (limited to 'spec/models')
-rw-r--r-- | spec/models/project_wiki_spec.rb | 74 | ||||
-rw-r--r-- | spec/models/wiki_page_spec.rb | 66 |
2 files changed, 92 insertions, 48 deletions
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb index 2525a6aebe0..d12dd97bb9e 100644 --- a/spec/models/project_wiki_spec.rb +++ b/spec/models/project_wiki_spec.rb @@ -109,8 +109,7 @@ describe ProjectWiki do subject { super().empty? } it { is_expected.to be_falsey } - # Re-enable this when https://gitlab.com/gitlab-org/gitaly/issues/1204 is fixed - xit 'only instantiates a Wiki page once' do + it 'only instantiates a Wiki page once' do expect(WikiPage).to receive(:new).once.and_call_original subject @@ -119,22 +118,65 @@ describe ProjectWiki do end end - describe "#pages" do + describe "#list_pages" do + let(:wiki_pages) { subject.list_pages } + before do - create_page("index", "This is an awesome new Gollum Wiki") - @pages = subject.pages + create_page("index", "This is an index") + create_page("index2", "This is an index2") + create_page("an index3", "This is an index3") end after do - destroy_page(@pages.first.page) + wiki_pages.each do |wiki_page| + destroy_page(wiki_page.page) + end end it "returns an array of WikiPage instances" do - expect(@pages.first).to be_a WikiPage + expect(wiki_pages.first).to be_a WikiPage + end + + it 'does not load WikiPage content by default' do + wiki_pages.each do |page| + expect(page.content).to be_empty + end + end + + it 'returns all pages by default' do + expect(wiki_pages.count).to eq(3) + end + + context "with limit option" do + it 'returns limited set of pages' do + expect(subject.list_pages(limit: 1).count).to eq(1) + end end - it "returns the correct number of pages" do - expect(@pages.count).to eq(1) + context "with sorting options" do + it 'returns pages sorted by title by default' do + pages = ['an index3', 'index', 'index2'] + + expect(subject.list_pages.map(&:title)).to eq(pages) + expect(subject.list_pages(direction: "desc").map(&:title)).to eq(pages.reverse) + end + + it 'returns pages sorted by created_at' do + pages = ['index', 'index2', 'an index3'] + + 
expect(subject.list_pages(sort: 'created_at').map(&:title)).to eq(pages) + expect(subject.list_pages(sort: 'created_at', direction: "desc").map(&:title)).to eq(pages.reverse) + end + end + + context "with load_content option" do + let(:pages) { subject.list_pages(load_content: true) } + + it 'loads WikiPage content' do + expect(pages.first.content).to eq("This is an index3") + expect(pages.second.content).to eq("This is an index") + expect(pages.third.content).to eq("This is an index2") + end end end @@ -144,7 +186,7 @@ describe ProjectWiki do end after do - subject.pages.each { |page| destroy_page(page.page) } + subject.list_pages.each { |page| destroy_page(page.page) } end it "returns the latest version of the page if it exists" do @@ -195,7 +237,7 @@ describe ProjectWiki do end after do - subject.pages.each { |page| destroy_page(page.page) } + subject.list_pages.each { |page| destroy_page(page.page) } end it 'finds the page defined as _sidebar' do @@ -242,12 +284,12 @@ describe ProjectWiki do describe "#create_page" do after do - destroy_page(subject.pages.first.page) + destroy_page(subject.list_pages.first.page) end it "creates a new wiki page" do expect(subject.create_page("test page", "this is content")).not_to eq(false) - expect(subject.pages.count).to eq(1) + expect(subject.list_pages.count).to eq(1) end it "returns false when a duplicate page exists" do @@ -262,7 +304,7 @@ describe ProjectWiki do it "sets the correct commit message" do subject.create_page("test page", "some content", :markdown, "commit message") - expect(subject.pages.first.page.version.message).to eq("commit message") + expect(subject.list_pages.first.page.version.message).to eq("commit message") end it 'sets the correct commit email' do @@ -293,7 +335,7 @@ describe ProjectWiki do format: :markdown, message: "updated page" ) - @page = subject.pages.first.page + @page = subject.list_pages(load_content: true).first.page end after do @@ -337,7 +379,7 @@ describe ProjectWiki do it "deletes 
the page" do subject.delete_page(@page) - expect(subject.pages.count).to eq(0) + expect(subject.list_pages.count).to eq(0) end it 'sets the correct commit email' do diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb index d5c85c11195..520a06e138e 100644 --- a/spec/models/wiki_page_spec.rb +++ b/spec/models/wiki_page_spec.rb @@ -44,47 +44,49 @@ describe WikiPage do WikiDirectory.new('dir_2', pages) end - context 'sort by title' do - let(:grouped_entries) { described_class.group_by_directory(wiki.pages) } - let(:expected_grouped_entries) { [dir_1_1, dir_1, page_dir_2, dir_2, page_1, page_6] } - - it 'returns an array with pages and directories' do - grouped_entries.each_with_index do |page_or_dir, i| - expected_page_or_dir = expected_grouped_entries[i] - expected_slugs = get_slugs(expected_page_or_dir) - slugs = get_slugs(page_or_dir) - - expect(slugs).to match_array(expected_slugs) + context "#list_pages" do + context 'sort by title' do + let(:grouped_entries) { described_class.group_by_directory(wiki.list_pages) } + let(:expected_grouped_entries) { [dir_1_1, dir_1, page_dir_2, dir_2, page_1, page_6] } + + it 'returns an array with pages and directories' do + grouped_entries.each_with_index do |page_or_dir, i| + expected_page_or_dir = expected_grouped_entries[i] + expected_slugs = get_slugs(expected_page_or_dir) + slugs = get_slugs(page_or_dir) + + expect(slugs).to match_array(expected_slugs) + end end end - end - context 'sort by created_at' do - let(:grouped_entries) { described_class.group_by_directory(wiki.pages(sort: 'created_at')) } - let(:expected_grouped_entries) { [dir_1_1, page_1, dir_1, page_dir_2, dir_2, page_6] } + context 'sort by created_at' do + let(:grouped_entries) { described_class.group_by_directory(wiki.list_pages(sort: 'created_at')) } + let(:expected_grouped_entries) { [dir_1_1, page_1, dir_1, page_dir_2, dir_2, page_6] } - it 'returns an array with pages and directories' do - grouped_entries.each_with_index do 
|page_or_dir, i| - expected_page_or_dir = expected_grouped_entries[i] - expected_slugs = get_slugs(expected_page_or_dir) - slugs = get_slugs(page_or_dir) + it 'returns an array with pages and directories' do + grouped_entries.each_with_index do |page_or_dir, i| + expected_page_or_dir = expected_grouped_entries[i] + expected_slugs = get_slugs(expected_page_or_dir) + slugs = get_slugs(page_or_dir) - expect(slugs).to match_array(expected_slugs) + expect(slugs).to match_array(expected_slugs) + end end end - end - it 'returns an array with retained order with directories at the top' do - expected_order = ['dir_1/dir_1_1/page_3', 'dir_1/page_2', 'dir_2', 'dir_2/page_4', 'dir_2/page_5', 'page_1', 'page_6'] + it 'returns an array with retained order with directories at the top' do + expected_order = ['dir_1/dir_1_1/page_3', 'dir_1/page_2', 'dir_2', 'dir_2/page_4', 'dir_2/page_5', 'page_1', 'page_6'] - grouped_entries = described_class.group_by_directory(wiki.pages) + grouped_entries = described_class.group_by_directory(wiki.list_pages) - actual_order = - grouped_entries.map do |page_or_dir| - get_slugs(page_or_dir) - end - .flatten - expect(actual_order).to eq(expected_order) + actual_order = + grouped_entries.map do |page_or_dir| + get_slugs(page_or_dir) + end + .flatten + expect(actual_order).to eq(expected_order) + end end end end @@ -386,7 +388,7 @@ describe WikiPage do it "deletes the page" do @page.delete - expect(wiki.pages).to be_empty + expect(wiki.list_pages).to be_empty end it "returns true" do |