author    Filipa Lacerda <filipa@gitlab.com>  2017-12-07 13:44:09 +0000
committer Filipa Lacerda <filipa@gitlab.com>  2017-12-07 13:44:09 +0000
commit    a2479aaa1be00416f952eb79fa444328266c16b6 (patch)
tree      523250a25fc5b60ccd767bfc0a64f295a1c86b2e /spec
parent    139ce1c44587f6920cdb1982c33714ff6cf910b7 (diff)
parent    17542a7895f288b8e7bc92836039f4dcbb7c17d2 (diff)
download  gitlab-ce-a2479aaa1be00416f952eb79fa444328266c16b6.tar.gz
Merge branch 'master' into 38869-datetime
* master: (82 commits)
  Docs: add EEU tier to the landing page
  CE backport of ProtectedBranches API changes
  Support uploads for groups
  Update pipeline create chain Prometheus metric
  Don't set timeago title to what was already there.
  Resolve "Display member role per project"
  The API isn't using the appropriate services for managing forks
  Add chevron to create dropdown on repository page
  Rename GKE as Kubernetes Engine
  Fix specs for MySQL
  Fix broken tests
  Refactor banzai to support referencing from group context
  Fix specs after rebase
  Prevent dups when using StringIO for binary reads
  Bump redis-rails to 5.0.2 to get redis-store security updates
  add note on deploying Pages to a private network
  Bump GITLAB_SHELL_VERSION
  Updates the dropdown to match the docs and remove old hack of stop event propagation
  Move invalid builds counter out of the transaction
  Add invalid builds counter metric to stage seeds class
  ...
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/groups/uploads_controller_spec.rb | 10
-rw-r--r--  spec/controllers/projects/uploads_controller_spec.rb | 247
-rw-r--r--  spec/factories/uploads.rb | 16
-rw-r--r--  spec/features/issues/gfm_autocomplete_spec.rb | 106
-rw-r--r--  spec/features/markdown_spec.rb | 3
-rw-r--r--  spec/features/projects/clusters/gcp_spec.rb | 6
-rw-r--r--  spec/javascripts/search_autocomplete_spec.js | 2
-rw-r--r--  spec/lib/banzai/cross_project_reference_spec.rb | 8
-rw-r--r--  spec/lib/banzai/filter/abstract_reference_filter_spec.rb | 38
-rw-r--r--  spec/lib/banzai/filter/issue_reference_filter_spec.rb | 47
-rw-r--r--  spec/lib/banzai/filter/upload_link_filter_spec.rb | 30
-rw-r--r--  spec/lib/banzai/reference_parser/issue_parser_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb | 510
-rw-r--r--  spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb | 242
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/reference_extractor_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/utils_spec.rb | 10
-rw-r--r--  spec/migrations/track_untracked_uploads_spec.rb | 27
-rw-r--r--  spec/models/user_spec.rb | 159
-rw-r--r--  spec/policies/group_policy_spec.rb | 27
-rw-r--r--  spec/requests/api/protected_branches_spec.rb | 36
-rw-r--r--  spec/services/projects/fork_service_spec.rb | 353
-rw-r--r--  spec/support/google_api/cloud_platform_helpers.rb | 2
-rw-r--r--  spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb | 240
-rw-r--r--  spec/support/track_untracked_uploads_helpers.rb | 20
-rw-r--r--  spec/uploaders/namespace_file_uploader_spec.rb | 21
26 files changed, 1745 insertions, 465 deletions
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
new file mode 100644
index 00000000000..67a11e56e94
--- /dev/null
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -0,0 +1,10 @@
+require 'spec_helper'
+
+describe Groups::UploadsController do
+ let(:model) { create(:group, :public) }
+ let(:params) do
+ { group_id: model }
+ end
+
+ it_behaves_like 'handle uploads'
+end
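
Both this spec and the projects spec below delegate to the 'handle uploads' shared examples, which (per the diffstat) live in spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb. A minimal, hypothetical sketch of how such shared examples are wired up in RSpec; the real body covers POST #create and GET #show and is not reproduced here:

# Hypothetical sketch only; see
# spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
# for the real shared examples.
shared_examples 'handle uploads' do
  let(:user) { create(:user) }
  let(:jpg)  { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }

  describe 'POST #create' do
    it 'returns an error without params[:file]' do
      sign_in(user)

      # `params` is supplied by the including spec (group_id here, namespace_id/project_id below).
      post :create, params.merge(format: :json)

      expect(response).to have_gitlab_http_status(422)
    end
  end
end
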
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index c2550b1efa7..d572085661d 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -1,247 +1,10 @@
-require('spec_helper')
+require 'spec_helper'
describe Projects::UploadsController do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
- let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
-
- describe "POST #create" do
- before do
- sign_in(user)
- project.team << [user, :developer]
- end
-
- context "without params['file']" do
- it "returns an error" do
- post :create,
- namespace_id: project.namespace.to_param,
- project_id: project,
- format: :json
- expect(response).to have_gitlab_http_status(422)
- end
- end
-
- context 'with valid image' do
- before do
- post :create,
- namespace_id: project.namespace.to_param,
- project_id: project,
- file: jpg,
- format: :json
- end
-
- it 'returns a content with original filename, new link, and correct type.' do
- expect(response.body).to match '\"alt\":\"rails_sample\"'
- expect(response.body).to match "\"url\":\"/uploads"
- end
-
- # NOTE: This is as close as we're getting to an Integration test for this
- # behavior. We're avoiding a proper Feature test because those should be
- # testing things entirely user-facing, which the Upload model is very much
- # not.
- it 'creates a corresponding Upload record' do
- upload = Upload.last
-
- aggregate_failures do
- expect(upload).to exist
- expect(upload.model).to eq project
- end
- end
- end
-
- context 'with valid non-image file' do
- before do
- post :create,
- namespace_id: project.namespace.to_param,
- project_id: project,
- file: txt,
- format: :json
- end
-
- it 'returns a content with original filename, new link, and correct type.' do
- expect(response.body).to match '\"alt\":\"doc_sample.txt\"'
- expect(response.body).to match "\"url\":\"/uploads"
- end
- end
+ let(:model) { create(:project, :public) }
+ let(:params) do
+ { namespace_id: model.namespace.to_param, project_id: model }
end
- describe "GET #show" do
- let(:go) do
- get :show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- secret: "123456",
- filename: "image.jpg"
- end
-
- context "when the project is public" do
- before do
- project.update_attribute(:visibility_level, Project::PUBLIC)
- end
-
- context "when not signed in" do
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
-
- context "when signed in" do
- before do
- sign_in(user)
- end
-
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
- end
-
- context "when the project is private" do
- before do
- project.update_attribute(:visibility_level, Project::PRIVATE)
- end
-
- context "when not signed in" do
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- context "when the file is an image" do
- before do
- allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file is not an image" do
- it "redirects to the sign in page" do
- go
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
- end
-
- context "when the file doesn't exist" do
- it "redirects to the sign in page" do
- go
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
- end
-
- context "when signed in" do
- before do
- sign_in(user)
- end
-
- context "when the user has access to the project" do
- before do
- project.team << [user, :master]
- end
-
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
-
- context "when the user doesn't have access to the project" do
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- context "when the file is an image" do
- before do
- allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file is not an image" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
- end
- end
- end
+ it_behaves_like 'handle uploads'
end
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index 3222c41c3d8..e18f1a6bd4a 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -4,5 +4,21 @@ FactoryGirl.define do
path { "uploads/-/system/project/avatar/avatar.jpg" }
size 100.kilobytes
uploader "AvatarUploader"
+
+ trait :personal_snippet do
+ model { build(:personal_snippet) }
+ uploader "PersonalFileUploader"
+ end
+
+ trait :issuable_upload do
+ path { "#{SecureRandom.hex}/myfile.jpg" }
+ uploader "FileUploader"
+ end
+
+ trait :namespace_upload do
+ path { "#{SecureRandom.hex}/myfile.jpg" }
+ model { build(:group) }
+ uploader "NamespaceFileUploader"
+ end
end
end
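
A short usage sketch for the new traits (hypothetical spec code, assuming the factory above):

# Each trait selects the matching uploader and, where needed, builds an owning model.
group_upload   = create(:upload, :namespace_upload)   # NamespaceFileUploader, model is a group
snippet_upload = create(:upload, :personal_snippet)   # PersonalFileUploader, model is a personal snippet
issuable       = build(:upload, :issuable_upload)     # FileUploader with a random hex path
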
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 95d637265e0..c31b636d67f 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -220,6 +220,89 @@ feature 'GFM autocomplete', :js do
end
end
+ # This context has just one example in each context in order to improve spec performance.
+ context 'labels' do
+ let!(:backend) { create(:label, project: project, title: 'backend') }
+ let!(:bug) { create(:label, project: project, title: 'bug') }
+ let!(:feature_proposal) { create(:label, project: project, title: 'feature proposal') }
+
+ context 'when no labels are assigned' do
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show no labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(not_shown: [backend, bug, feature_proposal])
+ end
+ end
+
+ context 'when some labels are assigned' do
+ before do
+ issue.labels << [backend]
+ end
+
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show only unset labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(shown: [bug, feature_proposal], not_shown: [backend])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show only set labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(shown: [backend], not_shown: [bug, feature_proposal])
+ end
+ end
+
+ context 'when all labels are assigned' do
+ before do
+ issue.labels << [backend, bug, feature_proposal]
+ end
+
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show no labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(not_shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+ end
+ end
+ end
+
+ private
+
def expect_to_wrap(should_wrap, item, note, value)
expect(item).to have_content(value)
expect(item).not_to have_content("\"#{value}\"")
@@ -232,4 +315,27 @@ feature 'GFM autocomplete', :js do
expect(note.value).not_to include("\"#{value}\"")
end
end
+
+ def expect_labels(shown: nil, not_shown: nil)
+ page.within('.atwho-container') do
+ if shown
+ expect(page).to have_selector('.atwho-view li', count: shown.size)
+ shown.each { |label| expect(page).to have_content(label.title) }
+ end
+
+ if not_shown
+ expect(page).not_to have_selector('.atwho-view li') unless shown
+ not_shown.each { |label| expect(page).not_to have_content(label.title) }
+ end
+ end
+ end
+
+ # `note` is a textarea where the given text should be typed.
+ # We don't want to find it each time this function gets called.
+ def type(note, text)
+ page.within('.timeline-content-form') do
+ note.set('')
+ note.native.send_keys(text)
+ end
+ end
end
diff --git a/spec/features/markdown_spec.rb b/spec/features/markdown_spec.rb
index cc1b187ff54..e285befc66f 100644
--- a/spec/features/markdown_spec.rb
+++ b/spec/features/markdown_spec.rb
@@ -207,8 +207,9 @@ describe 'GitLab Markdown' do
before do
@feat = MarkdownFeature.new
- # `markdown` helper expects a `@project` variable
+ # `markdown` helper expects `@project` and `@group` variables
@project = @feat.project
+ @group = @feat.group
end
context 'default pipeline' do
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 5a00b463960..67b8901f8fb 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -46,15 +46,15 @@ feature 'Gcp Cluster', :js do
end
it 'user sees a cluster details page and creation status' do
- expect(page).to have_content('Cluster is being created on Google Container Engine...')
+ expect(page).to have_content('Cluster is being created on Google Kubernetes Engine...')
Clusters::Cluster.last.provider.make_created!
- expect(page).to have_content('Cluster was successfully created on Google Container Engine')
+ expect(page).to have_content('Cluster was successfully created on Google Kubernetes Engine')
end
it 'user sees an error if something goes wrong during creation' do
- expect(page).to have_content('Cluster is being created on Google Container Engine...')
+ expect(page).to have_content('Cluster is being created on Google Kubernetes Engine...')
Clusters::Cluster.last.provider.make_errored!('Something wrong!')
diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js
index a2394857b82..fdfc59a6f12 100644
--- a/spec/javascripts/search_autocomplete_spec.js
+++ b/spec/javascripts/search_autocomplete_spec.js
@@ -191,8 +191,6 @@ import '~/lib/utils/common_utils';
// browsers will not trigger default behavior (form submit, in this
// example) on JavaScript-created keypresses.
expect(submitSpy).not.toHaveBeenTriggered();
- // Does a worse job at capturing the intent of the test, but works.
- expect(enterKeyEvent.isDefaultPrevented()).toBe(true);
});
});
}).call(window);
diff --git a/spec/lib/banzai/cross_project_reference_spec.rb b/spec/lib/banzai/cross_project_reference_spec.rb
index d70749536b8..68ca960caab 100644
--- a/spec/lib/banzai/cross_project_reference_spec.rb
+++ b/spec/lib/banzai/cross_project_reference_spec.rb
@@ -3,20 +3,20 @@ require 'spec_helper'
describe Banzai::CrossProjectReference do
include described_class
- describe '#project_from_ref' do
+ describe '#parent_from_ref' do
context 'when no project was referenced' do
it 'returns the project from context' do
project = double
allow(self).to receive(:context).and_return({ project: project })
- expect(project_from_ref(nil)).to eq project
+ expect(parent_from_ref(nil)).to eq project
end
end
context 'when referenced project does not exist' do
it 'returns nil' do
- expect(project_from_ref('invalid/reference')).to be_nil
+ expect(parent_from_ref('invalid/reference')).to be_nil
end
end
@@ -27,7 +27,7 @@ describe Banzai::CrossProjectReference do
expect(Project).to receive(:find_by_full_path)
.with('cross/reference').and_return(project2)
- expect(project_from_ref('cross/reference')).to eq project2
+ expect(parent_from_ref('cross/reference')).to eq project2
end
end
end
diff --git a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb b/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
index 7c0ba9ee67f..1e82d18d056 100644
--- a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
@@ -3,67 +3,67 @@ require 'spec_helper'
describe Banzai::Filter::AbstractReferenceFilter do
let(:project) { create(:project) }
- describe '#references_per_project' do
- it 'returns a Hash containing references grouped per project paths' do
+ describe '#references_per_parent' do
+ it 'returns a Hash containing references grouped per parent paths' do
doc = Nokogiri::HTML.fragment("#1 #{project.full_path}#2")
filter = described_class.new(doc, project: project)
expect(filter).to receive(:object_class).exactly(4).times.and_return(Issue)
expect(filter).to receive(:object_sym).twice.and_return(:issue)
- refs = filter.references_per_project
+ refs = filter.references_per_parent
expect(refs).to be_an_instance_of(Hash)
expect(refs[project.full_path]).to eq(Set.new(%w[1 2]))
end
end
- describe '#projects_per_reference' do
- it 'returns a Hash containing projects grouped per project paths' do
+ describe '#parent_per_reference' do
+ it 'returns a Hash containing projects grouped per parent paths' do
doc = Nokogiri::HTML.fragment('')
filter = described_class.new(doc, project: project)
- expect(filter).to receive(:references_per_project)
+ expect(filter).to receive(:references_per_parent)
.and_return({ project.full_path => Set.new(%w[1]) })
- expect(filter.projects_per_reference)
+ expect(filter.parent_per_reference)
.to eq({ project.full_path => project })
end
end
- describe '#find_projects_for_paths' do
+ describe '#find_for_paths' do
let(:doc) { Nokogiri::HTML.fragment('') }
let(:filter) { described_class.new(doc, project: project) }
context 'with RequestStore disabled' do
it 'returns a list of Projects for a list of paths' do
- expect(filter.find_projects_for_paths([project.full_path]))
+ expect(filter.find_for_paths([project.full_path]))
.to eq([project])
end
it "return an empty array for paths that don't exist" do
- expect(filter.find_projects_for_paths(['nonexistent/project']))
+ expect(filter.find_for_paths(['nonexistent/project']))
.to eq([])
end
end
context 'with RequestStore enabled', :request_store do
it 'returns a list of Projects for a list of paths' do
- expect(filter.find_projects_for_paths([project.full_path]))
+ expect(filter.find_for_paths([project.full_path]))
.to eq([project])
end
context "when no project with that path exists" do
it "returns no value" do
- expect(filter.find_projects_for_paths(['nonexistent/project']))
+ expect(filter.find_for_paths(['nonexistent/project']))
.to eq([])
end
it "adds the ref to the project refs cache" do
project_refs_cache = {}
- allow(filter).to receive(:project_refs_cache).and_return(project_refs_cache)
+ allow(filter).to receive(:refs_cache).and_return(project_refs_cache)
- filter.find_projects_for_paths(['nonexistent/project'])
+ filter.find_for_paths(['nonexistent/project'])
expect(project_refs_cache).to eq({ 'nonexistent/project' => nil })
end
@@ -71,11 +71,11 @@ describe Banzai::Filter::AbstractReferenceFilter do
context 'when the project refs cache includes nil values' do
before do
# adds { 'nonexistent/project' => nil } to cache
- filter.project_from_ref_cached('nonexistent/project')
+ filter.from_ref_cached('nonexistent/project')
end
it "return an empty array for paths that don't exist" do
- expect(filter.find_projects_for_paths(['nonexistent/project']))
+ expect(filter.find_for_paths(['nonexistent/project']))
.to eq([])
end
end
@@ -83,12 +83,12 @@ describe Banzai::Filter::AbstractReferenceFilter do
end
end
- describe '#current_project_path' do
- it 'returns the path of the current project' do
+ describe '#current_parent_path' do
+ it 'returns the path of the current parent' do
doc = Nokogiri::HTML.fragment('')
filter = described_class.new(doc, project: project)
- expect(filter.current_project_path).to eq(project.full_path)
+ expect(filter.current_parent_path).to eq(project.full_path)
end
end
end
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
index f70c69ef588..3a5f52ea23f 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
@@ -157,6 +157,12 @@ describe Banzai::Filter::IssueReferenceFilter do
expect(doc.text).to eq("Fixed (#{project2.full_path}##{issue.iid}.)")
end
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
+
it 'ignores invalid issue IDs on the referenced project' do
exp = act = "Fixed #{invalidate_reference(reference)}"
@@ -201,6 +207,12 @@ describe Banzai::Filter::IssueReferenceFilter do
expect(doc.text).to eq("Fixed (#{project2.path}##{issue.iid}.)")
end
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
+
it 'ignores invalid issue IDs on the referenced project' do
exp = act = "Fixed #{invalidate_reference(reference)}"
@@ -245,6 +257,12 @@ describe Banzai::Filter::IssueReferenceFilter do
expect(doc.text).to eq("Fixed (#{project2.path}##{issue.iid}.)")
end
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
+
it 'ignores invalid issue IDs on the referenced project' do
exp = act = "Fixed #{invalidate_reference(reference)}"
@@ -269,8 +287,15 @@ describe Banzai::Filter::IssueReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference}.)")
+
expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(issue.to_reference(project))} \(comment 123\)<\/a>\.\)/)
end
+
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
end
context 'cross-project reference in link href' do
@@ -291,8 +316,15 @@ describe Banzai::Filter::IssueReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference_link}.)")
+
expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/)
end
+
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference_link}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
end
context 'cross-project URL in link href' do
@@ -313,8 +345,15 @@ describe Banzai::Filter::IssueReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference_link}.)")
+
expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/)
end
+
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference_link}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
end
context 'group context' do
@@ -387,19 +426,19 @@ describe Banzai::Filter::IssueReferenceFilter do
end
end
- describe '#issues_per_project' do
+ describe '#records_per_parent' do
context 'using an internal issue tracker' do
it 'returns a Hash containing the issues per project' do
doc = Nokogiri::HTML.fragment('')
filter = described_class.new(doc, project: project)
- expect(filter).to receive(:projects_per_reference)
+ expect(filter).to receive(:parent_per_reference)
.and_return({ project.full_path => project })
- expect(filter).to receive(:references_per_project)
+ expect(filter).to receive(:references_per_parent)
.and_return({ project.full_path => Set.new([issue.iid]) })
- expect(filter.issues_per_project)
+ expect(filter.records_per_parent)
.to eq({ project => { issue.iid => issue } })
end
end
diff --git a/spec/lib/banzai/filter/upload_link_filter_spec.rb b/spec/lib/banzai/filter/upload_link_filter_spec.rb
index 60a88e903ef..76bc0c36ab7 100644
--- a/spec/lib/banzai/filter/upload_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/upload_link_filter_spec.rb
@@ -89,7 +89,35 @@ describe Banzai::Filter::UploadLinkFilter do
end
end
- context 'when project context does not exist' do
+ context 'in group context' do
+ let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
+ let(:group) { create(:group) }
+ let(:filter_context) { { project: nil, group: group } }
+ let(:relative_path) { "groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
+
+ it 'rewrites the link correctly' do
+ doc = raw_filter(upload_link, filter_context)
+
+ expect(doc.at_css('a')['href']).to eq("#{Gitlab.config.gitlab.url}/#{relative_path}")
+ end
+
+ it 'rewrites the link correctly for subgroup' do
+ subgroup = create(:group, parent: group)
+ relative_path = "groups/#{subgroup.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg"
+
+ doc = raw_filter(upload_link, { project: nil, group: subgroup })
+
+ expect(doc.at_css('a')['href']).to eq("#{Gitlab.config.gitlab.url}/#{relative_path}")
+ end
+
+ it 'does not modify absolute URL' do
+ doc = filter(link('http://example.com'), filter_context)
+
+ expect(doc.at_css('a')['href']).to eq 'http://example.com'
+ end
+ end
+
+ context 'when project or group context does not exist' do
let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
it 'does not raise error' do
diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
index 23dbe2b6238..4cef3bdb24b 100644
--- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
@@ -70,12 +70,12 @@ describe Banzai::ReferenceParser::IssueParser do
end
end
- describe '#issues_for_nodes' do
+ describe '#records_for_nodes' do
it 'returns a Hash containing the issues for a list of nodes' do
link['data-issue'] = issue.id.to_s
nodes = [link]
- expect(subject.issues_for_nodes(nodes)).to eq({ link => issue })
+ expect(subject.records_for_nodes(nodes)).to eq({ link => issue })
end
end
end
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
new file mode 100644
index 00000000000..b80df6956b0
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
@@ -0,0 +1,510 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do
+ include TrackUntrackedUploadsHelpers
+
+ subject { described_class.new }
+
+ let!(:untracked_files_for_uploads) { described_class::UntrackedFile }
+ let!(:uploads) { described_class::Upload }
+
+ before do
+ DatabaseCleaner.clean
+ drop_temp_table_if_exists
+ ensure_temporary_tracking_table_exists
+ uploads.delete_all
+ end
+
+ after(:all) do
+ drop_temp_table_if_exists
+ end
+
+ context 'with untracked files and tracked files in untracked_files_for_uploads' do
+ let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
+ let!(:user1) { create(:user, :with_avatar) }
+ let!(:user2) { create(:user, :with_avatar) }
+ let!(:project1) { create(:project, :with_avatar) }
+ let!(:project2) { create(:project, :with_avatar) }
+
+ before do
+ UploadService.new(project1, uploaded_file, FileUploader).execute # Markdown upload
+ UploadService.new(project2, uploaded_file, FileUploader).execute # Markdown upload
+
+ # File records created by PrepareUntrackedUploads
+ untracked_files_for_uploads.create!(path: appearance.uploads.first.path)
+ untracked_files_for_uploads.create!(path: appearance.uploads.last.path)
+ untracked_files_for_uploads.create!(path: user1.uploads.first.path)
+ untracked_files_for_uploads.create!(path: user2.uploads.first.path)
+ untracked_files_for_uploads.create!(path: project1.uploads.first.path)
+ untracked_files_for_uploads.create!(path: project2.uploads.first.path)
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project1.full_path}/#{project1.uploads.last.path}")
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/#{project2.uploads.last.path}")
+
+ # Untrack 4 files
+ user2.uploads.delete_all
+ project2.uploads.delete_all # 2 files: avatar and a Markdown upload
+ appearance.uploads.where("path like '%header_logo%'").delete_all
+ end
+
+ it 'adds untracked files to the uploads table' do
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { uploads.count }.from(4).to(8)
+
+ expect(user2.uploads.count).to eq(1)
+ expect(project2.uploads.count).to eq(2)
+ expect(appearance.uploads.count).to eq(2)
+ end
+
+ it 'deletes rows after processing them' do
+ expect(subject).to receive(:drop_temp_table_if_finished) # Don't drop the table so we can look at it
+
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { untracked_files_for_uploads.count }.from(8).to(0)
+ end
+
+ it 'does not create duplicate uploads of already tracked files' do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+
+ expect(user1.uploads.count).to eq(1)
+ expect(project1.uploads.count).to eq(2)
+ expect(appearance.uploads.count).to eq(2)
+ end
+
+ it 'uses the start and end batch ids [only 1st half]' do
+ ids = untracked_files_for_uploads.all.order(:id).pluck(:id)
+ start_id = ids[0]
+ end_id = ids[3]
+
+ expect do
+ subject.perform(start_id, end_id)
+ end.to change { uploads.count }.from(4).to(6)
+
+ expect(user1.uploads.count).to eq(1)
+ expect(user2.uploads.count).to eq(1)
+ expect(appearance.uploads.count).to eq(2)
+ expect(project1.uploads.count).to eq(2)
+ expect(project2.uploads.count).to eq(0)
+
+ # Only 4 have been either confirmed or added to uploads
+ expect(untracked_files_for_uploads.count).to eq(4)
+ end
+
+ it 'uses the start and end batch ids [only 2nd half]' do
+ ids = untracked_files_for_uploads.all.order(:id).pluck(:id)
+ start_id = ids[4]
+ end_id = ids[7]
+
+ expect do
+ subject.perform(start_id, end_id)
+ end.to change { uploads.count }.from(4).to(6)
+
+ expect(user1.uploads.count).to eq(1)
+ expect(user2.uploads.count).to eq(0)
+ expect(appearance.uploads.count).to eq(1)
+ expect(project1.uploads.count).to eq(2)
+ expect(project2.uploads.count).to eq(2)
+
+ # Only 4 have been either confirmed or added to uploads
+ expect(untracked_files_for_uploads.count).to eq(4)
+ end
+
+ it 'does not drop the temporary tracking table after processing the batch, if there are still untracked rows' do
+ subject.perform(1, untracked_files_for_uploads.last.id - 1)
+
+ expect(ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)).to be_truthy
+ end
+
+ it 'drops the temporary tracking table after processing the batch, if there are no untracked rows left' do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+
+ expect(ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)).to be_falsey
+ end
+
+ it 'does not block a whole batch because of one bad path' do
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/._7d37bf4c747916390e596744117d5d1a")
+ expect(untracked_files_for_uploads.count).to eq(9)
+ expect(uploads.count).to eq(4)
+
+ subject.perform(1, untracked_files_for_uploads.last.id)
+
+ expect(untracked_files_for_uploads.count).to eq(1)
+ expect(uploads.count).to eq(8)
+ end
+
+ it 'shows an unparseable path in the error output' do
+ bad_path = "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/._7d37bf4c747916390e596744117d5d1a"
+ untracked_files_for_uploads.create!(path: bad_path)
+
+ expect(Rails.logger).to receive(:error).with(/Error parsing path "#{bad_path}":/)
+
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end
+ end
+
+ context 'with no untracked files' do
+ it 'does not add to the uploads table (and does not raise error)' do
+ expect do
+ subject.perform(1, 1000)
+ end.not_to change { uploads.count }.from(0)
+ end
+ end
+
+ describe 'upload outcomes for each path pattern' do
+ shared_examples_for 'non_markdown_file' do
+ let!(:expected_upload_attrs) { model.uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum') }
+ let!(:untracked_file) { untracked_files_for_uploads.create!(path: expected_upload_attrs['path']) }
+
+ before do
+ model.uploads.delete_all
+ end
+
+ it 'creates an Upload record' do
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { model.reload.uploads.count }.from(0).to(1)
+
+ expect(model.uploads.first.attributes).to include(expected_upload_attrs)
+ end
+ end
+
+ context 'for an appearance logo file path' do
+ let(:model) { create_or_update_appearance(logo: uploaded_file) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for an appearance header_logo file path' do
+ let(:model) { create_or_update_appearance(header_logo: uploaded_file) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ class Note < ActiveRecord::Base
+ has_many :uploads, as: :model, dependent: :destroy
+ end
+
+ let(:model) { create(:note, :with_attachment) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a user avatar file path' do
+ let(:model) { create(:user, :with_avatar) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a group avatar file path' do
+ let(:model) { create(:group, :with_avatar) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a project avatar file path' do
+ let(:model) { create(:project, :with_avatar) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ let(:model) { create(:project) }
+
+ before do
+ # Upload the file
+ UploadService.new(model, uploaded_file, FileUploader).execute
+
+ # Create the untracked_files_for_uploads record
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{model.full_path}/#{model.uploads.first.path}")
+
+ # Save the expected upload attributes
+ @expected_upload_attrs = model.reload.uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum')
+
+ # Untrack the file
+ model.reload.uploads.delete_all
+ end
+
+ it 'creates an Upload record' do
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { model.reload.uploads.count }.from(0).to(1)
+
+ expect(model.uploads.first.attributes).to include(@expected_upload_attrs)
+ end
+ end
+ end
+end
+
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
+ include TrackUntrackedUploadsHelpers
+
+ let(:upload_class) { Gitlab::BackgroundMigration::PopulateUntrackedUploads::Upload }
+
+ before(:all) do
+ ensure_temporary_tracking_table_exists
+ end
+
+ after(:all) do
+ drop_temp_table_if_exists
+ end
+
+ describe '#upload_path' do
+ def assert_upload_path(file_path, expected_upload_path)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.upload_path).to eq(expected_upload_path)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/appearance/logo/1/some_logo.jpg', 'uploads/-/system/appearance/logo/1/some_logo.jpg')
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/appearance/header_logo/1/some_logo.jpg', 'uploads/-/system/appearance/header_logo/1/some_logo.jpg')
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/note/attachment/1234/some_attachment.pdf', 'uploads/-/system/note/attachment/1234/some_attachment.pdf')
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/user/avatar/1234/avatar.jpg', 'uploads/-/system/user/avatar/1234/avatar.jpg')
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/group/avatar/1234/avatar.jpg', 'uploads/-/system/group/avatar/1234/avatar.jpg')
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/project/avatar/1234/avatar.jpg', 'uploads/-/system/project/avatar/1234/avatar.jpg')
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns the file path relative to the project directory in uploads' do
+ project = create(:project)
+ random_hex = SecureRandom.hex
+
+ assert_upload_path("/#{project.full_path}/#{random_hex}/Some file.jpg", "#{random_hex}/Some file.jpg")
+ end
+ end
+ end
+
+ describe '#uploader' do
+ def assert_uploader(file_path, expected_uploader)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.uploader).to eq(expected_uploader)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns AttachmentUploader as a string' do
+ assert_uploader('/-/system/appearance/logo/1/some_logo.jpg', 'AttachmentUploader')
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns AttachmentUploader as a string' do
+ assert_uploader('/-/system/appearance/header_logo/1/some_logo.jpg', 'AttachmentUploader')
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns AttachmentUploader as a string' do
+ assert_uploader('/-/system/note/attachment/1234/some_attachment.pdf', 'AttachmentUploader')
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns AvatarUploader as a string' do
+ assert_uploader('/-/system/user/avatar/1234/avatar.jpg', 'AvatarUploader')
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns AvatarUploader as a string' do
+ assert_uploader('/-/system/group/avatar/1234/avatar.jpg', 'AvatarUploader')
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns AvatarUploader as a string' do
+ assert_uploader('/-/system/project/avatar/1234/avatar.jpg', 'AvatarUploader')
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns FileUploader as a string' do
+ project = create(:project)
+
+ assert_uploader("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'FileUploader')
+ end
+ end
+ end
+
+ describe '#model_type' do
+ def assert_model_type(file_path, expected_model_type)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.model_type).to eq(expected_model_type)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns Appearance as a string' do
+ assert_model_type('/-/system/appearance/logo/1/some_logo.jpg', 'Appearance')
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns Appearance as a string' do
+ assert_model_type('/-/system/appearance/header_logo/1/some_logo.jpg', 'Appearance')
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns Note as a string' do
+ assert_model_type('/-/system/note/attachment/1234/some_attachment.pdf', 'Note')
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns User as a string' do
+ assert_model_type('/-/system/user/avatar/1234/avatar.jpg', 'User')
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns Namespace as a string' do
+ assert_model_type('/-/system/group/avatar/1234/avatar.jpg', 'Namespace')
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns Project as a string' do
+ assert_model_type('/-/system/project/avatar/1234/avatar.jpg', 'Project')
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns Project as a string' do
+ project = create(:project)
+
+ assert_model_type("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'Project')
+ end
+ end
+ end
+
+ describe '#model_id' do
+ def assert_model_id(file_path, expected_model_id)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.model_id).to eq(expected_model_id)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns the ID' do
+ assert_model_id('/-/system/appearance/logo/1/some_logo.jpg', 1)
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns the ID' do
+ assert_model_id('/-/system/appearance/header_logo/1/some_logo.jpg', 1)
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns the ID' do
+ assert_model_id('/-/system/note/attachment/1234/some_attachment.pdf', 1234)
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns the ID' do
+ assert_model_id('/-/system/user/avatar/1234/avatar.jpg', 1234)
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns the ID' do
+ assert_model_id('/-/system/group/avatar/1234/avatar.jpg', 1234)
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns the ID' do
+ assert_model_id('/-/system/project/avatar/1234/avatar.jpg', 1234)
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns the ID' do
+ project = create(:project)
+
+ assert_model_id("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", project.id)
+ end
+ end
+ end
+
+ describe '#file_size' do
+ context 'for an appearance logo file path' do
+ let(:appearance) { create_or_update_appearance(logo: uploaded_file) }
+ let(:untracked_file) { described_class.create!(path: appearance.uploads.first.path) }
+
+ it 'returns the file size' do
+ expect(untracked_file.file_size).to eq(35255)
+ end
+
+ it 'returns the same thing that CarrierWave would return' do
+ expect(untracked_file.file_size).to eq(appearance.logo.size)
+ end
+ end
+
+ context 'for a project avatar file path' do
+ let(:project) { create(:project, avatar: uploaded_file) }
+ let(:untracked_file) { described_class.create!(path: project.uploads.first.path) }
+
+ it 'returns the file size' do
+ expect(untracked_file.file_size).to eq(35255)
+ end
+
+ it 'returns the same thing that CarrierWave would return' do
+ expect(untracked_file.file_size).to eq(project.avatar.size)
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ let(:project) { create(:project) }
+ let(:untracked_file) { create_untracked_file("/#{project.full_path}/#{project.uploads.first.path}") }
+
+ before do
+ UploadService.new(project, uploaded_file, FileUploader).execute
+ end
+
+ it 'returns the file size' do
+ expect(untracked_file.file_size).to eq(35255)
+ end
+
+ it 'returns the same thing that CarrierWave would return' do
+ expect(untracked_file.file_size).to eq(project.uploads.first.size)
+ end
+ end
+ end
+
+ def create_untracked_file(path_relative_to_upload_dir)
+ described_class.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}#{path_relative_to_upload_dir}")
+ end
+end
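
The #upload_path, #uploader, #model_type and #model_id examples above all describe how an untracked path is decomposed. A hypothetical sketch of that kind of decomposition, consistent with the expectations above but not the migration's actual implementation:

# Hypothetical helper for illustration only.
SYSTEM_UPLOAD_PATTERN = %r{\Auploads/-/system/(?<object>[^/]+)/[^/]+/(?<id>\d+)/}
MODEL_TYPES = {
  'appearance' => 'Appearance',
  'note'       => 'Note',
  'user'       => 'User',
  'group'      => 'Namespace', # group avatars belong to Namespace records
  'project'    => 'Project'
}.freeze

def model_type_and_id(path)
  match = SYSTEM_UPLOAD_PATTERN.match(path)
  # Markdown uploads live under the project full path and are resolved separately.
  return ['Project', nil] unless match

  [MODEL_TYPES.fetch(match[:object]), match[:id].to_i]
end

model_type_and_id('uploads/-/system/group/avatar/1234/avatar.jpg') # => ["Namespace", 1234]
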
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
new file mode 100644
index 00000000000..cd3f1a45270
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -0,0 +1,242 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
+ include TrackUntrackedUploadsHelpers
+
+ let!(:untracked_files_for_uploads) { described_class::UntrackedFile }
+
+ matcher :be_scheduled_migration do |*expected|
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ job['args'] == [migration, expected]
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+ end
+
+ before do
+ DatabaseCleaner.clean
+
+ drop_temp_table_if_exists
+ end
+
+ after do
+ drop_temp_table_if_exists
+ end
+
+ around do |example|
+ # Especially important so the follow-up migration does not get run
+ Sidekiq::Testing.fake! do
+ example.run
+ end
+ end
+
+ it 'ensures the untracked_files_for_uploads table exists' do
+ expect do
+ described_class.new.perform
+ end.to change { ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads) }.from(false).to(true)
+ end
+
+ it 'has a path field long enough for really long paths' do
+ described_class.new.perform
+
+ component = 'a' * 255
+
+ long_path = [
+ 'uploads',
+ component, # project.full_path
+ component # filename
+ ].flatten.join('/')
+
+ record = untracked_files_for_uploads.create!(path: long_path)
+ expect(record.reload.path.size).to eq(519)
+ end
+
+ context "test bulk insert with ON CONFLICT DO NOTHING or IGNORE" do
+ around do |example|
+ # If this is CI, we use Postgres 9.2 so this whole context should be
+ # skipped since we're unable to use ON CONFLICT DO NOTHING or IGNORE.
+ if described_class.new.send(:can_bulk_insert_and_ignore_duplicates?)
+ example.run
+ end
+ end
+
+ context 'when files were uploaded before and after hashed storage was enabled' do
+ let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
+ let!(:user) { create(:user, :with_avatar) }
+ let!(:project1) { create(:project, :with_avatar) }
+ let(:project2) { create(:project) } # instantiate after enabling hashed_storage
+
+ before do
+ # Markdown upload before enabling hashed_storage
+ UploadService.new(project1, uploaded_file, FileUploader).execute
+
+ stub_application_setting(hashed_storage_enabled: true)
+
+ # Markdown upload after enabling hashed_storage
+ UploadService.new(project2, uploaded_file, FileUploader).execute
+ end
+
+ it 'adds unhashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+
+ it 'adds files with paths relative to CarrierWave.root' do
+ described_class.new.perform
+ untracked_files_for_uploads.all.each do |file|
+ expect(file.path.start_with?('uploads/')).to be_truthy
+ end
+ end
+
+ it 'does not add hashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ hashed_file_path = project2.uploads.where(uploader: 'FileUploader').first.path
+ expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey
+ end
+
+ it 'correctly schedules the follow-up background migration jobs' do
+ described_class.new.perform
+
+ expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+
+ # E.g. from a previous failed run of this background migration
+ context 'when there is existing data in untracked_files_for_uploads' do
+ before do
+ described_class.new.perform
+ end
+
+ it 'does not error or produce duplicates of existing data' do
+ expect do
+ described_class.new.perform
+ end.not_to change { untracked_files_for_uploads.count }.from(5)
+ end
+ end
+
+ # E.g. The installation is in use at the time of migration, and someone has
+ # just uploaded a file
+ context 'when there are files in /uploads/tmp' do
+ let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+ before do
+ FileUtils.touch(tmp_file)
+ end
+
+ after do
+ FileUtils.rm(tmp_file)
+ end
+
+ it 'does not add files from /uploads/tmp' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+ end
+ end
+ end
+
+ context 'test bulk insert without ON CONFLICT DO NOTHING or IGNORE' do
+ before do
+ # If this is CI, we use Postgres 9.2 so this stub has no effect.
+ #
+ # If this is being run on Postgres 9.5+ or MySQL, then this stub allows us
+ # to test the bulk insert functionality without ON CONFLICT DO NOTHING or
+ # IGNORE.
+ allow_any_instance_of(described_class).to receive(:postgresql_pre_9_5?).and_return(true)
+ end
+
+ context 'when files were uploaded before and after hashed storage was enabled' do
+ let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
+ let!(:user) { create(:user, :with_avatar) }
+ let!(:project1) { create(:project, :with_avatar) }
+ let(:project2) { create(:project) } # instantiate after enabling hashed_storage
+
+ before do
+ # Markdown upload before enabling hashed_storage
+ UploadService.new(project1, uploaded_file, FileUploader).execute
+
+ stub_application_setting(hashed_storage_enabled: true)
+
+ # Markdown upload after enabling hashed_storage
+ UploadService.new(project2, uploaded_file, FileUploader).execute
+ end
+
+ it 'adds unhashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+
+ it 'adds files with paths relative to CarrierWave.root' do
+ described_class.new.perform
+ untracked_files_for_uploads.all.each do |file|
+ expect(file.path.start_with?('uploads/')).to be_truthy
+ end
+ end
+
+ it 'does not add hashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ hashed_file_path = project2.uploads.where(uploader: 'FileUploader').first.path
+ expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey
+ end
+
+ it 'correctly schedules the follow-up background migration jobs' do
+ described_class.new.perform
+
+ expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+
+ # E.g. from a previous failed run of this background migration
+ context 'when there is existing data in untracked_files_for_uploads' do
+ before do
+ described_class.new.perform
+ end
+
+ it 'does not error or produce duplicates of existing data' do
+ expect do
+ described_class.new.perform
+ end.not_to change { untracked_files_for_uploads.count }.from(5)
+ end
+ end
+
+ # E.g. The installation is in use at the time of migration, and someone has
+ # just uploaded a file
+ context 'when there are files in /uploads/tmp' do
+ let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+ before do
+ FileUtils.touch(tmp_file)
+ end
+
+ after do
+ FileUtils.rm(tmp_file)
+ end
+
+ it 'does not add files from /uploads/tmp' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+ end
+ end
+ end
+
+ # Very new or lightly-used installations that are running this migration
+ # may not have an upload directory because they have no uploads.
+ context 'when no files were ever uploaded' do
+ it 'does not add to the untracked_files_for_uploads table (and does not raise error)' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(0)
+ end
+ end
+end
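
The two 'test bulk insert' contexts above exercise duplicate-safe bulk inserts, which depend on the database: PostgreSQL 9.5+ offers ON CONFLICT DO NOTHING, MySQL offers INSERT IGNORE, and older PostgreSQL has neither (hence the can_bulk_insert_and_ignore_duplicates? guard). A hedged sketch of the underlying SQL, assuming the Gitlab::Database.postgresql? helper; this is not the migration's actual implementation:

# Illustrative only; the real logic lives in Gitlab::BackgroundMigration::PrepareUntrackedUploads.
sql = if Gitlab::Database.postgresql?
        # PostgreSQL 9.5+ silently skips rows that would violate a unique index.
        "INSERT INTO untracked_files_for_uploads (path) VALUES ('uploads/foo.jpg') ON CONFLICT DO NOTHING"
      else
        # MySQL equivalent.
        "INSERT IGNORE INTO untracked_files_for_uploads (path) VALUES ('uploads/foo.jpg')"
      end

ActiveRecord::Base.connection.execute(sql)
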
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index a5657b81952..b2f13fae73f 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -221,6 +221,22 @@ describe Gitlab::Database do
described_class.bulk_insert('test', rows)
end
+ it 'does not quote values of a column in the disable_quote option' do
+ [1, 2, 4, 5].each do |i|
+ expect(connection).to receive(:quote).with(i)
+ end
+
+ described_class.bulk_insert('test', rows, disable_quote: :c)
+ end
+
+ it 'does not quote values of columns in the disable_quote option' do
+ [2, 5].each do |i|
+ expect(connection).to receive(:quote).with(i)
+ end
+
+ described_class.bulk_insert('test', rows, disable_quote: [:a, :c])
+ end
+
it 'handles non-UTF-8 data' do
expect { described_class.bulk_insert('test', [{ a: "\255" }]) }.not_to raise_error
end
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 476a3f1998d..ef874368077 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -250,4 +250,34 @@ describe Gitlab::ReferenceExtractor do
subject { described_class.references_pattern }
it { is_expected.to be_kind_of Regexp }
end
+
+ describe 'referables prefixes' do
+ def prefixes
+ described_class::REFERABLES.each_with_object({}) do |referable, result|
+ klass = referable.to_s.camelize.constantize
+
+ next unless klass.respond_to?(:reference_prefix)
+
+ prefix = klass.reference_prefix
+ result[prefix] ||= []
+ result[prefix] << referable
+ end
+ end
+
+ it 'returns all supported prefixes' do
+ expect(prefixes.keys.uniq).to match_array(%w(@ # ~ % ! $ &))
+ end
+
+ it 'does not allow one prefix for multiple referables unless specifically allowed' do
+ # make sure you are not overriding an existing prefix before changing this hash
+ multiple_allowed = {
+ '@' => 3
+ }
+
+ prefixes.each do |prefix, referables|
+ expected_count = multiple_allowed[prefix] || 1
+ expect(referables.count).to eq(expected_count)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 3137a72fdc4..e872a5290c5 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::Utils do
- delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, to: :described_class
+ delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which, to: :described_class
describe '.slugify' do
{
@@ -59,4 +59,12 @@ describe Gitlab::Utils do
expect(random_string).to be_kind_of(String)
end
end
+
+ describe '.which' do
+ it 'finds the full path to an executable binary' do
+ expect(File).to receive(:executable?).with('/bin/sh').and_return(true)
+
+ expect(which('sh', 'PATH' => '/bin')).to eq('/bin/sh')
+ end
+ end
end
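
The new .which example stubs File.executable?, so the helper presumably walks the given PATH entries and returns the first executable match. A conventional sketch consistent with that spec, not necessarily GitLab's exact implementation:

# Sketch of a PATH lookup matching the behaviour asserted above.
def which(cmd, env = ENV)
  env['PATH'].split(File::PATH_SEPARATOR).each do |path|
    exe = File.join(path, cmd)
    return exe if File.executable?(exe)
  end

  nil
end

which('sh', 'PATH' => '/bin') # => "/bin/sh" when /bin/sh is executable
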
diff --git a/spec/migrations/track_untracked_uploads_spec.rb b/spec/migrations/track_untracked_uploads_spec.rb
new file mode 100644
index 00000000000..7fe7a140e2f
--- /dev/null
+++ b/spec/migrations/track_untracked_uploads_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20171103140253_track_untracked_uploads')
+
+describe TrackUntrackedUploads, :migration, :sidekiq do
+ include TrackUntrackedUploadsHelpers
+
+ matcher :be_scheduled_migration do
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ job['args'] == [migration]
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+ end
+
+ it 'correctly schedules the follow-up background migration' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+ end
+end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index b27c1b2cd1a..03c96a8f5aa 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -2433,4 +2433,163 @@ describe User do
expect(user).not_to be_blocked
end
end
+
+ describe '#max_member_access_for_project_ids' do
+ shared_examples 'max member access for projects' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:owner_project) { create(:project, group: group) }
+ let(:master_project) { create(:project) }
+ let(:reporter_project) { create(:project) }
+ let(:developer_project) { create(:project) }
+ let(:guest_project) { create(:project) }
+ let(:no_access_project) { create(:project) }
+
+ let(:projects) do
+ [owner_project, master_project, reporter_project, developer_project, guest_project, no_access_project].map(&:id)
+ end
+
+ let(:expected) do
+ {
+ owner_project.id => Gitlab::Access::OWNER,
+ master_project.id => Gitlab::Access::MASTER,
+ reporter_project.id => Gitlab::Access::REPORTER,
+ developer_project.id => Gitlab::Access::DEVELOPER,
+ guest_project.id => Gitlab::Access::GUEST,
+ no_access_project.id => Gitlab::Access::NO_ACCESS
+ }
+ end
+
+ before do
+ create(:group_member, user: user, group: group)
+ master_project.add_master(user)
+ reporter_project.add_reporter(user)
+ developer_project.add_developer(user)
+ guest_project.add_guest(user)
+ end
+
+ it 'returns correct roles for different projects' do
+ expect(user.max_member_access_for_project_ids(projects)).to eq(expected)
+ end
+ end
+
+ context 'with RequestStore enabled', :request_store do
+ include_examples 'max member access for projects'
+
+ def access_levels(projects)
+ user.max_member_access_for_project_ids(projects)
+ end
+
+ it 'does not perform extra queries when asked for projects that have already been found' do
+ access_levels(projects)
+
+ expect { access_levels(projects) }.not_to exceed_query_limit(0)
+
+ expect(access_levels(projects)).to eq(expected)
+ end
+
+ it 'only requests the extra projects when uncached projects are passed' do
+ second_master_project = create(:project)
+ second_developer_project = create(:project)
+ second_master_project.add_master(user)
+ second_developer_project.add_developer(user)
+
+ all_projects = projects + [second_master_project.id, second_developer_project.id]
+
+ expected_all = expected.merge(second_master_project.id => Gitlab::Access::MASTER,
+ second_developer_project.id => Gitlab::Access::DEVELOPER)
+
+ access_levels(projects)
+
+ queries = ActiveRecord::QueryRecorder.new { access_levels(all_projects) }
+
+ expect(queries.count).to eq(1)
+ expect(queries.log_message).to match(/\W(#{second_master_project.id}, #{second_developer_project.id})\W/)
+ expect(access_levels(all_projects)).to eq(expected_all)
+ end
+ end
+
+ context 'with RequestStore disabled' do
+ include_examples 'max member access for projects'
+ end
+ end
+
+ describe '#max_member_access_for_group_ids' do
+ shared_examples 'max member access for groups' do
+ let(:user) { create(:user) }
+ let(:owner_group) { create(:group) }
+ let(:master_group) { create(:group) }
+ let(:reporter_group) { create(:group) }
+ let(:developer_group) { create(:group) }
+ let(:guest_group) { create(:group) }
+ let(:no_access_group) { create(:group) }
+
+ let(:groups) do
+ [owner_group, master_group, reporter_group, developer_group, guest_group, no_access_group].map(&:id)
+ end
+
+ let(:expected) do
+ {
+ owner_group.id => Gitlab::Access::OWNER,
+ master_group.id => Gitlab::Access::MASTER,
+ reporter_group.id => Gitlab::Access::REPORTER,
+ developer_group.id => Gitlab::Access::DEVELOPER,
+ guest_group.id => Gitlab::Access::GUEST,
+ no_access_group.id => Gitlab::Access::NO_ACCESS
+ }
+ end
+
+ before do
+ owner_group.add_owner(user)
+ master_group.add_master(user)
+ reporter_group.add_reporter(user)
+ developer_group.add_developer(user)
+ guest_group.add_guest(user)
+ end
+
+ it 'returns correct roles for different groups' do
+ expect(user.max_member_access_for_group_ids(groups)).to eq(expected)
+ end
+ end
+
+ context 'with RequestStore enabled', :request_store do
+ include_examples 'max member access for groups'
+
+ def access_levels(groups)
+ user.max_member_access_for_group_ids(groups)
+ end
+
+ it 'does not perform extra queries when asked for groups that have already been found' do
+ access_levels(groups)
+
+ expect { access_levels(groups) }.not_to exceed_query_limit(0)
+
+ expect(access_levels(groups)).to eq(expected)
+ end
+
+ it 'only requests the extra groups when uncached groups are passed' do
+ second_master_group = create(:group)
+ second_developer_group = create(:group)
+ second_master_group.add_master(user)
+ second_developer_group.add_developer(user)
+
+ all_groups = groups + [second_master_group.id, second_developer_group.id]
+
+ expected_all = expected.merge(second_master_group.id => Gitlab::Access::MASTER,
+ second_developer_group.id => Gitlab::Access::DEVELOPER)
+
+ access_levels(groups)
+
+ queries = ActiveRecord::QueryRecorder.new { access_levels(all_groups) }
+
+ expect(queries.count).to eq(1)
+ expect(queries.log_message).to match(/\W(#{second_master_group.id}, #{second_developer_group.id})\W/)
+ expect(access_levels(all_groups)).to eq(expected_all)
+ end
+ end
+
+ context 'with RequestStore disabled' do
+ include_examples 'max member access for groups'
+ end
+ end
end
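
The `:request_store` contexts above pin down a caching contract: repeated lookups for already-resolved ids must add zero queries, and a mixed lookup must issue exactly one query covering only the uncached ids. A hedged sketch of a lookup with that shape (method name and the Member scope are illustrative, not the actual User implementation):

# Illustrative memoised lookup matching the query pattern asserted above:
# cached ids are served from a per-request hash, only missing ids hit the DB.
def max_member_access_for_ids(ids, cache)
  missing = ids - cache.keys

  if missing.any?
    # one query covering just the uncached ids (scope and columns assumed)
    found = Member.where(source_id: missing).group(:source_id).maximum(:access_level)
    missing.each { |id| cache[id] = found.fetch(id, Gitlab::Access::NO_ACCESS) }
  end

  ids.each_with_object({}) { |id, result| result[id] = cache[id] }
end
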
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 4f4e634829d..b4d25e06d9a 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -9,6 +9,8 @@ describe GroupPolicy do
let(:admin) { create(:admin) }
let(:group) { create(:group) }
+ let(:guest_permissions) { [:read_group, :upload_file, :read_namespace] }
+
let(:reporter_permissions) { [:admin_label] }
let(:developer_permissions) { [:admin_milestones] }
@@ -52,6 +54,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
+ expect_disallowed(:upload_file)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -64,7 +67,7 @@ describe GroupPolicy do
let(:current_user) { guest }
it do
- expect_allowed(:read_group, :read_namespace)
+ expect_allowed(*guest_permissions)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -76,7 +79,7 @@ describe GroupPolicy do
let(:current_user) { reporter }
it do
- expect_allowed(:read_group, :read_namespace)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -88,7 +91,7 @@ describe GroupPolicy do
let(:current_user) { developer }
it do
- expect_allowed(:read_group, :read_namespace)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -100,7 +103,7 @@ describe GroupPolicy do
let(:current_user) { master }
it do
- expect_allowed(:read_group, :read_namespace)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -114,7 +117,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group, :read_namespace)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -128,7 +131,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group, :read_namespace)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -187,7 +190,7 @@ describe GroupPolicy do
let(:current_user) { nil }
it do
- expect_disallowed(:read_group)
+ expect_disallowed(*guest_permissions)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -199,7 +202,7 @@ describe GroupPolicy do
let(:current_user) { guest }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -211,7 +214,7 @@ describe GroupPolicy do
let(:current_user) { reporter }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -223,7 +226,7 @@ describe GroupPolicy do
let(:current_user) { developer }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -235,7 +238,7 @@ describe GroupPolicy do
let(:current_user) { master }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -249,7 +252,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index 07d7f96bd70..10e6a3c07c8 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -95,6 +95,12 @@ describe API::ProtectedBranches do
describe 'POST /projects/:id/protected_branches' do
let(:branch_name) { 'new_branch' }
+ let(:post_endpoint) { api("/projects/#{project.id}/protected_branches", user) }
+
+ def expect_protection_to_be_successful
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['name']).to eq(branch_name)
+ end
context 'when authenticated as a master' do
before do
@@ -102,7 +108,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch' do
- post api("/projects/#{project.id}/protected_branches", user), name: branch_name
+ post post_endpoint, name: branch_name
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -111,8 +117,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and developers can push' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 30
+ post post_endpoint, name: branch_name, push_access_level: 30
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -121,8 +126,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and developers can merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, merge_access_level: 30
+ post post_endpoint, name: branch_name, merge_access_level: 30
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -131,8 +135,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and developers can push and merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 30, merge_access_level: 30
+ post post_endpoint, name: branch_name, push_access_level: 30, merge_access_level: 30
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -141,8 +144,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and no one can push' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 0
+ post post_endpoint, name: branch_name, push_access_level: 0
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -151,8 +153,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and no one can merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, merge_access_level: 0
+ post post_endpoint, name: branch_name, merge_access_level: 0
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -161,8 +162,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and no one can push or merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 0, merge_access_level: 0
+ post post_endpoint, name: branch_name, push_access_level: 0, merge_access_level: 0
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -171,7 +171,8 @@ describe API::ProtectedBranches do
end
it 'returns a 409 error if the same branch is protected twice' do
- post api("/projects/#{project.id}/protected_branches", user), name: protected_name
+ post post_endpoint, name: protected_name
+
expect(response).to have_gitlab_http_status(409)
end
@@ -179,10 +180,9 @@ describe API::ProtectedBranches do
let(:branch_name) { 'feature/*' }
it "protects multiple branches with a wildcard in the name" do
- post api("/projects/#{project.id}/protected_branches", user), name: branch_name
+ post post_endpoint, name: branch_name
- expect(response).to have_gitlab_http_status(201)
- expect(json_response['name']).to eq(branch_name)
+ expect_protection_to_be_successful
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::MASTER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MASTER)
end
@@ -195,7 +195,7 @@ describe API::ProtectedBranches do
end
it "returns a 403 error if guest" do
- post api("/projects/#{project.id}/protected_branches/", user), name: branch_name
+ post post_endpoint, name: branch_name
expect(response).to have_gitlab_http_status(403)
end
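
The integer access levels posted throughout these examples are the standard Gitlab::Access constants; a short reference for reading the expectations:

# Access levels used in the requests above (standard GitLab values):
Gitlab::Access::NO_ACCESS # => 0,  "no one can push/merge"
Gitlab::Access::DEVELOPER # => 30, "developers can push/merge"
Gitlab::Access::MASTER    # => 40, the default protection level
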
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 53862283a27..4057caca2ac 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -3,210 +3,253 @@ require 'spec_helper'
describe Projects::ForkService do
include ProjectForksHelper
let(:gitlab_shell) { Gitlab::Shell.new }
+ context 'when forking a new project' do
+ describe 'fork by user' do
+ before do
+ @from_user = create(:user)
+ @from_namespace = @from_user.namespace
+ avatar = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+ @from_project = create(:project,
+ :repository,
+ creator_id: @from_user.id,
+ namespace: @from_namespace,
+ star_count: 107,
+ avatar: avatar,
+ description: 'wow such project')
+ @to_user = create(:user)
+ @to_namespace = @to_user.namespace
+ @from_project.add_user(@to_user, :developer)
+ end
- describe 'fork by user' do
- before do
- @from_user = create(:user)
- @from_namespace = @from_user.namespace
- avatar = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
- @from_project = create(:project,
- :repository,
- creator_id: @from_user.id,
- namespace: @from_namespace,
- star_count: 107,
- avatar: avatar,
- description: 'wow such project')
- @to_user = create(:user)
- @to_namespace = @to_user.namespace
- @from_project.add_user(@to_user, :developer)
- end
+ context 'fork project' do
+ context 'when forker is a guest' do
+ before do
+ @guest = create(:user)
+ @from_project.add_user(@guest, :guest)
+ end
+ subject { fork_project(@from_project, @guest) }
- context 'fork project' do
- context 'when forker is a guest' do
- before do
- @guest = create(:user)
- @from_project.add_user(@guest, :guest)
+ it { is_expected.not_to be_persisted }
+ it { expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden']) }
end
- subject { fork_project(@from_project, @guest) }
- it { is_expected.not_to be_persisted }
- it { expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden']) }
- end
+ describe "successfully creates project in the user namespace" do
+ let(:to_project) { fork_project(@from_project, @to_user, namespace: @to_user.namespace) }
- describe "successfully creates project in the user namespace" do
- let(:to_project) { fork_project(@from_project, @to_user, namespace: @to_user.namespace) }
-
- it { expect(to_project).to be_persisted }
- it { expect(to_project.errors).to be_empty }
- it { expect(to_project.owner).to eq(@to_user) }
- it { expect(to_project.namespace).to eq(@to_user.namespace) }
- it { expect(to_project.star_count).to be_zero }
- it { expect(to_project.description).to eq(@from_project.description) }
- it { expect(to_project.avatar.file).to be_exists }
-
- # This test is here because we had a bug where the from-project lost its
- # avatar after being forked.
- # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
- it "after forking the from-project still has its avatar" do
- # If we do not fork the project first we cannot detect the bug.
- expect(to_project).to be_persisted
-
- expect(@from_project.avatar.file).to be_exists
- end
+ it { expect(to_project).to be_persisted }
+ it { expect(to_project.errors).to be_empty }
+ it { expect(to_project.owner).to eq(@to_user) }
+ it { expect(to_project.namespace).to eq(@to_user.namespace) }
+ it { expect(to_project.star_count).to be_zero }
+ it { expect(to_project.description).to eq(@from_project.description) }
+ it { expect(to_project.avatar.file).to be_exists }
- it 'flushes the forks count cache of the source project' do
- expect(@from_project.forks_count).to be_zero
+ # This test is here because we had a bug where the from-project lost its
+ # avatar after being forked.
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
+ it "after forking the from-project still has its avatar" do
+ # If we do not fork the project first we cannot detect the bug.
+ expect(to_project).to be_persisted
- fork_project(@from_project, @to_user)
+ expect(@from_project.avatar.file).to be_exists
+ end
- expect(@from_project.forks_count).to eq(1)
- end
+ it 'flushes the forks count cache of the source project' do
+ expect(@from_project.forks_count).to be_zero
- it 'creates a fork network with the new project and the root project set' do
- to_project
- fork_network = @from_project.reload.fork_network
+ fork_project(@from_project, @to_user)
- expect(fork_network).not_to be_nil
- expect(fork_network.root_project).to eq(@from_project)
- expect(fork_network.projects).to contain_exactly(@from_project, to_project)
- end
- end
+ expect(@from_project.forks_count).to eq(1)
+ end
- context 'creating a fork of a fork' do
- let(:from_forked_project) { fork_project(@from_project, @to_user) }
- let(:other_namespace) do
- group = create(:group)
- group.add_owner(@to_user)
- group
- end
- let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace) }
+ it 'creates a fork network with the new project and the root project set' do
+ to_project
+ fork_network = @from_project.reload.fork_network
- it 'sets the root of the network to the root project' do
- expect(to_project.fork_network.root_project).to eq(@from_project)
+ expect(fork_network).not_to be_nil
+ expect(fork_network.root_project).to eq(@from_project)
+ expect(fork_network.projects).to contain_exactly(@from_project, to_project)
+ end
end
- it 'sets the forked_from_project on the membership' do
- expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
+ context 'creating a fork of a fork' do
+ let(:from_forked_project) { fork_project(@from_project, @to_user) }
+ let(:other_namespace) do
+ group = create(:group)
+ group.add_owner(@to_user)
+ group
+ end
+ let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace) }
+
+ it 'sets the root of the network to the root project' do
+ expect(to_project.fork_network.root_project).to eq(@from_project)
+ end
+
+ it 'sets the forked_from_project on the membership' do
+ expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
+ end
end
end
- end
- context 'project already exists' do
- it "fails due to validation, not transaction failure" do
- @existing_project = create(:project, :repository, creator_id: @to_user.id, name: @from_project.name, namespace: @to_namespace)
- @to_project = fork_project(@from_project, @to_user, namespace: @to_namespace)
- expect(@existing_project).to be_persisted
+ context 'project already exists' do
+ it "fails due to validation, not transaction failure" do
+ @existing_project = create(:project, :repository, creator_id: @to_user.id, name: @from_project.name, namespace: @to_namespace)
+ @to_project = fork_project(@from_project, @to_user, namespace: @to_namespace)
+ expect(@existing_project).to be_persisted
- expect(@to_project).not_to be_persisted
- expect(@to_project.errors[:name]).to eq(['has already been taken'])
- expect(@to_project.errors[:path]).to eq(['has already been taken'])
+ expect(@to_project).not_to be_persisted
+ expect(@to_project.errors[:name]).to eq(['has already been taken'])
+ expect(@to_project.errors[:path]).to eq(['has already been taken'])
+ end
end
- end
- context 'repository already exists' do
- let(:repository_storage) { 'default' }
- let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
+ context 'repository already exists' do
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
- before do
- gitlab_shell.add_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
- end
+ before do
+ gitlab_shell.add_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
+ end
- after do
- gitlab_shell.remove_repository(repository_storage_path, "#{@to_user.namespace.full_path}/#{@from_project.path}")
- end
+ after do
+ gitlab_shell.remove_repository(repository_storage_path, "#{@to_user.namespace.full_path}/#{@from_project.path}")
+ end
- it 'does not allow creation' do
- to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace)
+ it 'does not allow creation' do
+ to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace)
- expect(to_project).not_to be_persisted
- expect(to_project.errors.messages).to have_key(:base)
- expect(to_project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ expect(to_project).not_to be_persisted
+ expect(to_project.errors.messages).to have_key(:base)
+ expect(to_project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ end
end
- end
- context 'GitLab CI is enabled' do
- it "forks and enables CI for fork" do
- @from_project.enable_ci
- @to_project = fork_project(@from_project, @to_user)
- expect(@to_project.builds_enabled?).to be_truthy
+ context 'GitLab CI is enabled' do
+ it "forks and enables CI for fork" do
+ @from_project.enable_ci
+ @to_project = fork_project(@from_project, @to_user)
+ expect(@to_project.builds_enabled?).to be_truthy
+ end
end
- end
- context "when project has restricted visibility level" do
- context "and only one visibility level is restricted" do
- before do
- @from_project.update_attributes(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
+ context "when project has restricted visibility level" do
+ context "and only one visibility level is restricted" do
+ before do
+ @from_project.update_attributes(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
+ end
+
+ it "creates fork with highest allowed level" do
+ forked_project = fork_project(@from_project, @to_user)
+
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
end
- it "creates fork with highest allowed level" do
- forked_project = fork_project(@from_project, @to_user)
+ context "and all visibility levels are restricted" do
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE])
+ end
+
+ it "creates fork with private visibility levels" do
+ forked_project = fork_project(@from_project, @to_user)
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
end
end
+ end
- context "and all visibility levels are restricted" do
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE])
+ describe 'fork to namespace' do
+ before do
+ @group_owner = create(:user)
+ @developer = create(:user)
+ @project = create(:project, :repository,
+ creator_id: @group_owner.id,
+ star_count: 777,
+ description: 'Wow, such a cool project!')
+ @group = create(:group)
+ @group.add_user(@group_owner, GroupMember::OWNER)
+ @group.add_user(@developer, GroupMember::DEVELOPER)
+ @project.add_user(@developer, :developer)
+ @project.add_user(@group_owner, :developer)
+ @opts = { namespace: @group }
+ end
+
+ context 'fork project for group' do
+ it 'group owner successfully forks project into the group' do
+ to_project = fork_project(@project, @group_owner, @opts)
+
+ expect(to_project).to be_persisted
+ expect(to_project.errors).to be_empty
+ expect(to_project.owner).to eq(@group)
+ expect(to_project.namespace).to eq(@group)
+ expect(to_project.name).to eq(@project.name)
+ expect(to_project.path).to eq(@project.path)
+ expect(to_project.description).to eq(@project.description)
+ expect(to_project.star_count).to be_zero
end
+ end
- it "creates fork with private visibility levels" do
- forked_project = fork_project(@from_project, @to_user)
+ context 'fork project for group when user not owner' do
+ it 'group developer fails to fork project into the group' do
+ to_project = fork_project(@project, @developer, @opts)
+ expect(to_project.errors[:namespace]).to eq(['is not valid'])
+ end
+ end
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ context 'project already exists in group' do
+ it 'fails due to validation, not transaction failure' do
+ existing_project = create(:project, :repository,
+ name: @project.name,
+ namespace: @group)
+ to_project = fork_project(@project, @group_owner, @opts)
+ expect(existing_project.persisted?).to be_truthy
+ expect(to_project.errors[:name]).to eq(['has already been taken'])
+ expect(to_project.errors[:path]).to eq(['has already been taken'])
end
end
end
end
- describe 'fork to namespace' do
- before do
- @group_owner = create(:user)
- @developer = create(:user)
- @project = create(:project, :repository,
- creator_id: @group_owner.id,
- star_count: 777,
- description: 'Wow, such a cool project!')
- @group = create(:group)
- @group.add_user(@group_owner, GroupMember::OWNER)
- @group.add_user(@developer, GroupMember::DEVELOPER)
- @project.add_user(@developer, :developer)
- @project.add_user(@group_owner, :developer)
- @opts = { namespace: @group }
+ context 'when linking fork to an existing project' do
+ let(:fork_from_project) { create(:project, :public) }
+ let(:fork_to_project) { create(:project, :public) }
+ let(:user) { create(:user) }
+
+ subject { described_class.new(fork_from_project, user) }
+
+ def forked_from_project(project)
+ project.fork_network_member&.forked_from_project
end
- context 'fork project for group' do
- it 'group owner successfully forks project into the group' do
- to_project = fork_project(@project, @group_owner, @opts)
-
- expect(to_project).to be_persisted
- expect(to_project.errors).to be_empty
- expect(to_project.owner).to eq(@group)
- expect(to_project.namespace).to eq(@group)
- expect(to_project.name).to eq(@project.name)
- expect(to_project.path).to eq(@project.path)
- expect(to_project.description).to eq(@project.description)
- expect(to_project.star_count).to be_zero
+ context 'if project is already forked' do
+ it 'does not create fork relation' do
+ allow(fork_to_project).to receive(:forked?).and_return(true)
+ expect(forked_from_project(fork_to_project)).to be_nil
+ expect(subject.execute(fork_to_project)).to be_nil
+ expect(forked_from_project(fork_to_project)).to be_nil
end
end
- context 'fork project for group when user not owner' do
- it 'group developer fails to fork project into the group' do
- to_project = fork_project(@project, @developer, @opts)
- expect(to_project.errors[:namespace]).to eq(['is not valid'])
+ context 'if project is not forked' do
+ it 'creates fork relation' do
+ expect(fork_to_project.forked?).to be false
+ expect(forked_from_project(fork_to_project)).to be_nil
+
+ subject.execute(fork_to_project)
+
+ expect(fork_to_project.forked?).to be true
+ expect(forked_from_project(fork_to_project)).to eq fork_from_project
+ expect(fork_to_project.forked_from_project).to eq fork_from_project
end
- end
- context 'project already exists in group' do
- it 'fails due to validation, not transaction failure' do
- existing_project = create(:project, :repository,
- name: @project.name,
- namespace: @group)
- to_project = fork_project(@project, @group_owner, @opts)
- expect(existing_project.persisted?).to be_truthy
- expect(to_project.errors[:name]).to eq(['has already been taken'])
- expect(to_project.errors[:path]).to eq(['has already been taken'])
+ it 'flushes the forks count cache of the source project' do
+ expect(fork_from_project.forks_count).to be_zero
+
+ subject.execute(fork_to_project)
+
+ expect(fork_from_project.forks_count).to eq(1)
end
end
end
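
The 'linking fork to an existing project' context exercises the service's second mode, where `execute` receives an existing target project instead of creating one. A hedged usage sketch (the project and user lookups are stand-ins, not taken from the spec):

# Illustrative call sequence for the link-existing-project path tested above.
source = Project.find_by_full_path('some-group/source')      # stand-in lookup
target = Project.find_by_full_path('some-group/source-copy') # stand-in lookup
user   = User.find_by(username: 'maintainer')                # stand-in lookup

Projects::ForkService.new(source, user).execute(target)

target.fork_network_member.forked_from_project # => source, once linked
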
diff --git a/spec/support/google_api/cloud_platform_helpers.rb b/spec/support/google_api/cloud_platform_helpers.rb
index dabf0db7666..8a073e58db8 100644
--- a/spec/support/google_api/cloud_platform_helpers.rb
+++ b/spec/support/google_api/cloud_platform_helpers.rb
@@ -63,7 +63,7 @@ module GoogleApi
##
# gcloud container clusters create
- # https://cloud.google.com/container-engine/reference/rest/v1/projects.zones.clusters/create
+ # https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters/create
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/PerceivedComplexity
def cloud_platform_cluster_body(**options)
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
new file mode 100644
index 00000000000..935c08221e0
--- /dev/null
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -0,0 +1,240 @@
+shared_examples 'handle uploads' do
+ let(:user) { create(:user) }
+ let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
+ let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
+
+ describe "POST #create" do
+ context 'when a user is not authorized to upload a file' do
+ it 'returns 404 status' do
+ post :create, params.merge(file: jpg, format: :json)
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'when a user can upload a file' do
+ before do
+ sign_in(user)
+ model.add_developer(user)
+ end
+
+ context "without params['file']" do
+ it "returns an error" do
+ post :create, params.merge(format: :json)
+
+ expect(response).to have_gitlab_http_status(422)
+ end
+ end
+
+ context 'with valid image' do
+ before do
+ post :create, params.merge(file: jpg, format: :json)
+ end
+
+ it 'returns content with the original filename, new link, and correct type' do
+ expect(response.body).to match '\"alt\":\"rails_sample\"'
+ expect(response.body).to match "\"url\":\"/uploads"
+ end
+
+ # NOTE: This is as close as we're getting to an Integration test for this
+ # behavior. We're avoiding a proper Feature test because those should be
+ # testing things entirely user-facing, which the Upload model is very much
+ # not.
+ it 'creates a corresponding Upload record' do
+ upload = Upload.last
+
+ aggregate_failures do
+ expect(upload).to exist
+ expect(upload.model).to eq(model)
+ end
+ end
+ end
+
+ context 'with valid non-image file' do
+ before do
+ post :create, params.merge(file: txt, format: :json)
+ end
+
+ it 'returns content with the original filename, new link, and correct type' do
+ expect(response.body).to match '\"alt\":\"doc_sample.txt\"'
+ expect(response.body).to match "\"url\":\"/uploads"
+ end
+ end
+ end
+ end
+
+ describe "GET #show" do
+ let(:show_upload) do
+ get :show, params.merge(secret: "123456", filename: "image.jpg")
+ end
+
+ context "when the model is public" do
+ before do
+ model.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ context "when not signed in" do
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "when signed in" do
+ before do
+ sign_in(user)
+ end
+
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
+
+ context "when the model is private" do
+ before do
+ model.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ context "when not signed in" do
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ context "when the file is an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file is not an image" do
+ it "redirects to the sign in page" do
+ show_upload
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "redirects to the sign in page" do
+ show_upload
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
+ context "when signed in" do
+ before do
+ sign_in(user)
+ end
+
+ context "when the user has access to the project" do
+ before do
+ model.add_developer(user)
+ end
+
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "when the user doesn't have access to the model" do
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ context "when the file is an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file is not an image" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
new file mode 100644
index 00000000000..d05eda08201
--- /dev/null
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -0,0 +1,20 @@
+module TrackUntrackedUploadsHelpers
+ def uploaded_file
+ fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
+ fixture_file_upload(fixture_path)
+ end
+
+ def ensure_temporary_tracking_table_exists
+ Gitlab::BackgroundMigration::PrepareUntrackedUploads.new.send(:ensure_temporary_tracking_table_exists)
+ end
+
+ def drop_temp_table_if_exists
+ ActiveRecord::Base.connection.drop_table(:untracked_files_for_uploads) if ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)
+ end
+
+ def create_or_update_appearance(attrs)
+ a = Appearance.first_or_initialize(title: 'foo', description: 'bar')
+ a.update!(attrs)
+ a
+ end
+end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
new file mode 100644
index 00000000000..c6c4500c179
--- /dev/null
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+describe NamespaceFileUploader do
+ let(:group) { build_stubbed(:group) }
+ let(:uploader) { described_class.new(group) }
+
+ describe "#store_dir" do
+ it "stores in the namespace id directory" do
+ expect(uploader.store_dir).to include(group.id.to_s)
+ end
+ end
+
+ describe ".absolute_path" do
+ it "stores in thecorrect directory" do
+ upload_record = create(:upload, :namespace_upload, model: group)
+
+ expect(described_class.absolute_path(upload_record))
+ .to include("-/system/namespace/#{group.id}")
+ end
+ end
+end