author | Lin Jen-Shin <godfat@godfat.org> | 2018-03-03 00:10:21 +0800
---|---|---
committer | Lin Jen-Shin <godfat@godfat.org> | 2018-03-03 00:10:21 +0800
commit | 6c5a7d5305e257244168799df0420359d0ad7b57 (patch) |
tree | 197f0293855b02cccfb97e3f319594530b285344 /spec |
parent | 461ecbcf07f0785b5ea50c62b114bf8217ac5199 (diff) |
parent | 9b704ef327cc0224bf09c1e8d8d27df88ab13734 (diff) |
download | gitlab-ce-6c5a7d5305e257244168799df0420359d0ad7b57.tar.gz |
Merge remote-tracking branch 'upstream/master' into 42572-release-controller
* upstream/master: (889 commits)
SlackService - respect `notify_only_default_branch` for push events
Clarify usage ping wording in admin area
Update incoming emails documents
Allow to include also descendant group labels
Update docs on grouping CI jobs
Support additional LabelsFinder parameters for group labels
Extend Cluster Applications to install GitLab Runner to Kubernetes cluster
Remove registry list webpack entry point
Remove trailing newline that was causing an EE conflict
Small fixes in Vuex docs
Remove u2f webpack bundle
Update documentation WRT to request parameters
remove common_vue CommonsChunk config
Fetch commit signatures from Gitaly in batches
migrate stl_viewer to dynamic import
migrate sketch_viewer to dynamic import
migrate pdf_viewer to dynamic import
migrate notebook_viewer to dynamic import
migrate balsamiq_viewer to dynamic import
Add some strings that were missing in gitlab.pot
...
Diffstat (limited to 'spec')
530 files changed, 13328 insertions, 4383 deletions
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 73fff6eb5ca..b7257fac608 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -109,15 +109,17 @@ describe AutocompleteController do
       end
 
       context 'limited users per page' do
-        let(:per_page) { 2 }
-
         before do
+          25.times do
+            create(:user)
+          end
+
           sign_in(user)
-          get(:users, per_page: per_page)
+          get(:users)
         end
 
         it { expect(json_response).to be_kind_of(Array) }
-        it { expect(json_response.size).to eq(per_page) }
+        it { expect(json_response.size).to eq(20) }
       end
 
       context 'unauthenticated user' do
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index 79bbc29e80d..4770e187db6 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -86,6 +86,7 @@ describe Boards::IssuesController do
 
     context 'with unauthorized user' do
       before do
+        allow(Ability).to receive(:allowed?).and_call_original
         allow(Ability).to receive(:allowed?).with(user, :read_project, project).and_return(true)
         allow(Ability).to receive(:allowed?).with(user, :read_issue, project).and_return(false)
       end
diff --git a/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb b/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
new file mode 100644
index 00000000000..27f558e1b5d
--- /dev/null
+++ b/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
@@ -0,0 +1,146 @@
+require 'spec_helper'
+
+describe ControllerWithCrossProjectAccessCheck do
+  let(:user) { create(:user) }
+
+  before do
+    sign_in user
+  end
+
+  render_views
+
+  context 'When reading cross project is not allowed' do
+    before do
+      allow(Ability).to receive(:allowed).and_call_original
+      allow(Ability).to receive(:allowed?)
+        .with(user, :read_cross_project, :global)
+        .and_return(false)
+    end
+
+    describe '#requires_cross_project_access' do
+      controller(ApplicationController) do
+        # `described_class` is not available in this context
+        include ControllerWithCrossProjectAccessCheck # rubocop:disable RSpec/DescribedClass
+
+        requires_cross_project_access :index, show: false,
+                                      unless: -> { unless_condition },
+                                      if: -> { if_condition }
+
+        def index
+          render nothing: true
+        end
+
+        def show
+          render nothing: true
+        end
+
+        def unless_condition
+          false
+        end
+
+        def if_condition
+          true
+        end
+      end
+
+      it 'renders a 404 with trying to access a cross project page' do
+        message = "This page is unavailable because you are not allowed to read "\
+          "information across multiple projects."
+ + get :index + + expect(response).to have_gitlab_http_status(404) + expect(response.body).to match(/#{message}/) + end + + it 'is skipped when the `if` condition returns false' do + expect(controller).to receive(:if_condition).and_return(false) + + get :index + + expect(response).to have_gitlab_http_status(200) + end + + it 'is skipped when the `unless` condition returns true' do + expect(controller).to receive(:unless_condition).and_return(true) + + get :index + + expect(response).to have_gitlab_http_status(200) + end + + it 'correctly renders an action that does not require cross project access' do + get :show, id: 'nothing' + + expect(response).to have_gitlab_http_status(200) + end + end + + describe '#skip_cross_project_access_check' do + controller(ApplicationController) do + # `described_class` is not available in this context + include ControllerWithCrossProjectAccessCheck # rubocop:disable RSpec/DescribedClass + + requires_cross_project_access + + skip_cross_project_access_check index: true, show: false, + unless: -> { unless_condition }, + if: -> { if_condition } + + def index + render nothing: true + end + + def show + render nothing: true + end + + def edit + render nothing: true + end + + def unless_condition + false + end + + def if_condition + true + end + end + + it 'renders a success when the check is skipped' do + get :index + + expect(response).to have_gitlab_http_status(200) + end + + it 'is executed when the `if` condition returns false' do + expect(controller).to receive(:if_condition).and_return(false) + + get :index + + expect(response).to have_gitlab_http_status(404) + end + + it 'is executed when the `unless` condition returns true' do + expect(controller).to receive(:unless_condition).and_return(true) + + get :index + + expect(response).to have_gitlab_http_status(404) + end + + it 'does not skip the check on an action that is not skipped' do + get :show, id: 'hello' + + expect(response).to have_gitlab_http_status(404) + end + + it 'does not skip the check on an action that was not defined to skip' do + get :edit, id: 'hello' + + expect(response).to have_gitlab_http_status(404) + end + end + end +end diff --git a/spec/controllers/concerns/issuable_collections_spec.rb b/spec/controllers/concerns/issuable_collections_spec.rb index d7825364ed5..c1f42bbb9d7 100644 --- a/spec/controllers/concerns/issuable_collections_spec.rb +++ b/spec/controllers/concerns/issuable_collections_spec.rb @@ -8,6 +8,10 @@ describe IssuableCollections do def self.helper_method(name); end include IssuableCollections + + def finder_type + IssuesFinder + end end controller = klass.new diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb index da54aa9054c..185b6b4ce57 100644 --- a/spec/controllers/groups/labels_controller_spec.rb +++ b/spec/controllers/groups/labels_controller_spec.rb @@ -1,8 +1,9 @@ require 'spec_helper' describe Groups::LabelsController do - let(:group) { create(:group) } - let(:user) { create(:user) } + set(:group) { create(:group) } + set(:user) { create(:user) } + set(:project) { create(:project, namespace: group) } before do group.add_owner(user) @@ -10,6 +11,34 @@ describe Groups::LabelsController do sign_in(user) end + describe 'GET #index' do + set(:label_1) { create(:label, project: project, title: 'label_1') } + set(:group_label_1) { create(:group_label, group: group, title: 'group_label_1') } + + it 'returns group and project labels by default' do + get :index, group_id: group, format: :json + + 
label_ids = json_response.map {|label| label['title']} + expect(label_ids).to match_array([label_1.title, group_label_1.title]) + end + + context 'with ancestor group', :nested_groups do + set(:subgroup) { create(:group, parent: group) } + set(:subgroup_label_1) { create(:group_label, group: subgroup, title: 'subgroup_label_1') } + + before do + subgroup.add_owner(user) + end + + it 'returns ancestor group labels', :nested_groups do + get :index, group_id: subgroup, include_ancestor_groups: true, only_group_labels: true, format: :json + + label_ids = json_response.map {|label| label['title']} + expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title]) + end + end + end + describe 'POST #toggle_subscription' do it 'allows user to toggle subscription on group labels' do label = create(:group_label, group: group) diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb index f75048f422c..21d59c62613 100644 --- a/spec/controllers/help_controller_spec.rb +++ b/spec/controllers/help_controller_spec.rb @@ -68,7 +68,7 @@ describe HelpController do context 'when requested file exists' do it 'renders the raw file' do get :show, - path: 'user/project/img/labels_filter', + path: 'user/project/img/labels_default', format: :png expect(response).to be_success expect(response.content_type).to eq 'image/png' diff --git a/spec/controllers/import/bitbucket_controller_spec.rb b/spec/controllers/import/bitbucket_controller_spec.rb index e8707760a5a..2be46049aab 100644 --- a/spec/controllers/import/bitbucket_controller_spec.rb +++ b/spec/controllers/import/bitbucket_controller_spec.rb @@ -84,20 +84,42 @@ describe Import::BitbucketController do double(slug: "vim", owner: bitbucket_username, name: 'vim') end + let(:project) { create(:project) } + before do allow_any_instance_of(Bitbucket::Client).to receive(:repo).and_return(bitbucket_repo) allow_any_instance_of(Bitbucket::Client).to receive(:user).and_return(bitbucket_user) assign_session_tokens end + it 'returns 200 response when the project is imported successfully' do + allow(Gitlab::BitbucketImport::ProjectCreator) + .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params) + .and_return(double(execute: project)) + + post :create, format: :json + + expect(response).to have_gitlab_http_status(200) + end + + it 'returns 422 response when the project could not be imported' do + allow(Gitlab::BitbucketImport::ProjectCreator) + .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params) + .and_return(double(execute: build(:project))) + + post :create, format: :json + + expect(response).to have_gitlab_http_status(422) + end + context "when the repository owner is the Bitbucket user" do context "when the Bitbucket user and GitLab user's usernames match" do it "takes the current user's namespace" do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -107,9 +129,9 @@ describe Import::BitbucketController do it "takes the current user's namespace" do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + 
post :create, format: :json end end @@ -120,7 +142,7 @@ describe Import::BitbucketController do allow(controller).to receive(:current_user).and_return(user) allow(user).to receive(:can?).and_return(false) - post :create, format: :js + post :create, format: :json end end end @@ -143,9 +165,9 @@ describe Import::BitbucketController do it "takes the existing namespace" do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, existing_namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -154,7 +176,7 @@ describe Import::BitbucketController do expect(Gitlab::BitbucketImport::ProjectCreator) .not_to receive(:new) - post :create, format: :js + post :create, format: :json end end end @@ -163,17 +185,17 @@ describe Import::BitbucketController do context "when current user can create namespaces" do it "creates the namespace" do expect(Gitlab::BitbucketImport::ProjectCreator) - .to receive(:new).and_return(double(execute: true)) + .to receive(:new).and_return(double(execute: project)) - expect { post :create, format: :js }.to change(Namespace, :count).by(1) + expect { post :create, format: :json }.to change(Namespace, :count).by(1) end it "takes the new namespace" do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, an_instance_of(Group), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -184,23 +206,23 @@ describe Import::BitbucketController do it "doesn't create the namespace" do expect(Gitlab::BitbucketImport::ProjectCreator) - .to receive(:new).and_return(double(execute: true)) + .to receive(:new).and_return(double(execute: project)) - expect { post :create, format: :js }.not_to change(Namespace, :count) + expect { post :create, format: :json }.not_to change(Namespace, :count) end it "takes the current user's namespace" do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end end end - context 'user has chosen an existing nested namespace and name for the project' do + context 'user has chosen an existing nested namespace and name for the project', :postgresql do let(:parent_namespace) { create(:group, name: 'foo', owner: user) } let(:nested_namespace) { create(:group, name: 'bar', parent: parent_namespace) } let(:test_name) { 'test_name' } @@ -212,63 +234,77 @@ describe Import::BitbucketController do it 'takes the selected namespace and name' do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, test_name, nested_namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js } + post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :json } end end - context 'user has chosen a non-existent nested namespaces and name for the project' do + context 'user has chosen a non-existent nested namespaces and name for the project', :postgresql do let(:test_name) { 'test_name' } it 'takes the selected namespace and 
name' do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } end it 'creates the namespaces' do allow(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } } + expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } } .to change { Namespace.count }.by(2) end it 'new namespace has the right parent' do allow(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo') end end - context 'user has chosen existent and non-existent nested namespaces and name for the project' do + context 'user has chosen existent and non-existent nested namespaces and name for the project', :postgresql do let(:test_name) { 'test_name' } let!(:parent_namespace) { create(:group, name: 'foo', owner: user) } + before do + parent_namespace.add_owner(user) + end + it 'takes the selected namespace and name' do expect(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } + post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json } end it 'creates the namespaces' do allow(Gitlab::BitbucketImport::ProjectCreator) .to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } } + expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json } } .to change { Namespace.count }.by(2) end end + + context 'when user can not create projects in the chosen namespace' do + it 'returns 422 response' do + other_namespace = create(:group, name: 'other_namespace') + + post :create, { target_namespace: other_namespace.name, format: :json } + + expect(response).to have_gitlab_http_status(422) + end + end end end diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb index faf1e6f63ea..e958be077c2 100644 --- a/spec/controllers/import/gitlab_controller_spec.rb +++ b/spec/controllers/import/gitlab_controller_spec.rb @@ -57,6 +57,7 @@ describe Import::GitlabController do end describe "POST create" do + let(:project) { create(:project) } let(:gitlab_username) { user.username } let(:gitlab_user) do { username: gitlab_username }.with_indifferent_access @@ -75,14 +76,34 @@ describe Import::GitlabController do 
assign_session_token end + it 'returns 200 response when the project is imported successfully' do + allow(Gitlab::GitlabImport::ProjectCreator) + .to receive(:new).with(gitlab_repo, user.namespace, user, access_params) + .and_return(double(execute: project)) + + post :create, format: :json + + expect(response).to have_gitlab_http_status(200) + end + + it 'returns 422 response when the project could not be imported' do + allow(Gitlab::GitlabImport::ProjectCreator) + .to receive(:new).with(gitlab_repo, user.namespace, user, access_params) + .and_return(double(execute: build(:project))) + + post :create, format: :json + + expect(response).to have_gitlab_http_status(422) + end + context "when the repository owner is the GitLab.com user" do context "when the GitLab.com user and GitLab server user's usernames match" do it "takes the current user's namespace" do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, user.namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -92,9 +113,9 @@ describe Import::GitlabController do it "takes the current user's namespace" do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, user.namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end end @@ -118,9 +139,9 @@ describe Import::GitlabController do it "takes the existing namespace" do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, existing_namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -129,7 +150,7 @@ describe Import::GitlabController do expect(Gitlab::GitlabImport::ProjectCreator) .not_to receive(:new) - post :create, format: :js + post :create, format: :json end end end @@ -138,17 +159,17 @@ describe Import::GitlabController do context "when current user can create namespaces" do it "creates the namespace" do expect(Gitlab::GitlabImport::ProjectCreator) - .to receive(:new).and_return(double(execute: true)) + .to receive(:new).and_return(double(execute: project)) - expect { post :create, format: :js }.to change(Namespace, :count).by(1) + expect { post :create, format: :json }.to change(Namespace, :count).by(1) end it "takes the new namespace" do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, an_instance_of(Group), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -159,22 +180,22 @@ describe Import::GitlabController do it "doesn't create the namespace" do expect(Gitlab::GitlabImport::ProjectCreator) - .to receive(:new).and_return(double(execute: true)) + .to receive(:new).and_return(double(execute: project)) - expect { post :create, format: :js }.not_to change(Namespace, :count) + expect { post :create, format: :json }.not_to change(Namespace, :count) end it "takes the current user's namespace" do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, user.namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end end - context 'user has chosen an existing nested 
namespace for the project' do + context 'user has chosen an existing nested namespace for the project', :postgresql do let(:parent_namespace) { create(:group, name: 'foo', owner: user) } let(:nested_namespace) { create(:group, name: 'bar', parent: parent_namespace) } @@ -185,64 +206,78 @@ describe Import::GitlabController do it 'takes the selected namespace and name' do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, nested_namespace, user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: nested_namespace.full_path, format: :js } + post :create, { target_namespace: nested_namespace.full_path, format: :json } end end - context 'user has chosen a non-existent nested namespaces for the project' do + context 'user has chosen a non-existent nested namespaces for the project', :postgresql do let(:test_name) { 'test_name' } it 'takes the selected namespace and name' do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/bar', format: :js } + post :create, { target_namespace: 'foo/bar', format: :json } end it 'creates the namespaces' do allow(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - expect { post :create, { target_namespace: 'foo/bar', format: :js } } + expect { post :create, { target_namespace: 'foo/bar', format: :json } } .to change { Namespace.count }.by(2) end it 'new namespace has the right parent' do allow(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/bar', format: :js } + post :create, { target_namespace: 'foo/bar', format: :json } expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo') end end - context 'user has chosen existent and non-existent nested namespaces and name for the project' do + context 'user has chosen existent and non-existent nested namespaces and name for the project', :postgresql do let(:test_name) { 'test_name' } let!(:parent_namespace) { create(:group, name: 'foo', owner: user) } + before do + parent_namespace.add_owner(user) + end + it 'takes the selected namespace and name' do expect(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/foobar/bar', format: :js } + post :create, { target_namespace: 'foo/foobar/bar', format: :json } end it 'creates the namespaces' do allow(Gitlab::GitlabImport::ProjectCreator) .to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - expect { post :create, { target_namespace: 'foo/foobar/bar', format: :js } } + expect { post :create, { target_namespace: 'foo/foobar/bar', format: :json } } .to change { Namespace.count }.by(2) end end + + context 'when user can not create projects in the chosen namespace' do + it 'returns 422 response' do + other_namespace = create(:group, name: 'other_namespace') + + post :create, { 
target_namespace: other_namespace.name, format: :json } + + expect(response).to have_gitlab_http_status(422) + end + end end end end diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb index 004b463e745..149b690ff70 100644 --- a/spec/controllers/oauth/authorizations_controller_spec.rb +++ b/spec/controllers/oauth/authorizations_controller_spec.rb @@ -34,6 +34,8 @@ describe Oauth::AuthorizationsController do end context 'with valid params' do + render_views + it 'returns 200 code and renders view' do get :new, params diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb index d380978b86e..03cbbb21e62 100644 --- a/spec/controllers/profiles_controller_spec.rb +++ b/spec/controllers/profiles_controller_spec.rb @@ -69,9 +69,8 @@ describe ProfilesController, :request_store do describe 'PUT update_username' do let(:namespace) { user.namespace } - let(:project) { create(:project_empty_repo, namespace: namespace) } let(:gitlab_shell) { Gitlab::Shell.new } - let(:new_username) { 'renamedtosomethingelse' } + let(:new_username) { generate(:username) } it 'allows username change' do sign_in(user) @@ -85,16 +84,39 @@ describe ProfilesController, :request_store do expect(user.username).to eq(new_username) end - it 'moves dependent projects to new namespace' do - sign_in(user) + context 'with legacy storage' do + it 'moves dependent projects to new namespace' do + project = create(:project_empty_repo, :legacy_storage, namespace: namespace) - put :update_username, - user: { username: new_username } + sign_in(user) - user.reload + put :update_username, + user: { username: new_username } - expect(response.status).to eq(302) - expect(gitlab_shell.exists?(project.repository_storage_path, "#{new_username}/#{project.path}.git")).to be_truthy + user.reload + + expect(response.status).to eq(302) + expect(gitlab_shell.exists?(project.repository_storage_path, "#{new_username}/#{project.path}.git")).to be_truthy + end + end + + context 'with hashed storage' do + it 'keeps repository location unchanged on disk' do + project = create(:project_empty_repo, namespace: namespace) + + before_disk_path = project.disk_path + + sign_in(user) + + put :update_username, + user: { username: new_username } + + user.reload + + expect(response.status).to eq(302) + expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy + expect(before_disk_path).to eq(project.disk_path) + end end end end diff --git a/spec/controllers/projects/clusters/gcp_controller_spec.rb b/spec/controllers/projects/clusters/gcp_controller_spec.rb index 775f9db1c6e..e14ba29fa70 100644 --- a/spec/controllers/projects/clusters/gcp_controller_spec.rb +++ b/spec/controllers/projects/clusters/gcp_controller_spec.rb @@ -161,7 +161,7 @@ describe Projects::Clusters::GcpController do it 'renders the cluster form with an error' do go - expect(response).to set_flash[:alert] + expect(response).to set_flash.now[:alert] expect(response).to render_template('new') end end diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb index 954fc79f57d..15ce418d0d6 100644 --- a/spec/controllers/projects/clusters_controller_spec.rb +++ b/spec/controllers/projects/clusters_controller_spec.rb @@ -91,6 +91,12 @@ describe Projects::ClustersController do expect(response).to have_gitlab_http_status(:ok) expect(response).to 
match_response_schema('cluster_status') end + + it 'invokes schedule_status_update on each application' do + expect_any_instance_of(Clusters::Applications::Ingress).to receive(:schedule_status_update) + + go + end end describe 'security' do diff --git a/spec/controllers/projects/cycle_analytics_controller_spec.rb b/spec/controllers/projects/cycle_analytics_controller_spec.rb index 7c708a418a7..5516c95d044 100644 --- a/spec/controllers/projects/cycle_analytics_controller_spec.rb +++ b/spec/controllers/projects/cycle_analytics_controller_spec.rb @@ -27,7 +27,7 @@ describe Projects::CycleAnalyticsController do milestone = create(:milestone, project: project, created_at: 5.days.ago) issue.update(milestone: milestone) - create_merge_request_closing_issue(issue) + create_merge_request_closing_issue(user, project, issue) end it 'is false' do diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb index 00328d3ea51..fcb0c2f28c8 100644 --- a/spec/controllers/projects/discussions_controller_spec.rb +++ b/spec/controllers/projects/discussions_controller_spec.rb @@ -71,6 +71,19 @@ describe Projects::DiscussionsController do expect(response).to have_gitlab_http_status(200) end + + context "when vue_mr_discussions cookie is present" do + before do + allow(controller).to receive(:cookies).and_return(vue_mr_discussions: 'true') + end + + it "renders discussion with serializer" do + expect_any_instance_of(DiscussionSerializer).to receive(:represent) + .with(instance_of(Discussion), { context: instance_of(described_class) }) + + post :resolve, request_params + end + end end end end @@ -119,6 +132,19 @@ describe Projects::DiscussionsController do expect(response).to have_gitlab_http_status(200) end + + context "when vue_mr_discussions cookie is present" do + before do + allow(controller).to receive(:cookies).and_return({ vue_mr_discussions: 'true' }) + end + + it "renders discussion with serializer" do + expect_any_instance_of(DiscussionSerializer).to receive(:represent) + .with(instance_of(Discussion), { context: instance_of(described_class) }) + + delete :unresolve, request_params + end + end end end end diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index 9656e7f7e74..9918d52e402 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -974,7 +974,7 @@ describe Projects::IssuesController do it 'returns discussion json' do get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid - expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes individual_note]) + expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion individual_note resolvable resolve_with_issue_path resolved]) end context 'with cross-reference system note', :request_store do diff --git a/spec/controllers/projects/merge_requests/creations_controller_spec.rb b/spec/controllers/projects/merge_requests/creations_controller_spec.rb index 92db7284e0e..24310b847e8 100644 --- a/spec/controllers/projects/merge_requests/creations_controller_spec.rb +++ b/spec/controllers/projects/merge_requests/creations_controller_spec.rb @@ -17,7 +17,7 @@ describe Projects::MergeRequests::CreationsController do before do fork_project.add_master(user) - + Projects::ForkService.new(project, user).execute(fork_project) sign_in(user) end @@ -125,4 +125,66 @@ describe 
Projects::MergeRequests::CreationsController do end end end + + describe 'GET #branch_to' do + before do + allow(Ability).to receive(:allowed?).and_call_original + end + + it 'fetches the commit if a user has access' do + expect(Ability).to receive(:allowed?).with(user, :read_project, project) { true } + + get :branch_to, + namespace_id: fork_project.namespace, + project_id: fork_project, + target_project_id: project.id, + ref: 'master' + + expect(assigns(:commit)).not_to be_nil + expect(response).to have_gitlab_http_status(200) + end + + it 'does not load the commit when the user cannot read the project' do + expect(Ability).to receive(:allowed?).with(user, :read_project, project) { false } + + get :branch_to, + namespace_id: fork_project.namespace, + project_id: fork_project, + target_project_id: project.id, + ref: 'master' + + expect(assigns(:commit)).to be_nil + expect(response).to have_gitlab_http_status(200) + end + end + + describe 'GET #update_branches' do + before do + allow(Ability).to receive(:allowed?).and_call_original + end + + it 'lists the branches of another fork if the user has access' do + expect(Ability).to receive(:allowed?).with(user, :read_project, project) { true } + + get :update_branches, + namespace_id: fork_project.namespace, + project_id: fork_project, + target_project_id: project.id + + expect(assigns(:target_branches)).not_to be_empty + expect(response).to have_gitlab_http_status(200) + end + + it 'does not list branches when the user cannot read the project' do + expect(Ability).to receive(:allowed?).with(user, :read_project, project) { false } + + get :update_branches, + namespace_id: fork_project.namespace, + project_id: fork_project, + target_project_id: project.id + + expect(response).to have_gitlab_http_status(200) + expect(assigns(:target_branches)).to eq([]) + end + end end diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb index e9e7d357d9c..83a3799e883 100644 --- a/spec/controllers/projects/pages_domains_controller_spec.rb +++ b/spec/controllers/projects/pages_domains_controller_spec.rb @@ -46,8 +46,107 @@ describe Projects::PagesDomainsController do post(:create, request_params.merge(pages_domain: pages_domain_params)) end.to change { PagesDomain.count }.by(1) + created_domain = PagesDomain.reorder(:id).last + + expect(created_domain).to be_present + expect(response).to redirect_to(project_pages_domain_path(project, created_domain)) + end + end + + describe 'GET edit' do + it "displays the 'edit' page" do + get(:edit, request_params.merge(id: pages_domain.domain)) + + expect(response).to have_gitlab_http_status(200) + expect(response).to render_template('edit') + end + end + + describe 'PATCH update' do + before do + controller.instance_variable_set(:@domain, pages_domain) + end + + let(:pages_domain_params) do + attributes_for(:pages_domain, :with_certificate, :with_key).slice(:key, :certificate) + end + + let(:params) do + request_params.merge(id: pages_domain.domain, pages_domain: pages_domain_params) + end + + it 'updates the domain' do + expect(pages_domain) + .to receive(:update) + .with(pages_domain_params) + .and_return(true) + + patch(:update, params) + end + + it 'redirects to the project page' do + patch(:update, params) + + expect(flash[:notice]).to eq 'Domain was updated' expect(response).to redirect_to(project_pages_path(project)) end + + context 'the domain is invalid' do + it 'renders the edit action' do + allow(pages_domain).to 
receive(:update).and_return(false) + + patch(:update, params) + + expect(response).to render_template('edit') + end + end + + context 'the parameters include the domain' do + it 'renders 400 Bad Request' do + expect(pages_domain) + .to receive(:update) + .with(hash_not_including(:domain)) + .and_return(true) + + patch(:update, params.deep_merge(pages_domain: { domain: 'abc' })) + end + end + end + + describe 'POST verify' do + let(:params) { request_params.merge(id: pages_domain.domain) } + + def stub_service + service = double(:service) + + expect(VerifyPagesDomainService).to receive(:new) { service } + + service + end + + it 'handles verification success' do + expect(stub_service).to receive(:execute).and_return(status: :success) + + post :verify, params + + expect(response).to redirect_to project_pages_domain_path(project, pages_domain) + expect(flash[:notice]).to eq('Successfully verified domain ownership') + end + + it 'handles verification failure' do + expect(stub_service).to receive(:execute).and_return(status: :failed) + + post :verify, params + + expect(response).to redirect_to project_pages_domain_path(project, pages_domain) + expect(flash[:alert]).to eq('Failed to verify domain ownership') + end + + it 'returns a 404 response for an unknown domain' do + post :verify, request_params.merge(id: 'unknown-domain') + + expect(response).to have_gitlab_http_status(404) + end end describe 'DELETE destroy' do diff --git a/spec/controllers/projects/prometheus_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb index bbfe78d305a..f17f819feee 100644 --- a/spec/controllers/projects/prometheus_controller_spec.rb +++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb @@ -1,20 +1,20 @@ -require('spec_helper') +require 'spec_helper' -describe Projects::PrometheusController do +describe Projects::Prometheus::MetricsController do let(:user) { create(:user) } - let!(:project) { create(:project) } + let(:project) { create(:project) } let(:prometheus_service) { double('prometheus_service') } before do allow(controller).to receive(:project).and_return(project) - allow(project).to receive(:prometheus_service).and_return(prometheus_service) + allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return(prometheus_service) project.add_master(user) sign_in(user) end - describe 'GET #active_metrics' do + describe 'GET #active_common' do context 'when prometheus metrics are enabled' do context 'when data is not present' do before do @@ -22,7 +22,7 @@ describe Projects::PrometheusController do end it 'returns no content response' do - get :active_metrics, project_params(format: :json) + get :active_common, project_params(format: :json) expect(response).to have_gitlab_http_status(204) end @@ -36,7 +36,7 @@ describe Projects::PrometheusController do end it 'returns no content response' do - get :active_metrics, project_params(format: :json) + get :active_common, project_params(format: :json) expect(response).to have_gitlab_http_status(200) expect(json_response).to eq(sample_response.deep_stringify_keys) @@ -45,7 +45,7 @@ describe Projects::PrometheusController do context 'when requesting non json response' do it 'returns not found response' do - get :active_metrics, project_params + get :active_common, project_params expect(response).to have_gitlab_http_status(404) end diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb index d572085661d..eca9baed9c9 100644 --- 
a/spec/controllers/projects/uploads_controller_spec.rb +++ b/spec/controllers/projects/uploads_controller_spec.rb @@ -7,4 +7,12 @@ describe Projects::UploadsController do end it_behaves_like 'handle uploads' + + context 'when the URL the old style, without /-/system' do + it 'responds with a redirect to the login page' do + get :show, namespace_id: 'project', project_id: 'avatar', filename: 'foo.png', secret: 'bar' + + expect(response).to redirect_to(new_user_session_path) + end + end end diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb index 5202ffdd8bb..994da3cd159 100644 --- a/spec/controllers/projects_controller_spec.rb +++ b/spec/controllers/projects_controller_spec.rb @@ -288,62 +288,82 @@ describe ProjectsController do render_views let(:admin) { create(:admin) } - let(:project) { create(:project, :repository) } before do sign_in(admin) end - context 'when only renaming a project path' do - it "sets the repository to the right path after a rename" do - expect { update_project path: 'renamed_path' } - .to change { project.reload.path } + shared_examples_for 'updating a project' do + context 'when only renaming a project path' do + it "sets the repository to the right path after a rename" do + original_repository_path = project.repository.path - expect(project.path).to include 'renamed_path' - expect(assigns(:repository).path).to include project.path - expect(response).to have_gitlab_http_status(302) - end - end + expect { update_project path: 'renamed_path' } + .to change { project.reload.path } + expect(project.path).to include 'renamed_path' - context 'when project has container repositories with tags' do - before do - stub_container_registry_config(enabled: true) - stub_container_registry_tags(repository: /image/, tags: %w[rc1]) - create(:container_repository, project: project, name: :image) + if project.hashed_storage?(:repository) + expect(assigns(:repository).path).to eq(original_repository_path) + else + expect(assigns(:repository).path).to include(project.path) + end + + expect(response).to have_gitlab_http_status(302) + end end - it 'does not allow to rename the project' do - expect { update_project path: 'renamed_path' } - .not_to change { project.reload.path } + context 'when project has container repositories with tags' do + before do + stub_container_registry_config(enabled: true) + stub_container_registry_tags(repository: /image/, tags: %w[rc1]) + create(:container_repository, project: project, name: :image) + end - expect(controller).to set_flash[:alert].to(/container registry tags/) - expect(response).to have_gitlab_http_status(200) + it 'does not allow to rename the project' do + expect { update_project path: 'renamed_path' } + .not_to change { project.reload.path } + + expect(controller).to set_flash[:alert].to(/container registry tags/) + expect(response).to have_gitlab_http_status(200) + end end - end - it 'updates Fast Forward Merge attributes' do - controller.instance_variable_set(:@project, project) + it 'updates Fast Forward Merge attributes' do + controller.instance_variable_set(:@project, project) - params = { - merge_method: :ff - } + params = { + merge_method: :ff + } - put :update, - namespace_id: project.namespace, - id: project.id, - project: params + put :update, + namespace_id: project.namespace, + id: project.id, + project: params - expect(response).to have_gitlab_http_status(302) - params.each do |param, value| - expect(project.public_send(param)).to eq(value) + expect(response).to 
have_gitlab_http_status(302) + params.each do |param, value| + expect(project.public_send(param)).to eq(value) + end + end + + def update_project(**parameters) + put :update, + namespace_id: project.namespace.path, + id: project.path, + project: parameters end end - def update_project(**parameters) - put :update, - namespace_id: project.namespace.path, - id: project.path, - project: parameters + context 'hashed storage' do + let(:project) { create(:project, :repository) } + + it_behaves_like 'updating a project' + end + + context 'legacy storage' do + let(:project) { create(:project, :repository, :legacy_storage) } + + it_behaves_like 'updating a project' end end diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb index 37f961d0c94..30c06ddf744 100644 --- a/spec/controllers/search_controller_spec.rb +++ b/spec/controllers/search_controller_spec.rb @@ -16,6 +16,32 @@ describe SearchController do expect(assigns[:search_objects].first).to eq note end + context 'when the user cannot read cross project' do + before do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?) + .with(user, :read_cross_project, :global) { false } + end + + it 'still allows accessing the search page' do + get :show + + expect(response).to have_gitlab_http_status(200) + end + + it 'still blocks searches without a project_id' do + get :show, search: 'hello' + + expect(response).to have_gitlab_http_status(404) + end + + it 'allows searches with a project_id' do + get :show, search: 'hello', project_id: create(:project, :public).id + + expect(response).to have_gitlab_http_status(200) + end + end + context 'on restricted projects' do context 'when signed out' do before do diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb index 2898c4b119e..b0acf4a49ac 100644 --- a/spec/controllers/users_controller_spec.rb +++ b/spec/controllers/users_controller_spec.rb @@ -74,6 +74,31 @@ describe UsersController do end end end + + context 'json with events' do + let(:project) { create(:project) } + before do + project.add_developer(user) + Gitlab::DataBuilder::Push.build_sample(project, user) + + sign_in(user) + end + + it 'loads events' do + get :show, username: user, format: :json + + expect(assigns(:events)).not_to be_empty + end + + it 'hides events if the user cannot read cross project' do + allow(Ability).to receive(:allowed?).and_call_original + expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + + get :show, username: user, format: :json + + expect(assigns(:events)).to be_empty + end + end end describe 'GET #calendar' do diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb index 6ba599cdf83..f6ba3a581ca 100644 --- a/spec/factories/ci/builds.rb +++ b/spec/factories/ci/builds.rb @@ -180,8 +180,8 @@ FactoryBot.define do trait :artifacts do after(:create) do |build| - create(:ci_job_artifact, :archive, job: build) - create(:ci_job_artifact, :metadata, job: build) + create(:ci_job_artifact, :archive, job: build, expire_at: build.artifacts_expire_at) + create(:ci_job_artifact, :metadata, job: build, expire_at: build.artifacts_expire_at) build.reload end end diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb index 775fbb3d27b..3deca103578 100644 --- a/spec/factories/clusters/applications/helm.rb +++ b/spec/factories/clusters/applications/helm.rb @@ -34,5 +34,6 @@ FactoryBot.define do factory 
:clusters_applications_ingress, class: Clusters::Applications::Ingress factory :clusters_applications_prometheus, class: Clusters::Applications::Prometheus + factory :clusters_applications_runner, class: Clusters::Applications::Runner end end diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb index 23a98a899f1..3f0c60f32b7 100644 --- a/spec/factories/keys.rb +++ b/spec/factories/keys.rb @@ -22,38 +22,104 @@ FactoryBot.define do factory :rsa_key_2048 do key do <<~KEY.delete("\n") - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFf6RYK3qu/RKF/3ndJmL5xgMLp3O9 - 6x8lTay+QGZ0+9FnnAXMdUqBq/ZU6d/gyMB4IaW3nHzM1w049++yAB6UPCzMB8Uo27K5 - /jyZCtj7Vm9PFNjF/8am1kp46c/SeYicQgQaSBdzIW3UDEa1Ef68qroOlvpi9PYZ/tA7 - M0YP0K5PXX+E36zaIRnJVMPT3f2k+GnrxtjafZrwFdpOP/Fol5BQLBgcsyiU+LM1SuaC - rzd8c9vyaTA1CxrkxaZh+buAi0PmdDtaDrHd42gqZkXCKavyvgM5o2CkQ5LJHCgzpXy0 - 5qNFzmThBSkb+XtoxbyagBiGbVZtSVow6Xa7qewz= dummy@gitlab.com + ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC98dbu7gxcbmAvwMqz/6AALhSr1jiX + G0UC8FQMvoDt+ciB+uSJhg7KlxinKjYJnPGfhX+q2K+mmCGAmI/D6q7rFxE+bn09O+75 + qgkTHi+suDVE6KG7L3n0alGd/qSevfomR77Snh6fQPdG6sEAZz3kehcpfVnq5/IuLFq9 + FBrgmu52Jd4XZLQZKkDq6zYOJ69FUkGf93LZIV/OOaS+f+qkOGPCUkdKl7oEcgpVNY9S + RjBCduXnvi2CyQnnJVkBguGL5VlXwFXH+17Whs7oFWmdiG+4jzBRLIMz4EuIW09b8Su5 + PW6+bBuXOifHA8KG5TMmjs5LYdCMPFnhTyDyO3a1 dummy@gitlab.com KEY end factory :rsa_deploy_key_2048, class: 'DeployKey' end + factory :rsa_key_4096 do + key do + <<~KEY.delete("\n") + ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDGSD77lLtjmzewiBs6nu2R5nu6oNkrA + kH/0co1fHHosKfRr+sWkSTKXOVcL7bhRu+tniGBmB5pn+i1qX7BXtrcnv//bCXWIp+me0 + 27L4RJa5/Ep077iiTJlzTpcV664xNUXC8mzBr601HR/Z2TzX5DWJvnyqqFkN7qHTYo/+I + oKECnKqNzI5SQrAxgi6sbWA5DFQ/nwcqsUSBo5gCCJ/0QPrR19yVV5lJA19EY2LawOb1S + JNOFo4mQupSlBZwvERZJ7IqhBTPtQIfrqqz5VJbI13jK3ViZTugIZqydWAhosUyejP3Sd + Cj1KMexrvV95tjUtmhVFlph4tKThQO0p9pXKZNCzYsbQTye6O6Hk2rojOJLyFWqNBVKtI + 8Ymfu7OQWppRnuUFuhuuS515H1s888bZFMPsC74mPyo0Y7Q9wAoTnQ9Hw6b0J6OfY3PIR + VphaCmxh6b7dgSPFdD7TA6j0xk6PCTOIEzBKuc85B3GQc8Nt4sTv6fW8lGeuYWqepW74i + geC4qB6U3/3+p3nPdq/bTM1txrhnQsl1r4dv6TLZ51EtHp6sXayp0qd0pRaiavebXFC0i + aETLraQpye4FWbBL/8xTjQ/0VPrYVuUCDvDSMIIS3/9g7Kp7ERUDC9jUqOVonm4pTXL9i + ItiUBlK7Mob9C4fQIRFnVR00DCmkmVgw== dummy@gitlab.com + KEY + end + end + + factory :rsa_key_5120 do + key do + <<~KEY.delete("\n") + ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACgQDxnZP0TucLH3zcrvt75DPNq+xKqOmJk + CEzTytKq4S5MDH0nlx+xOZ9WykhwDHXU0iZBJF7yRdLkZweYDJVKnBzr4t7QP5Sw2/ZdL + elvUMWGJjuz28x8Z+8NZ+IxL/exDz7itrhCsLupQhGO1obiIwf8xVzzPoxrQ9dxaN4x96 + 5N+QdQcld8O6xfpSE0p5Y3sRn3kp57aHWoNa/bUGZy0OHLr/ig0uc6EKyWsTmEESOgDyV + 94wOyHR0KNGEENyxQt4BwAbEBn3Y41HKqD358KKh+XjbECebrrBFigdDL/eYFIUlstJ07 + SK/HtYjZbiUZCPs8bJA+SBaLK0pGGqguM2LXRoMeMUZFwKKKS2LpRqjKGj3Qt7qMnp1Sk + VhiMnxNqL4nJnDOOVo07xDIPKqIBYO67/cp4Icv3IjKxy6K3EIpLr+iRCxcllpDogxolz + FC+pEDVpmEvcrGEv1ON6HcCdk/6Q8Iekr8rYDHpKCU5FF2uBHkqq7yNJ1/+NFC4dgyOo0 + xCVL4D3DvDKNxFYkrzW4ICt0f5XcMnU10yS/OFXz8JwA3jvuLvMRe5JdFiIjb/l86+TgY + yvK8Y8N/UWgSgyjXUCv8nxdvpsxdz5h7HBF8E2DIxCVMC23655e5rp5eJW9EU9X5YFZc3 + u6uWJ1f1aO+1ViTtqkPrqxovNDD+gVel8Ny6MJ4MvmDKY+eM8beNMSSf1n1Oyh/SvCffh + ZpUqrXdTr9qwZEOaC75T74AJ7KBl9VvO3vPLZuJrt38R2OZG/4SlNEUA6bb5TWQLtdor/ + qpPN5jAskkAUzOh5L/M+dmq2jNn03U9xwORCYPZj+fFM9bL99/0knsV0ypZDZyWH dummy@gitlab.com + KEY + end + end + + factory :rsa_key_8192 do + key do + <<~KEY.delete("\n") + ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAEAQC5jMyGtgMOVX4t2GuXkbirJA0Edr+ql + OH9grnRBPHPo0Npt6XE6ZN3J3hDULTQo03wmekGw42dxdNSgk+F0GjsUBrMLbqrk485MM + e0cUbP4lRXNu4ao87wPVM5fAsD4E3FQiZcI6Df011ZGIL7hGTHt6eafTfr9cJheRyYSu6 + 
g06rlnFWbbtSh9oQ7Y6sfDLBcsC9ECcXwe3mwViuQXPIVomZ02EdnBbAhbGHDtA+ZbSvT + fraxOMjkxkVvvdjLxXEykpwVuZf8eZ+R/Js8jQ5RKvTZMbfxJNsGEqHD32s43ml4VF549 + Qz2GJDXF7Cld/n3CT6wvw0mMPM0LnykL2v0CMr44bjIA3KsNEs5MhkcBO8sv5hGfcPhrp + m9WwI6gd9vdZVcxarVI+iQS947owvdn4VbEZXynCDqEEv3Zh+FA5p23mf2p7DkG/swiK/ + IPrjr1wmsiWmwIUsENzJNyJtibKuRsBawC4ZdL797tFilSoTzSpriegSL13joPXz3eOHC + Vu4ATHMo3QyLfIFbxrf9PQ79nyOpHoX2YeFXvei3xFkGMundkOqeI+pnJKDyqbiLV7UVl + clua11QWNQZf1ZUd0n1wZ1g89de+wl3oJSRbSA5ZpveZEPstcMC/JhogY4JBYsvCT1yHO + oNWHo90NZQsUCjNnR+/FVaACtpt2zcPTjjbXvxwCDlT3gXTmTBp/kEZq6u8p+BOlqFgxc + P/sdAR8jWTin3Iw/YAcbqNgRHdjMUzJBrPQ5NcK6xFcmkOEQahdJDZs98xozCHkD4Urx6 + +auTr/uqRYobKoNUNiYqN1n7/dfZjQJJVkHtKd06JTFx+7/SqyfrTKS+/EIf2Hypdy9r9 + IFR+SWAOi11N/wflS/ZbH95Qt3STifXRecmHzyYGkMOZ+mg3Hi2YU0yn7k+P1jy627xud + pT9Ak3HWT5ji8tMyn9udL7m80dYpUiEAxoYZdbSSNCDaKP4ViABnGIeZreIujabI8IdtE + IjFQTaF2d5HTYjp28/qf576CFP5L7AGydypipYqZUmsYnay5YVjdm89He3TMD71SwspJl + POC4RnM0HS87OE+U0+mVaIe8YYbcjTekpVU9mkqsE/GQ34Egw79VMNNgWq5avOzpT8msC + lTJxgfJ1agGgigTvGxUM0FB07+sIdJxxNymAGpLKZ1op8xaJI3o8D86jWgI22za1zxUB5 + il9U7+KOzaWo9mp3bmhvZWGDwzTXEZhUJYMRby7o6UxSHlA6fKE63JSDD2yhXk4CjsQRN + C7Ph9cYSB+Wa3i9Am4rRlJgrF79okmEOMpj1idliHkpIsy/k2CN9Lf2EIHOD4NMuLrSUH + 4qJsPUq19ZbGIMdImD3vMS5b dummy@gitlab.com + KEY + end + end + factory :dsa_key_2048 do key do <<~KEY.delete("\n") - ssh-dss AAAAB3NzaC1kc3MAAAEBAO/3/NPLA/zSFkMOCaTtGo+uos1flfQ5f038Uk+G - Y9AeLGzX+Srhw59GdVXmOQLYBrOt5HdGwqYcmLnE2VurUGmhtfeO5H+3p5pGJbkS0Gxp - YH1HRO9lWsncF3Hh1w4lYsDjkclDiSTdfTuN8F4Kb3DXNnVSCieeonp+B25F/CXagyTQ - /pvNmHFeYgGCVdnBtFdi+xfxaZ8NKdPrGggzokbKHElDZQ4Xo5EpdcyLajgM7nB2r2Rz - OrmeaevKi5lV68ehRa9Yyrb7vxvwiwBwOgqR/mnN7Gnaq1jUdmJY+ct04Qwx37f5jvhv - 5gA4U40SGMoiHM8RFIN7Ksz0jsyX73MAAAAVALRWOfjfzHpK7KLz4iqDvvTUAevJAAAB - AEa9NZ+6y9iQ5erGsdfLTXFrhSefTG0NhghoO/5IFkSGfd8V7kzTvCHaFrcfpEA5kP8t - poeOG0TASB6tgGOxm1Bq4Wncry5RORBPJlAVpDGRcvZ931ddH7IgltEInS6za2uH6F/1 - M1QfKePSLr6xJ1ZLYfP0Og5KTp1x6yMQvfwV0a+XdA+EPgaJWLWp/pWwKWa0oLUgjsIH - MYzuOGh5c708uZrmkzqvgtW2NgXhcIroRgynT3IfI2lP2rqqb3uuuE/qH5UCUFO+Dc3H - nAFNeQDT/M25AERdPYBAY5a+iPjIgO+jT7BfmfByT+AZTqZySrCyc7nNZL3YgGLK0l6A - 1GgAAAEBAN9FpFOdIXE+YEZhKl1vPmbcn+b1y5zOl6N4x1B7Q8pD/pLMziWROIS8uLzb - aZ0sMIWezHIkxuo1iROMeT+jtCubn7ragaN6AX7nMpxYUH9+mYZZs/fyElt6wCviVhTI - zM+u7VdQsnZttOOlQfogHdL+SpeAft0DsfJjlcgQnsLlHQKv6aPqCPYUST2nE7RyW/Ex - PrMxLtOWt0/j8RYHbwwqvyeZqBz3ESBgrS9c5tBdBfauwYUV/E7gPLOU3OZFw9ue7o+z - wzoTZqW6Xouy5wtWvSLQSLT5XwOslmQz8QMBxD0AQyDfEFGsBCWzmbTgKv9uqrBjubsS - Taja+Cf9kMo== dummy@gitlab.com + ssh-dss AAAAB3NzaC1kc3MAAAEBALEB3sM2kPy6LKLiyL+UlDx2vzuKrzSD2nsW2Kb7 + 0ivIqDNJu5CbqIQSkjdMzJiocs33ESFqXid6ezOtVdDwXHJQRxKGalW1kBbFAPjtMxlD + bf559+7qN2zfCfcQsgTmNAZ7O+wltqJmyLv5i4QqNwPDvyeBvJ4C+770DzlcQtpkflKJ + X+O7i8Ylq34h6UTCTnjry+dFVm1xz97LPf7XuzXGZcAG/eGUNQgxQ2bferKnrpYOXx6c + ocSRj9W54nrRFMWuDeOspWp4MoYK0FRMfDQYPksUayGUnm1KQTGuDbB0ahRNCOm8b3tf + P9Z+vjANAkqenzDuXCpz2PU/Oj6/N/UAAAAhAPOLyut12Mjcp3eUXLe1xSoI5IRXSLso + W9no93dcFNprAAABAQCLhpqKY+PNcwbhhPruL+f+uROghHzDwRNX+e231F4wHHeDDomf + WyLVFj31XrHdDXZnS9tTTj5D2XWLovSSxYb3H7earTctmktL0lQ3HapujzvOkn+VM0pG + s6B3j54+AM3mg50KZdYWxxv+v/lb6oEcsCjfKNyRIx/5pqX6XI3dxl9MMIxrfVWpkNX+ + FI68v1LVV61DC9PkNyEHU0v9YBOfrTiS21TIlVIZcSFhuDjg52MekfZAnoKaP7YFJNF3 + fdCrXaU3hYQrwB9XdskBUppwxKGhf7O6SWEZhAEfPA9kgxaWHoJvsDz8aca576UNe7BP + mjzo/SLUX+P4uvcaffd+AAABAEqzpmwjzTxB+DV8C+0LnmKf3L/UlQWyGdmhd65rnbkH + GgRMAAkoh4GBOEHL5bznNRmO7X/H6g2fR7SEabxfbvb903KI4nbfFF+3QtnwyIbTBAcH + 0893D3bi5rsaJcz+c6lBob2En2nThRciefXUk2oPzCQuDyFIyHLJikqRQVcalHCdQ00c + 
/H/JkiJedHNqaeU4TeMk8SM53Brjplj/iiJq+ujc5MlEgACdCwWp0BviFACEoYyFaa3R + kc7Xdm9vFpclm9fzgUfPloASA0SkO945in3mIqMfODTb4yRvbjk8If9483fEPgQkczpd + ptBz1VAKg8AmRcz1GmBIxs+Stn0= dummy@gitlab.com KEY end end diff --git a/spec/factories/lfs_file_locks.rb b/spec/factories/lfs_file_locks.rb new file mode 100644 index 00000000000..b9d24f82b65 --- /dev/null +++ b/spec/factories/lfs_file_locks.rb @@ -0,0 +1,7 @@ +FactoryBot.define do + factory :lfs_file_lock do + user + project + path 'README.md' + end +end diff --git a/spec/factories/pages_domains.rb b/spec/factories/pages_domains.rb index 61b04708da2..35b44e1c52e 100644 --- a/spec/factories/pages_domains.rb +++ b/spec/factories/pages_domains.rb @@ -1,6 +1,25 @@ FactoryBot.define do factory :pages_domain, class: 'PagesDomain' do - domain 'my.domain.com' + sequence(:domain) { |n| "my#{n}.domain.com" } + verified_at { Time.now } + enabled_until { 1.week.from_now } + + trait :disabled do + verified_at nil + enabled_until nil + end + + trait :unverified do + verified_at nil + end + + trait :reverify do + enabled_until { 1.hour.from_now } + end + + trait :expired do + enabled_until { 1.hour.ago } + end trait :with_certificate do certificate '-----BEGIN CERTIFICATE----- diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb index 20976977f21..1761b6e2a3b 100644 --- a/spec/factories/projects.rb +++ b/spec/factories/projects.rb @@ -81,8 +81,10 @@ FactoryBot.define do archived true end - trait :hashed do - storage_version Project::LATEST_STORAGE_VERSION + storage_version Project::LATEST_STORAGE_VERSION + + trait :legacy_storage do + storage_version nil end trait :access_requestable do @@ -249,7 +251,8 @@ FactoryBot.define do project.create_prometheus_service( active: true, properties: { - api_url: 'https://prometheus.example.com' + api_url: 'https://prometheus.example.com/', + manual_configuration: true } ) end diff --git a/spec/factories/services.rb b/spec/factories/services.rb index 110ef33c6f7..0d4fd49bf3a 100644 --- a/spec/factories/services.rb +++ b/spec/factories/services.rb @@ -30,7 +30,8 @@ FactoryBot.define do project active true properties({ - api_url: 'https://prometheus.example.com/' + api_url: 'https://prometheus.example.com/', + manual_configuration: true }) end diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb index a01c129defd..7eeed7da998 100644 --- a/spec/features/admin/admin_runners_spec.rb +++ b/spec/features/admin/admin_runners_spec.rb @@ -19,7 +19,7 @@ describe "Admin Runners" do end it 'has all necessary texts' do - expect(page).to have_text "How to setup" + expect(page).to have_text "Setup a shared Runner manually" expect(page).to have_text "Runners with last contact more than a minute ago: 1" end @@ -54,7 +54,7 @@ describe "Admin Runners" do end it 'has all necessary texts including no runner message' do - expect(page).to have_text "How to setup" + expect(page).to have_text "Setup a shared Runner manually" expect(page).to have_text "Runners with last contact more than a minute ago: 0" expect(page).to have_text 'No runners found' end diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb index a69b428d117..8f0a3611052 100644 --- a/spec/features/admin/admin_users_spec.rb +++ b/spec/features/admin/admin_users_spec.rb @@ -26,8 +26,8 @@ describe "Admin::Users" do expect(page).to have_content(user.email) expect(page).to have_content(user.name) expect(page).to have_link('Block', href: block_admin_user_path(user)) - expect(page).to 
have_link('Remove user', href: admin_user_path(user)) - expect(page).to have_link('Remove user and contributions', href: admin_user_path(user, hard_delete: true)) + expect(page).to have_button('Delete user') + expect(page).to have_button('Delete user and contributions') end describe 'Two-factor Authentication filters' do @@ -122,8 +122,8 @@ describe "Admin::Users" do expect(page).to have_content(user.email) expect(page).to have_content(user.name) expect(page).to have_link('Block user', href: block_admin_user_path(user)) - expect(page).to have_link('Remove user', href: admin_user_path(user)) - expect(page).to have_link('Remove user and contributions', href: admin_user_path(user, hard_delete: true)) + expect(page).to have_button('Delete user') + expect(page).to have_button('Delete user and contributions') end describe 'Impersonation' do @@ -382,7 +382,7 @@ describe "Admin::Users" do describe 'update user identities' do before do - allow(Gitlab::OAuth::Provider).to receive(:providers).and_return([:twitter, :twitter_updated]) + allow(Gitlab::Auth::OAuth::Provider).to receive(:providers).and_return([:twitter, :twitter_updated]) end it 'modifies twitter identity' do diff --git a/spec/features/admin/services/admin_activates_prometheus_spec.rb b/spec/features/admin/services/admin_activates_prometheus_spec.rb new file mode 100644 index 00000000000..904fe5b406b --- /dev/null +++ b/spec/features/admin/services/admin_activates_prometheus_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +describe 'Admin activates Prometheus' do + let(:admin) { create(:user, :admin) } + + before do + sign_in(admin) + + visit(admin_application_settings_services_path) + + click_link('Prometheus') + end + + it 'activates service' do + check('Active') + fill_in('API URL', with: 'http://prometheus.example.com') + click_button('Save') + + expect(page).to have_content('Application settings saved successfully') + end +end diff --git a/spec/features/auto_deploy_spec.rb b/spec/features/auto_deploy_spec.rb deleted file mode 100644 index 9aef68b7156..00000000000 --- a/spec/features/auto_deploy_spec.rb +++ /dev/null @@ -1,77 +0,0 @@ -require 'spec_helper' - -describe 'Auto deploy' do - let(:user) { create(:user) } - let(:project) { create(:project, :repository) } - - shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do - context 'when no deployment service is active' do - before do - trun_off - end - - it 'does not show a button to set up auto deploy' do - visit project_path(project) - expect(page).to have_no_content('Set up auto deploy') - end - end - - context 'when a deployment service is active' do - before do - trun_on - visit project_path(project) - end - - it 'shows a button to set up auto deploy' do - expect(page).to have_link('Set up auto deploy') - end - - it 'includes OpenShift as an available template', :js do - click_link 'Set up auto deploy' - click_button 'Apply a GitLab CI Yaml template' - - within '.gitlab-ci-yml-selector' do - expect(page).to have_content('OpenShift') - end - end - - it 'creates a merge request using "auto-deploy" branch', :js do - click_link 'Set up auto deploy' - click_button 'Apply a GitLab CI Yaml template' - within '.gitlab-ci-yml-selector' do - click_on 'OpenShift' - end - wait_for_requests - click_button 'Commit changes' - - expect(page).to have_content('New Merge Request From auto-deploy into master') - end - end - end - - context 'when user configured kubernetes from Integration > Kubernetes' do - before do - create :kubernetes_service, project: project - 
project.add_master(user) - sign_in user - end - - let(:trun_on) { project.deployment_platform.update!(active: true) } - let(:trun_off) { project.deployment_platform.update!(active: false) } - - it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' - end - - context 'when user configured kubernetes from CI/CD > Clusters' do - before do - create(:cluster, :provided_by_gcp, projects: [project]) - project.add_master(user) - sign_in user - end - - let(:trun_on) { project.deployment_platform.cluster.update!(enabled: true) } - let(:trun_off) { project.deployment_platform.cluster.update!(enabled: false) } - - it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' - end -end diff --git a/spec/features/ci_lint_spec.rb b/spec/features/ci_lint_spec.rb index 9bc23baf6cf..b1dceec9da8 100644 --- a/spec/features/ci_lint_spec.rb +++ b/spec/features/ci_lint_spec.rb @@ -3,16 +3,19 @@ require 'spec_helper' describe 'CI Lint', :js do before do sign_in(create(:user)) + + visit ci_lint_path + find('#ci-editor') + execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});") + + # Ace editor updates a hidden textarea and it happens asynchronously + wait_for('YAML content') do + find('.ace_content').text.present? + end end describe 'YAML parsing' do before do - visit ci_lint_path - # Ace editor updates a hidden textarea and it happens asynchronously - # `sleep 0.1` is actually needed here because of this - find('#ci-editor') - execute_script("ace.edit('ci-editor').setValue(" + yaml_content.to_json + ");") - sleep 0.1 click_on 'Validate' end @@ -32,11 +35,10 @@ describe 'CI Lint', :js do end context 'YAML is incorrect' do - let(:yaml_content) { '' } + let(:yaml_content) { 'value: cannot have :' } it 'displays information about an error' do expect(page).to have_content('Status: syntax is incorrect') - expect(page).to have_content('Error: Please provide content of .gitlab-ci.yml') end end @@ -48,4 +50,20 @@ describe 'CI Lint', :js do end end end + + describe 'YAML clearing' do + before do + click_on 'Clear' + end + + context 'YAML is present' do + let(:yaml_content) do + File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + end + + it 'YAML content is cleared' do + expect(page).to have_field('content', with: '', visible: false, type: 'textarea') + end + end + end end diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb index 510677ecf56..ef493db3f11 100644 --- a/spec/features/cycle_analytics_spec.rb +++ b/spec/features/cycle_analytics_spec.rb @@ -6,7 +6,7 @@ feature 'Cycle Analytics', :js do let(:project) { create(:project, :repository) } let(:issue) { create(:issue, project: project, created_at: 2.days.ago) } let(:milestone) { create(:milestone, project: project) } - let(:mr) { create_merge_request_closing_issue(issue, commit_message: "References #{issue.to_reference}") } + let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") } let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) } context 'as an allowed user' do @@ -41,8 +41,8 @@ feature 'Cycle Analytics', :js do allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue]) project.add_master(user) - create_cycle - deploy_master + @build = create_cycle(user, project, issue, mr, milestone, pipeline) + deploy_master(user, project) sign_in(user) visit 
project_cycle_analytics_path(project) @@ -117,8 +117,8 @@ feature 'Cycle Analytics', :js do project.add_guest(guest) allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue]) - create_cycle - deploy_master + create_cycle(user, project, issue, mr, milestone, pipeline) + deploy_master(user, project) sign_in(guest) visit project_cycle_analytics_path(project) @@ -166,16 +166,6 @@ feature 'Cycle Analytics', :js do expect(find('.stage-events')).to have_content("!#{mr.iid}") end - def create_cycle - issue.update(milestone: milestone) - pipeline.run - - @build = create(:ci_build, pipeline: pipeline, status: :success, author: user) - - merge_merge_requests_closing_issue(issue) - ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash) - end - def click_stage(stage_name) find('.stage-nav li', text: stage_name).click wait_for_requests diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb index 243e8536168..04217fec06c 100644 --- a/spec/features/groups/empty_states_spec.rb +++ b/spec/features/groups/empty_states_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Groups Merge Requests Empty States' do +feature 'Group empty states' do let(:group) { create(:group) } let(:user) { create(:group_member, :developer, user: create(:user), group: group ).user } @@ -8,62 +8,100 @@ feature 'Groups Merge Requests Empty States' do sign_in(user) end - context 'group has a project' do - let(:project) { create(:project, namespace: group) } + [:issue, :merge_request].each do |issuable| + issuable_name = issuable.to_s.humanize.downcase + project_relation = issuable == :issue ? :project : :source_project - before do - project.add_master(user) - end + context "for #{issuable_name}s" do + let(:path) { public_send(:"#{issuable}s_group_path", group) } - context 'the project has a merge request' do - before do - create(:merge_request, source_project: project) + context 'group has a project' do + let(:project) { create(:project, namespace: group) } - visit merge_requests_group_path(group) - end + before do + project.add_master(user) + end - it 'should not display an empty state' do - expect(page).not_to have_selector('.empty-state') - end - end + context "the project has #{issuable_name}s" do + before do + create(issuable, project_relation => project) - context 'the project has no merge requests', :js do - before do - visit merge_requests_group_path(group) - end + visit path + end - it 'should display an empty state' do - expect(page).to have_selector('.empty-state') - end + it 'does not display an empty state' do + expect(page).not_to have_selector('.empty-state') + end + end + + context "the project has no #{issuable_name}s", :js do + before do + visit path + end + + it 'displays an empty state' do + expect(page).to have_selector('.empty-state') + end + + it "shows a new #{issuable_name} button" do + within '.empty-state' do + expect(page).to have_content("create #{issuable_name}") + end + end + + it "the new #{issuable_name} button opens a project dropdown" do + within '.empty-state' do + find('.new-project-item-select-button').click + end - it 'should show a new merge request button' do - within '.empty-state' do - expect(page).to have_content('create merge request') + expect(page).to have_selector('.ajax-project-dropdown') + end end end - it 'the new merge request button opens a project dropdown' do - within '.empty-state' do - find('.new-project-item-select-button').click - end + context 'group without a project' do + 
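+        # The examples below assume issuables from a subgroup project roll up
+        # into the parent group lists, so the empty state only appears when the
+        # subgroup project has none.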
context 'group has a subgroup', :nested_groups do + let(:subgroup) { create(:group, parent: group) } + let(:subgroup_project) { create(:project, namespace: subgroup) } - expect(page).to have_selector('.ajax-project-dropdown') - end - end - end + context "the project has #{issuable_name}s" do + before do + create(issuable, project_relation => subgroup_project) - context 'group without a project' do - before do - visit merge_requests_group_path(group) - end + visit path + end - it 'should display an empty state' do - expect(page).to have_selector('.empty-state') - end + it 'does not display an empty state' do + expect(page).not_to have_selector('.empty-state') + end + end - it 'should not show a new merge request button' do - within '.empty-state' do - expect(page).not_to have_link('create merge request') + context "the project has no #{issuable_name}s" do + before do + visit path + end + + it 'displays an empty state' do + expect(page).to have_selector('.empty-state') + end + end + end + + context 'group has no subgroups' do + before do + visit path + end + + it 'displays an empty state' do + expect(page).to have_selector('.empty-state') + end + + it "shows a new #{issuable_name} button" do + within '.empty-state' do + expect(page).not_to have_link("create #{issuable_name}") + end + end + end end end end diff --git a/spec/features/groups/members/manage_members.rb b/spec/features/groups/members/manage_members_spec.rb index 21f7b4999ad..21f7b4999ad 100644 --- a/spec/features/groups/members/manage_members.rb +++ b/spec/features/groups/members/manage_members_spec.rb diff --git a/spec/features/groups/members/search_members_spec.rb b/spec/features/groups/members/search_members_spec.rb new file mode 100644 index 00000000000..31fbbcf562c --- /dev/null +++ b/spec/features/groups/members/search_members_spec.rb @@ -0,0 +1,29 @@ +require 'spec_helper' + +describe 'Search group member' do + let(:user) { create :user } + let(:member) { create :user } + + let!(:guest_group) do + create(:group) do |group| + group.add_guest(user) + group.add_guest(member) + end + end + + before do + sign_in(user) + visit group_group_members_path(guest_group) + end + + it 'renders member users' do + page.within '.member-search-form' do + fill_in 'search', with: member.name + find('.member-search-btn').click + end + + group_members_list = find(".panel .content-list") + expect(group_members_list).to have_content(member.name) + expect(group_members_list).not_to have_content(user.name) + end +end diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb index 1b41b3842c8..20337f1d3b0 100644 --- a/spec/features/groups/milestone_spec.rb +++ b/spec/features/groups/milestone_spec.rb @@ -1,6 +1,6 @@ require 'rails_helper' -feature 'Group milestones', :js do +feature 'Group milestones' do let(:group) { create(:group) } let!(:project) { create(:project_empty_repo, group: group) } let(:user) { create(:group_member, :master, user: create(:user), group: group ).user } @@ -13,7 +13,7 @@ feature 'Group milestones', :js do sign_in(user) end - context 'create a milestone' do + context 'create a milestone', :js do before do visit new_group_milestone_path(group) end @@ -61,55 +61,132 @@ feature 'Group milestones', :js do end context 'milestones list' do - let!(:other_project) { create(:project_empty_repo, group: group) } - - let!(:active_project_milestone1) { create(:milestone, project: project, state: 'active', title: 'v1.0') } - let!(:active_project_milestone2) { create(:milestone, project: other_project, 
state: 'active', title: 'v1.0') } - let!(:closed_project_milestone1) { create(:milestone, project: project, state: 'closed', title: 'v2.0') } - let!(:closed_project_milestone2) { create(:milestone, project: other_project, state: 'closed', title: 'v2.0') } - let!(:active_group_milestone) { create(:milestone, group: group, state: 'active') } - let!(:closed_group_milestone) { create(:milestone, group: group, state: 'closed') } - - before do - visit group_milestones_path(group) + context 'when no milestones' do + it 'renders no milestones text' do + visit group_milestones_path(group) + expect(page).to have_content('No milestones to show') + end end - it 'counts milestones correctly' do - expect(find('.top-area .active .badge').text).to eq("2") - expect(find('.top-area .closed .badge').text).to eq("2") - expect(find('.top-area .all .badge').text).to eq("4") - end + context 'when milestones exists' do + let!(:other_project) { create(:project_empty_repo, group: group) } + + let!(:active_project_milestone1) do + create( + :milestone, + project: project, + state: 'active', + title: 'v1.0', + due_date: '2114-08-20', + description: 'Lorem Ipsum is simply dummy text' + ) + end + let!(:active_project_milestone2) { create(:milestone, project: other_project, state: 'active', title: 'v1.0') } + let!(:closed_project_milestone1) { create(:milestone, project: project, state: 'closed', title: 'v2.0') } + let!(:closed_project_milestone2) { create(:milestone, project: other_project, state: 'closed', title: 'v2.0') } + let!(:active_group_milestone) { create(:milestone, group: group, state: 'active', title: 'GL-113') } + let!(:closed_group_milestone) { create(:milestone, group: group, state: 'closed') } + let!(:issue) do + create :issue, project: project, assignees: [user], author: user, milestone: active_project_milestone1 + end - it 'lists legacy group milestones and group milestones' do - legacy_milestone = GroupMilestone.build_collection(group, group.projects, { state: 'active' }).first + before do + visit group_milestones_path(group) + end - expect(page).to have_selector("#milestone_#{active_group_milestone.id}", count: 1) - expect(page).to have_selector("#milestone_#{legacy_milestone.milestones.first.id}", count: 1) - end + it 'counts milestones correctly' do + expect(find('.top-area .active .badge').text).to eq("2") + expect(find('.top-area .closed .badge').text).to eq("2") + expect(find('.top-area .all .badge').text).to eq("4") + end - it 'updates milestone' do - page.within(".milestones #milestone_#{active_group_milestone.id}") do - click_link('Edit') + it 'lists legacy group milestones and group milestones' do + legacy_milestone = GroupMilestone.build_collection(group, group.projects, { state: 'active' }).first + + expect(page).to have_selector("#milestone_#{active_group_milestone.id}", count: 1) + expect(page).to have_selector("#milestone_#{legacy_milestone.milestones.first.id}", count: 1) end - page.within('.milestone-form') do - fill_in 'milestone_title', with: 'new title' - click_button('Update milestone') + it 'updates milestone' do + page.within(".milestones #milestone_#{active_group_milestone.id}") do + click_link('Edit') + end + + page.within('.milestone-form') do + fill_in 'milestone_title', with: 'new title' + click_button('Update milestone') + end + + expect(find('#content-body h2')).to have_content('new title') end - expect(find('#content-body h2')).to have_content('new title') - end + it 'shows milestone detail and supports its edit' do + page.within(".milestones 
#milestone_#{active_group_milestone.id}") do + click_link(active_group_milestone.title) + end + + page.within('.detail-page-header') do + click_link('Edit') + end - it 'shows milestone detail and supports its edit' do - page.within(".milestones #milestone_#{active_group_milestone.id}") do - click_link(active_group_milestone.title) + expect(page).to have_selector('.milestone-form') end - page.within('.detail-page-header') do - click_link('Edit') + it 'renders milestones' do + expect(page).to have_content('v1.0') + expect(page).to have_content('GL-113') + expect(page).to have_link( + '1 Issue', + href: issues_group_path(group, milestone_title: 'v1.0') + ) + expect(page).to have_link( + '0 Merge Requests', + href: merge_requests_group_path(group, milestone_title: 'v1.0') + ) end - expect(page).to have_selector('.milestone-form') + it 'renders group milestone details' do + click_link 'v1.0' + + expect(page).to have_content('expires on Aug 20, 2114') + expect(page).to have_content('v1.0') + expect(page).to have_content('Issues 1 Open: 1 Closed: 0') + expect(page).to have_link(issue.title, href: project_issue_path(issue.project, issue)) + end + + describe 'labels' do + before do + create(:label, project: project, title: 'bug') do |label| + issue.labels << label + end + + create(:label, project: project, title: 'feature') do |label| + issue.labels << label + end + end + + it 'renders labels' do + click_link 'v1.0' + + page.within('#tab-issues') do + expect(page).to have_content 'bug' + expect(page).to have_content 'feature' + end + end + + it 'renders labels list', :js do + click_link 'v1.0' + + page.within('.content .nav-links') do + page.find(:xpath, "//a[@href='#tab-labels']").click + end + + page.within('#tab-labels') do + expect(page).to have_content 'bug' + expect(page).to have_content 'feature' + end + end + end end end end diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb index f355cec3ba9..41b9ada988a 100644 --- a/spec/features/issues/filtered_search/recent_searches_spec.rb +++ b/spec/features/issues/filtered_search/recent_searches_spec.rb @@ -39,8 +39,8 @@ describe 'Recent searches', :js do items = all('.filtered-search-history-dropdown-item', visible: false, count: 2) - expect(items[0].text).to eq('label:~qux garply') - expect(items[1].text).to eq('label:~foo bar') + expect(items[0].text).to eq('label: ~qux garply') + expect(items[1].text).to eq('label: ~foo bar') end it 'saved recent searches are restored last on the list' do diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb index faf14be4818..ef6b8edd0ad 100644 --- a/spec/features/issues/form_spec.rb +++ b/spec/features/issues/form_spec.rb @@ -189,6 +189,18 @@ describe 'New/edit issue', :js do expect(find('.js-label-select')).to have_content('Labels') end + it 'clears label search input field when a label is selected' do + click_button 'Labels' + + page.within '.dropdown-menu-labels' do + search_field = find('input[type="search"]') + + search_field.set(label2.title) + click_link label2.title + expect(search_field.value).to eq '' + end + end + it 'correctly updates the selected user when changing assignee' do click_button 'Unassigned' @@ -271,6 +283,18 @@ describe 'New/edit issue', :js do end end + context 'inline edit' do + before do + visit project_issue_path(project, issue) + end + + it 'opens inline edit form with shortcut' do + find('body').send_keys('e') + + expect(page).to 
have_selector('.detail-page-description form') + end + end + describe 'sub-group project' do let(:group) { create(:group) } let(:nested_group_1) { create(:group, parent: group) } diff --git a/spec/features/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb index f82ed6300cc..f82ed6300cc 100644 --- a/spec/features/copy_as_gfm_spec.rb +++ b/spec/features/markdown/copy_as_gfm_spec.rb diff --git a/spec/features/gitlab_flavored_markdown_spec.rb b/spec/features/markdown/gitlab_flavored_markdown_spec.rb index 3c2186b3598..3c2186b3598 100644 --- a/spec/features/gitlab_flavored_markdown_spec.rb +++ b/spec/features/markdown/gitlab_flavored_markdown_spec.rb diff --git a/spec/features/markdown_spec.rb b/spec/features/markdown/markdown_spec.rb index f13d78d24e3..f13d78d24e3 100644 --- a/spec/features/markdown_spec.rb +++ b/spec/features/markdown/markdown_spec.rb diff --git a/spec/features/markdown/math_spec.rb b/spec/features/markdown/math_spec.rb new file mode 100644 index 00000000000..6a23d6b78ab --- /dev/null +++ b/spec/features/markdown/math_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' + +describe 'Math rendering', :js do + it 'renders inline and display math correctly' do + description = <<~MATH + This math is inline $`a^2+b^2=c^2`$. + + This is on a separate line + ```math + a^2+b^2=c^2 + ``` + MATH + + project = create(:project, :public) + issue = create(:issue, project: project, description: description) + + visit project_issue_path(project, issue) + + expect(page).to have_selector('.katex .mord.mathit', text: 'b') + expect(page).to have_selector('.katex-display .mord.mathit', text: 'b') + end +end diff --git a/spec/features/markdown/mermaid_spec.rb b/spec/features/markdown/mermaid_spec.rb new file mode 100644 index 00000000000..a25d701ee35 --- /dev/null +++ b/spec/features/markdown/mermaid_spec.rb @@ -0,0 +1,24 @@ +require 'spec_helper' + +describe 'Mermaid rendering', :js do + it 'renders Mermaid diagrams correctly' do + description = <<~MERMAID + ```mermaid + graph TD; + A-->B; + A-->C; + B-->D; + C-->D; + ``` + MERMAID + + project = create(:project, :public) + issue = create(:issue, project: project, description: description) + + visit project_issue_path(project, issue) + + %w[A B C D].each do |label| + expect(page).to have_selector('svg foreignObject', text: label) + end + end +end diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb index 50d06565fc0..b54addce993 100644 --- a/spec/features/merge_request/user_posts_notes_spec.rb +++ b/spec/features/merge_request/user_posts_notes_spec.rb @@ -144,7 +144,7 @@ describe 'Merge request > User posts notes', :js do end end - describe 'deleting an attachment' do + describe 'deleting attachment on legacy diff note' do before do find('.note').hover diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb index b02d2d4261c..cc12a1005ba 100644 --- a/spec/features/milestone_spec.rb +++ b/spec/features/milestone_spec.rb @@ -66,15 +66,16 @@ feature 'Milestone' do end end - feature 'Open a milestone' do + feature 'Open a milestone', :js do scenario 'shows total issue time spent correctly when no time has been logged' do milestone = create(:milestone, project: project, title: 8.7) visit project_milestone_path(project, milestone) - page.within('.block.time_spent') do - expect(page).to have_content 'No time spent' - expect(page).to have_content 'None' + wait_for_requests + + page.within('.time-tracking-no-tracking-pane') do + expect(page).to have_content 
'No estimate or time spent'
 end
 end
@@ -89,8 +90,10 @@ feature 'Milestone' do
 visit project_milestone_path(project, milestone)
- page.within('.block.time_spent') do
- expect(page).to have_content '3h'
+ wait_for_requests
+
+ page.within('.time-tracking-spend-only-pane') do
+ expect(page).to have_content 'Spent: 3h'
 end
 end
 end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 4665626f114..f9c6ff90ca1 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -1,6 +1,15 @@
 require 'spec_helper'
 describe 'Profile > Password' do
+ let(:user) { create(:user) }
+
+ def fill_passwords(password, confirmation)
+ fill_in 'New password', with: password
+ fill_in 'Password confirmation', with: confirmation
+
+ click_button 'Save password'
+ end
+
 context 'Password authentication enabled' do
 let(:user) { create(:user, password_automatically_set: true) }
@@ -9,13 +18,6 @@ describe 'Profile > Password' do
 visit edit_profile_password_path
 end
- def fill_passwords(password, confirmation)
- fill_in 'New password', with: password
- fill_in 'Password confirmation', with: confirmation
-
- click_button 'Save password'
- end
-
 context 'User with password automatically set' do
 describe 'User puts different passwords in the field and in the confirmation' do
 it 'shows an error message' do
@@ -73,4 +75,74 @@ describe 'Profile > Password' do
 end
 end
 end
+
+ context 'Change password' do
+ before do
+ sign_in(user)
+ visit(edit_profile_password_path)
+ end
+
+ it 'does not change user password without old one' do
+ page.within '.update-password' do
+ fill_passwords('22233344', '22233344')
+ end
+
+ page.within '.flash-container' do
+ expect(page).to have_content 'You must provide a valid current password'
+ end
+ end
+
+ it 'does not change password with invalid old password' do
+ page.within '.update-password' do
+ fill_in 'user_current_password', with: 'invalid'
+ fill_passwords('password', 'confirmation')
+ end
+
+ page.within '.flash-container' do
+ expect(page).to have_content 'You must provide a valid current password'
+ end
+ end
+
+ it 'changes user password' do
+ page.within '.update-password' do
+ fill_in "user_current_password", with: user.password
+ fill_passwords('22233344', '22233344')
+ end
+
+ expect(current_path).to eq new_user_session_path
+ end
+ end
+
+ context 'when password is expired' do
+ before do
+ sign_in(user)
+
+ user.update_attributes(password_expires_at: 1.hour.ago)
+ user.identities.delete
+ expect(user.ldap_user?).to eq false
+ end
+
+ it 'needs to change user password' do
+ visit edit_profile_password_path
+
+ expect(current_path).to eq new_profile_password_path
+
+ fill_in :user_current_password, with: user.password
+ fill_in :user_password, with: '12345678'
+ fill_in :user_password_confirmation, with: '12345678'
+ click_button 'Set new password'
+
+ expect(current_path).to eq new_user_session_path
+ end
+
+ context 'when global require_two_factor_authentication is enabled' do
+ it 'needs to change user password' do
+ stub_application_setting(require_two_factor_authentication: true)
+
+ visit profile_path
+
+ expect(current_path).to eq new_profile_password_path
+ end
+ end
+ end
 end
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
new file mode 100644
index 00000000000..0b5eacbe916
--- /dev/null
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -0,0 +1,58 @@
+require 'spec_helper'
+
+describe 'User edit profile' do
+
let(:user) { create(:user) } + + before do + sign_in(user) + visit(profile_path) + end + + it 'changes user profile' do + fill_in 'user_skype', with: 'testskype' + fill_in 'user_linkedin', with: 'testlinkedin' + fill_in 'user_twitter', with: 'testtwitter' + fill_in 'user_website_url', with: 'testurl' + fill_in 'user_location', with: 'Ukraine' + fill_in 'user_bio', with: 'I <3 GitLab' + fill_in 'user_organization', with: 'GitLab' + click_button 'Update profile settings' + + expect(user.reload).to have_attributes( + skype: 'testskype', + linkedin: 'testlinkedin', + twitter: 'testtwitter', + website_url: 'testurl', + bio: 'I <3 GitLab', + organization: 'GitLab' + ) + + expect(find('#user_location').value).to eq 'Ukraine' + expect(page).to have_content('Profile was successfully updated') + end + + context 'user avatar' do + before do + attach_file(:user_avatar, Rails.root.join('spec', 'fixtures', 'banana_sample.gif')) + click_button 'Update profile settings' + end + + it 'changes user avatar' do + expect(page).to have_link('Remove avatar') + + user.reload + expect(user.avatar).to be_instance_of AvatarUploader + expect(user.avatar.url).to eq "/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif" + end + + it 'removes user avatar' do + click_link 'Remove avatar' + + user.reload + + expect(user.avatar?).to eq false + expect(page).not_to have_link('Remove avatar') + expect(page).to have_link('gravatar.com') + end + end +end diff --git a/spec/features/profiles/user_manages_applications_spec.rb b/spec/features/profiles/user_manages_applications_spec.rb new file mode 100644 index 00000000000..387584fef62 --- /dev/null +++ b/spec/features/profiles/user_manages_applications_spec.rb @@ -0,0 +1,39 @@ +require 'spec_helper' + +describe 'User manages applications' do + let(:user) { create(:user) } + + before do + sign_in(user) + visit applications_profile_path + end + + it 'manages applications' do + expect(page).to have_content 'Add new application' + + fill_in :doorkeeper_application_name, with: 'test' + fill_in :doorkeeper_application_redirect_uri, with: 'https://test.com' + click_on 'Save application' + + expect(page).to have_content 'Application: test' + expect(page).to have_content 'Application Id' + expect(page).to have_content 'Secret' + + click_on 'Edit' + + expect(page).to have_content 'Edit application' + fill_in :doorkeeper_application_name, with: 'test_changed' + click_on 'Save application' + + expect(page).to have_content 'test_changed' + expect(page).to have_content 'Application Id' + expect(page).to have_content 'Secret' + + visit applications_profile_path + + page.within '.oauth-applications' do + click_on 'Destroy' + end + expect(page.find('.oauth-applications')).not_to have_content 'test_changed' + end +end diff --git a/spec/features/profiles/user_visits_profile_authentication_log_spec.rb b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb index a50ebb29e01..0f419c3c2c0 100644 --- a/spec/features/profiles/user_visits_profile_authentication_log_spec.rb +++ b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb @@ -3,13 +3,28 @@ require 'spec_helper' describe 'User visits the authentication log' do let(:user) { create(:user) } - before do - sign_in(user) + context 'when user signed in' do + before do + sign_in(user) + end - visit(audit_log_profile_path) + it 'shows correct menu item' do + visit(audit_log_profile_path) + + expect(page).to have_active_navigation('Authentication log') + end end - it 'shows correct menu item' do - expect(page).to 
have_active_navigation('Authentication log') + context 'when user has activity' do + before do + create(:closed_issue_event, author: user) + gitlab_sign_in(user) + end + + it 'shows user activity' do + visit(audit_log_profile_path) + + expect(page).to have_content 'Signed in with standard authentication' + end end end diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb index 6601d3039ed..713112477c8 100644 --- a/spec/features/profiles/user_visits_profile_spec.rb +++ b/spec/features/profiles/user_visits_profile_spec.rb @@ -5,11 +5,58 @@ describe 'User visits their profile' do before do sign_in(user) - - visit(profile_path) end it 'shows correct menu item' do + visit(profile_path) + expect(page).to have_active_navigation('Profile') end + + it 'shows profile info' do + visit(profile_path) + + expect(page).to have_content "This information will appear on your profile" + end + + context 'when user has groups' do + let(:group) do + create :group do |group| + group.add_owner(user) + end + end + + let!(:project) do + create(:project, :repository, namespace: group) do |project| + create(:closed_issue_event, project: project) + project.add_master(user) + end + end + + def click_on_profile_picture + find(:css, '.header-user-dropdown-toggle').click + + page.within ".header-user" do + click_link "Profile" + end + end + + it 'shows user groups', :js do + visit(profile_path) + click_on_profile_picture + + page.within ".cover-block" do + expect(page).to have_content user.name + expect(page).to have_content user.username + end + + page.within ".content" do + click_link "Groups" + end + + page.within "#groups" do + expect(page).to have_content group.name + end + end + end end diff --git a/spec/features/projects/clusters/applications_spec.rb b/spec/features/projects/clusters/applications_spec.rb index 8d1e10b7191..7b2c57aa652 100644 --- a/spec/features/projects/clusters/applications_spec.rb +++ b/spec/features/projects/clusters/applications_spec.rb @@ -22,7 +22,7 @@ feature 'Clusters Applications', :js do scenario 'user is unable to install applications' do page.within('.js-cluster-application-row-helm') do expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') - expect(page.find(:css, '.js-cluster-application-install-button').text).to eq('Install') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install') end end end @@ -33,13 +33,13 @@ feature 'Clusters Applications', :js do scenario 'user can install applications' do page.within('.js-cluster-application-row-helm') do expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to be_nil - expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install') end end context 'when user installs Helm' do before do - allow(ClusterInstallAppWorker).to receive(:perform_async).and_return(nil) + allow(ClusterInstallAppWorker).to receive(:perform_async) page.within('.js-cluster-application-row-helm') do page.find(:css, '.js-cluster-application-install-button').click @@ -50,18 +50,18 @@ feature 'Clusters Applications', :js do page.within('.js-cluster-application-row-helm') do # FE sends request and gets the response, then the buttons is "Install" expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') - expect(page.find(:css, 
'.js-cluster-application-install-button')).to have_content('Install') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install') Clusters::Cluster.last.application_helm.make_installing! # FE starts polling and update the buttons to "Installing" expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') - expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installing') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing') Clusters::Cluster.last.application_helm.make_installed! expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') - expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installed') end expect(page).to have_content('Helm Tiller was successfully installed on your Kubernetes cluster') @@ -71,11 +71,14 @@ feature 'Clusters Applications', :js do context 'when user installs Ingress' do context 'when user installs application: Ingress' do before do - allow(ClusterInstallAppWorker).to receive(:perform_async).and_return(nil) + allow(ClusterInstallAppWorker).to receive(:perform_async) + allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in) + allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async) create(:clusters_applications_helm, :installed, cluster: cluster) page.within('.js-cluster-application-row-ingress') do + expect(page).to have_css('.js-cluster-application-install-button:not([disabled])') page.find(:css, '.js-cluster-application-install-button').click end end @@ -83,19 +86,28 @@ feature 'Clusters Applications', :js do it 'he sees status transition' do page.within('.js-cluster-application-row-ingress') do # FE sends request and gets the response, then the buttons is "Install" - expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') - expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install') + expect(page).to have_css('.js-cluster-application-install-button[disabled]') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install') Clusters::Cluster.last.application_ingress.make_installing! # FE starts polling and update the buttons to "Installing" - expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') - expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installing') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing') + expect(page).to have_css('.js-cluster-application-install-button[disabled]') + # The application becomes installed but we keep waiting for external IP address Clusters::Cluster.last.application_ingress.make_installed! 
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') - expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed') + expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installed') + expect(page).to have_css('.js-cluster-application-install-button[disabled]') + expect(page).to have_selector('.js-no-ip-message') + expect(page.find('.js-ip-address').value).to eq('?') + + # We receive the external IP address and display + Clusters::Cluster.last.application_ingress.update!(external_ip: '192.168.1.100') + + expect(page).not_to have_selector('.js-no-ip-message') + expect(page.find('.js-ip-address').value).to eq('192.168.1.100') end expect(page).to have_content('Ingress was successfully installed on your Kubernetes cluster') diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb index 02dbd3380b3..4d47cdb500c 100644 --- a/spec/features/projects/clusters/gcp_spec.rb +++ b/spec/features/projects/clusters/gcp_spec.rb @@ -25,7 +25,7 @@ feature 'Gcp Cluster', :js do context 'when user has a GCP project with billing enabled' do before do allow_any_instance_of(Projects::Clusters::GcpController).to receive(:authorize_google_project_billing) - allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return('true') + allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return(true) end context 'when user does not have a cluster and visits cluster index page' do @@ -134,7 +134,7 @@ feature 'Gcp Cluster', :js do context 'when user does not have a GCP project with billing enabled' do before do allow_any_instance_of(Projects::Clusters::GcpController).to receive(:authorize_google_project_billing) - allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return('false') + allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return(false) visit project_clusters_path(project) diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb index 8ac9821b879..7f1d1934103 100644 --- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb +++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb @@ -11,7 +11,7 @@ feature 'project owner sees a link to create a license file in empty project', : scenario 'project master creates a license file from a template' do visit project_path(project) - click_on 'LICENSE' + click_on 'Add License' expect(page).to have_content('New file') expect(current_path).to eq( diff --git a/spec/features/projects/import_export/namespace_export_file_spec.rb b/spec/features/projects/import_export/namespace_export_file_spec.rb index c1fccf4a40b..7d056b0c140 100644 --- a/spec/features/projects/import_export/namespace_export_file_spec.rb +++ b/spec/features/projects/import_export/namespace_export_file_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature 'Import/Export - Namespace export file cleanup', :js do +describe 'Import/Export - Namespace export file cleanup', :js do let(:export_path) { Dir.mktmpdir('namespace_export_file_spec') } before do @@ -42,13 +42,13 @@ feature 'Import/Export - Namespace export file 
cleanup', :js do end describe 'legacy storage' do - let(:project) { create(:project) } + let(:project) { create(:project, :legacy_storage) } it_behaves_like 'handling project exports on namespace change' end describe 'hashed storage' do - let(:project) { create(:project, :hashed) } + let(:project) { create(:project) } it_behaves_like 'handling project exports on namespace change' end diff --git a/spec/features/projects/members/share_with_group_spec.rb b/spec/features/projects/members/share_with_group_spec.rb index 4cf48098401..134c8b8bc39 100644 --- a/spec/features/projects/members/share_with_group_spec.rb +++ b/spec/features/projects/members/share_with_group_spec.rb @@ -149,6 +149,11 @@ feature 'Project > Members > Share with Group', :js do create(:group).add_owner(master) visit project_settings_members_path(project) + + click_link 'Share with group' + + find('.ajax-groups-select.select2-container') + execute_script 'GROUP_SELECT_PER_PAGE = 1;' open_select2 '#link_group_id' end diff --git a/spec/features/projects/network_graph_spec.rb b/spec/features/projects/network_graph_spec.rb new file mode 100644 index 00000000000..9f9a7787093 --- /dev/null +++ b/spec/features/projects/network_graph_spec.rb @@ -0,0 +1,108 @@ +require 'spec_helper' + +describe 'Project Network Graph', :js do + let(:user) { create :user } + let(:project) { create :project, :repository, namespace: user.namespace } + + before do + sign_in(user) + + # Stub Graph max_size to speed up test (10 commits vs. 650) + allow(Network::Graph).to receive(:max_count).and_return(10) + end + + context 'when branch is master' do + def switch_ref_to(ref_name) + first('.js-project-refs-dropdown').click + + page.within '.project-refs-form' do + click_link ref_name + end + end + + def click_show_only_selected_branch_checkbox + find('#filter_ref').click + end + + before do + visit project_network_path(project, 'master') + end + + it 'renders project network' do + expect(page).to have_selector ".network-graph" + expect(page).to have_selector '.dropdown-menu-toggle', text: "master" + page.within '.network-graph' do + expect(page).to have_content 'master' + end + end + + it 'switches ref to branch' do + switch_ref_to('feature') + + expect(page).to have_selector '.dropdown-menu-toggle', text: 'feature' + page.within '.network-graph' do + expect(page).to have_content 'feature' + end + end + + it 'switches ref to tag' do + switch_ref_to('v1.0.0') + + expect(page).to have_selector '.dropdown-menu-toggle', text: 'v1.0.0' + page.within '.network-graph' do + expect(page).to have_content 'v1.0.0' + end + end + + it 'renders by commit sha of "v1.0.0"' do + page.within ".network-form" do + fill_in 'extended_sha1', with: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' + find('button').click + end + + expect(page).to have_selector ".network-graph" + expect(page).to have_selector '.dropdown-menu-toggle', text: "master" + page.within '.network-graph' do + expect(page).to have_content 'v1.0.0' + end + end + + it 'filters select tag' do + switch_ref_to('v1.0.0') + + expect(page).to have_css 'title', text: 'Graph · v1.0.0', visible: false + page.within '.network-graph' do + expect(page).to have_content 'Change some files' + end + + click_show_only_selected_branch_checkbox + + page.within '.network-graph' do + expect(page).not_to have_content 'Change some files' + end + + click_show_only_selected_branch_checkbox + + page.within '.network-graph' do + expect(page).to have_content 'Change some files' + end + end + + it 'renders error message when sha commit not exists' 
do + page.within ".network-form" do + fill_in 'extended_sha1', with: ';' + find('button').click + end + + expect(page).to have_selector '.flash-alert', text: "Git revision ';' does not exist." + end + end + + it 'renders project network with test branch' do + visit project_network_path(project, "'test'") + + page.within '.network-graph' do + expect(page).to have_content "'test'" + end + end +end diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb index 6f097ad16c7..b5104747d00 100644 --- a/spec/features/projects/new_project_spec.rb +++ b/spec/features/projects/new_project_spec.rb @@ -140,7 +140,7 @@ feature 'New project' do find('#import-project-tab').click end - context 'from git repository url' do + context 'from git repository url, "Repo by URL"' do before do first('.import_git').click end @@ -157,6 +157,18 @@ feature 'New project' do expect(git_import_instructions).to be_visible expect(git_import_instructions).to have_content 'Git repository URL' end + + it 'keeps "Import project" tab open after form validation error' do + collision_project = create(:project, name: 'test-name-collision', namespace: user.namespace) + + fill_in 'project_import_url', with: collision_project.http_url_to_repo + fill_in 'project_path', with: collision_project.path + + click_on 'Create project' + + expect(page).to have_css('#import-project-pane.active') + expect(page).not_to have_css('.toggle-import-form.hide') + end end context 'from GitHub' do diff --git a/spec/features/projects/pages_spec.rb b/spec/features/projects/pages_spec.rb index 2e334caa98f..2a0d235ef04 100644 --- a/spec/features/projects/pages_spec.rb +++ b/spec/features/projects/pages_spec.rb @@ -17,6 +17,7 @@ feature 'Pages' do scenario 'does not see anything to destroy' do visit project_pages_path(project) + expect(page).to have_content('Configure pages') expect(page).not_to have_link('Remove pages') expect(page).not_to have_text('Only the project owner can remove pages') end @@ -32,14 +33,192 @@ feature 'Pages' do allow_any_instance_of(Project).to receive(:pages_deployed?) 
{ true } end - scenario 'sees "Remove pages" link' do + scenario 'renders Access pages' do visit project_pages_path(project) - expect(page).to have_link('Remove pages') + expect(page).to have_content('Access pages') + end + + context 'when support for external domains is disabled' do + before do + allow(Gitlab.config.pages).to receive(:external_http).and_return(nil) + allow(Gitlab.config.pages).to receive(:external_https).and_return(nil) + end + + it 'renders message that support is disabled' do + visit project_pages_path(project) + + expect(page).to have_content('Support for domains and certificates is disabled') + end + end + + context 'when pages are exposed on external HTTP address' do + shared_examples 'adds new domain' do + it 'adds new domain' do + visit new_project_pages_domain_path(project) + + fill_in 'Domain', with: 'my.test.domain.com' + click_button 'Create New Domain' + + expect(page).to have_content('my.test.domain.com') + end + end + + before do + allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80']) + allow(Gitlab.config.pages).to receive(:external_https).and_return(nil) + end + + it 'allows to add new domain' do + visit project_pages_path(project) + + expect(page).to have_content('New Domain') + end + + it_behaves_like 'adds new domain' + + context 'when project in group namespace' do + it_behaves_like 'adds new domain' do + let(:group) { create :group } + let(:project) { create :project, namespace: group } + end + end + + context 'when pages domain is added' do + before do + project.pages_domains.create!(domain: 'my.test.domain.com') + + visit new_project_pages_domain_path(project) + end + + it 'renders certificates is disabled' do + expect(page).to have_content('Support for custom certificates is disabled') + end + + it 'does not adds new domain and renders error message' do + fill_in 'Domain', with: 'my.test.domain.com' + click_button 'Create New Domain' + + expect(page).to have_content('Domain has already been taken') + end + end + end + + context 'when pages are exposed on external HTTPS address' do + let(:certificate_pem) do + <<~PEM + -----BEGIN CERTIFICATE----- + MIICGzCCAYSgAwIBAgIBATANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExB0ZXN0 + LWNlcnRpZmljYXRlMB4XDTE2MDIxMjE0MzIwMFoXDTIwMDQxMjE0MzIwMFowGzEZ + MBcGA1UEAxMQdGVzdC1jZXJ0aWZpY2F0ZTCBnzANBgkqhkiG9w0BAQEFAAOBjQAw + gYkCgYEApL4J9L0ZxFJ1hI1LPIflAlAGvm6ZEvoT4qKU5Xf2JgU7/2geNR1qlNFa + SvCc08Knupp5yTgmvyK/Xi09U0N82vvp4Zvr/diSc4A/RA6Mta6egLySNT438kdT + nY2tR5feoTLwQpX0t4IMlwGQGT5h6Of2fKmDxzuwuyffcIHqLdsCAwEAAaNvMG0w + DAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUxl9WSxBprB0z0ibJs3rXEk0+95AwCwYD + VR0PBAQDAgXgMBEGCWCGSAGG+EIBAQQEAwIGQDAeBglghkgBhvhCAQ0EERYPeGNh + IGNlcnRpZmljYXRlMA0GCSqGSIb3DQEBBQUAA4GBAGC4T8SlFHK0yPSa+idGLQFQ + joZp2JHYvNlTPkRJ/J4TcXxBTJmArcQgTIuNoBtC+0A/SwdK4MfTCUY4vNWNdese + 5A4K65Nb7Oh1AdQieTBHNXXCdyFsva9/ScfQGEl7p55a52jOPs0StPd7g64uvjlg + YHi2yesCrOvVXt+lgPTd + -----END CERTIFICATE----- + PEM + end + + let(:certificate_key) do + <<~KEY + -----BEGIN PRIVATE KEY----- + MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAKS+CfS9GcRSdYSN + SzyH5QJQBr5umRL6E+KilOV39iYFO/9oHjUdapTRWkrwnNPCp7qaeck4Jr8iv14t + PVNDfNr76eGb6/3YknOAP0QOjLWunoC8kjU+N/JHU52NrUeX3qEy8EKV9LeCDJcB + kBk+Yejn9nypg8c7sLsn33CB6i3bAgMBAAECgYA2D26w80T7WZvazYr86BNMePpd + j2mIAqx32KZHzt/lhh40J/SRtX9+Kl0Y7nBoRR5Ja9u/HkAIxNxLiUjwg9r6cpg/ + uITEF5nMt7lAk391BuI+7VOZZGbJDsq2ulPd6lO+C8Kq/PI/e4kXcIjeH6KwQsuR + 5vrXfBZ3sQfflaiN4QJBANBt8JY2LIGQF8o89qwUpRL5vbnKQ4IzZ5+TOl4RLR7O + AQpJ81tGuINghO7aunctb6rrcKJrxmEH1whzComybrMCQQDKV49nOBudRBAIgG4K 
+ EnLzsRKISUHMZSJiYTYnablof8cKw1JaQduw7zgrUlLwnroSaAGX88+Jw1f5n2Lh + Vlg5AkBDdUGnrDLtYBCDEQYZHblrkc7ZAeCllDOWjxUV+uMqlCv8A4Ey6omvY57C + m6I8DkWVAQx8VPtozhvHjUw80rZHAkB55HWHAM3h13axKG0htCt7klhPsZHpx6MH + EPjGlXIT+aW2XiPmK3ZlCDcWIenE+lmtbOpI159Wpk8BGXs/s/xBAkEAlAY3ymgx + 63BDJEwvOb2IaP8lDDxNsXx9XJNVvQbv5n15vNsLHbjslHfAhAbxnLQ1fLhUPqSi + nNp/xedE1YxutQ== + -----END PRIVATE KEY----- + KEY + end + + before do + allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80']) + allow(Gitlab.config.pages).to receive(:external_https).and_return(['1.1.1.1:443']) + end + + it 'adds new domain with certificate' do + visit new_project_pages_domain_path(project) + + fill_in 'Domain', with: 'my.test.domain.com' + fill_in 'Certificate (PEM)', with: certificate_pem + fill_in 'Key (PEM)', with: certificate_key + click_button 'Create New Domain' + + expect(page).to have_content('my.test.domain.com') + end + + describe 'updating the certificate for an existing domain' do + let!(:domain) do + create(:pages_domain, :with_key, :with_certificate, project: project) + end + + it 'allows the certificate to be updated' do + visit project_pages_path(project) + + within('#content-body') { click_link 'Details' } + click_link 'Edit' + click_button 'Save Changes' + + expect(page).to have_content('Domain was updated') + end + + context 'when the certificate is invalid' do + it 'tells the user what the problem is' do + visit project_pages_path(project) + + within('#content-body') { click_link 'Details' } + click_link 'Edit' + fill_in 'Certificate (PEM)', with: 'invalid data' + click_button 'Save Changes' + + expect(page).to have_content('Certificate must be a valid PEM certificate') + expect(page).to have_content('Certificate misses intermediates') + expect(page).to have_content("Key doesn't match the certificate") + end + end + end end end it_behaves_like 'no pages deployed' + + describe 'project settings page' do + it 'renders "Pages" tab' do + visit edit_project_path(project) + + page.within '.nav-sidebar' do + expect(page).to have_link('Pages') + end + end + + context 'when pages are disabled' do + before do + allow(Gitlab.config.pages).to receive(:enabled).and_return(false) + end + + it 'does not render "Pages" tab' do + visit edit_project_path(project) + + page.within '.nav-sidebar' do + expect(page).not_to have_link('Pages') + end + end + end + end end context 'when the user is not the owner' do @@ -57,4 +236,54 @@ feature 'Pages' do it_behaves_like 'no pages deployed' end + + describe 'Remove page' do + context 'when user is the owner' do + let(:project) { create :project, :repository } + + before do + project.namespace.update(owner: user) + end + + context 'when pages are deployed' do + let(:pipeline) do + commit_sha = project.commit('HEAD').sha + + project.pipelines.create( + ref: 'HEAD', + sha: commit_sha, + source: :push, + protected: false + ) + end + + let(:ci_build) do + build( + :ci_build, + project: project, + pipeline: pipeline, + ref: 'HEAD', + legacy_artifacts_file: fixture_file_upload(Rails.root.join('spec/fixtures/pages.zip')), + legacy_artifacts_metadata: fixture_file_upload(Rails.root.join('spec/fixtures/pages.zip.meta')) + ) + end + + before do + result = Projects::UpdatePagesService.new(project, ci_build).execute + expect(result[:status]).to eq(:success) + expect(project).to be_pages_deployed + end + + it 'removes the pages' do + visit project_pages_path(project) + + expect(page).to have_link('Remove pages') + + click_link 'Remove pages' + + expect(project.pages_deployed?).to 
be_falsey + end + end + end + end end diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb index 592c99fc64a..3a8e7c05cc4 100644 --- a/spec/features/projects/pipelines/pipelines_spec.rb +++ b/spec/features/projects/pipelines/pipelines_spec.rb @@ -109,7 +109,8 @@ describe 'Pipelines', :js do context 'when canceling' do before do - accept_confirm { find('.js-pipelines-cancel-button').click } + find('.js-pipelines-cancel-button').click + find('.js-primary-button').click wait_for_requests end @@ -140,6 +141,7 @@ describe 'Pipelines', :js do context 'when retrying' do before do find('.js-pipelines-retry-button').click + find('.js-primary-button').click wait_for_requests end @@ -238,7 +240,8 @@ describe 'Pipelines', :js do context 'when canceling' do before do - accept_alert { find('.js-pipelines-cancel-button').click } + find('.js-pipelines-cancel-button').click + find('.js-primary-button').click end it 'indicates that pipeline was canceled' do @@ -364,23 +367,6 @@ describe 'Pipelines', :js do expect(build.reload).to be_canceled end end - - context 'dropdown jobs list' do - it 'should keep the dropdown open when the user ctr/cmd + clicks in the job name' do - find('.js-builds-dropdown-button').click - dropdown_item = find('.mini-pipeline-graph-dropdown-item').native - - %i(alt control).each do |meta_key| - page.driver.browser.action - .key_down(meta_key) - .click(dropdown_item) - .key_up(meta_key) - .perform - end - - expect(page).to have_selector('.js-ci-action-icon') - end - end end context 'with pagination' do diff --git a/spec/features/projects/services/user_activates_issue_tracker_spec.rb b/spec/features/projects/services/user_activates_issue_tracker_spec.rb new file mode 100644 index 00000000000..e9502178bd7 --- /dev/null +++ b/spec/features/projects/services/user_activates_issue_tracker_spec.rb @@ -0,0 +1,92 @@ +require 'spec_helper' + +describe 'User activates issue tracker', :js do + let(:user) { create(:user) } + let(:project) { create(:project) } + + let(:url) { 'http://tracker.example.com' } + + def fill_form(active = true) + check 'Active' if active + + fill_in 'service_project_url', with: url + fill_in 'service_issues_url', with: "#{url}/:id" + fill_in 'service_new_issue_url', with: url + end + + before do + project.add_master(user) + sign_in(user) + + visit project_settings_integrations_path(project) + end + + shared_examples 'external issue tracker activation' do |tracker:| + describe 'user sets and activates the Service' do + context 'when the connection test succeeds' do + before do + stub_request(:head, url).to_return(headers: { 'Content-Type' => 'application/json' }) + + click_link(tracker) + fill_form + click_button('Test settings and save changes') + wait_for_requests + end + + it 'activates the service' do + expect(page).to have_content("#{tracker} activated.") + expect(current_path).to eq(project_settings_integrations_path(project)) + end + + it 'shows the link in the menu' do + page.within('.nav-sidebar') do + expect(page).to have_link(tracker, href: url) + end + end + end + + context 'when the connection test fails' do + it 'activates the service' do + stub_request(:head, url).to_raise(HTTParty::Error) + + click_link(tracker) + fill_form + click_button('Test settings and save changes') + wait_for_requests + + expect(find('.flash-container-page')).to have_content 'Test failed.' 
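+          # The failure flash is expected to expose a 'Save anyway' action
+          # (.flash-alert .flash-action) that persists and activates the service
+          # despite the failed connection test, as asserted below.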
+ expect(find('.flash-container-page')).to have_content 'Save anyway' + + find('.flash-alert .flash-action').click + wait_for_requests + + expect(page).to have_content("#{tracker} activated.") + expect(current_path).to eq(project_settings_integrations_path(project)) + end + end + end + + describe 'user sets the service but keeps it disabled' do + before do + click_link(tracker) + fill_form(false) + click_button('Save changes') + end + + it 'saves but does not activate the service' do + expect(page).to have_content("#{tracker} settings saved, but not activated.") + expect(current_path).to eq(project_settings_integrations_path(project)) + end + + it 'does not show the external tracker link in the menu' do + page.within('.nav-sidebar') do + expect(page).not_to have_link(tracker, href: url) + end + end + end + end + + it_behaves_like 'external issue tracker activation', tracker: 'Redmine' + it_behaves_like 'external issue tracker activation', tracker: 'Bugzilla' + it_behaves_like 'external issue tracker activation', tracker: 'Custom Issue Tracker' +end diff --git a/spec/features/projects/services/user_activates_jira_spec.rb b/spec/features/projects/services/user_activates_jira_spec.rb index 028669eeaf2..429128ec096 100644 --- a/spec/features/projects/services/user_activates_jira_spec.rb +++ b/spec/features/projects/services/user_activates_jira_spec.rb @@ -3,7 +3,6 @@ require 'spec_helper' describe 'User activates Jira', :js do let(:user) { create(:user) } let(:project) { create(:project) } - let(:service) { project.create_jira_service } let(:url) { 'http://jira.example.com' } let(:test_url) { 'http://jira.example.com/rest/api/2/serverInfo' } @@ -26,7 +25,7 @@ describe 'User activates Jira', :js do describe 'user sets and activates Jira Service' do context 'when Jira connection test succeeds' do - it 'activates the JIRA service' do + before do server_info = { key: 'value' }.to_json WebMock.stub_request(:get, test_url).with(basic_auth: %w(username password)).to_return(body: server_info) @@ -34,10 +33,18 @@ describe 'User activates Jira', :js do fill_form click_button('Test settings and save changes') wait_for_requests + end + it 'activates the JIRA service' do expect(page).to have_content('JIRA activated.') expect(current_path).to eq(project_settings_integrations_path(project)) end + + it 'shows the JIRA link in the menu' do + page.within('.nav-sidebar') do + expect(page).to have_link('JIRA', href: url) + end + end end context 'when Jira connection test fails' do @@ -75,14 +82,20 @@ describe 'User activates Jira', :js do end describe 'user sets Jira Service but keeps it disabled' do - context 'when Jira connection test succeeds' do - it 'activates the JIRA service' do - click_link('JIRA') - fill_form(false) - click_button('Save changes') + before do + click_link('JIRA') + fill_form(false) + click_button('Save changes') + end - expect(page).to have_content('JIRA settings saved, but not activated.') - expect(current_path).to eq(project_settings_integrations_path(project)) + it 'saves but does not activate the JIRA service' do + expect(page).to have_content('JIRA settings saved, but not activated.') + expect(current_path).to eq(project_settings_integrations_path(project)) + end + + it 'does not show the JIRA link in the menu' do + page.within('.nav-sidebar') do + expect(page).not_to have_link('JIRA', href: url) end end end diff --git a/spec/features/projects/services/user_activates_prometheus_spec.rb b/spec/features/projects/services/user_activates_prometheus_spec.rb new file mode 100644 index 
00000000000..33f884eb148 --- /dev/null +++ b/spec/features/projects/services/user_activates_prometheus_spec.rb @@ -0,0 +1,23 @@ +require 'spec_helper' + +describe 'User activates Prometheus' do + let(:project) { create(:project) } + let(:user) { create(:user) } + + before do + project.add_master(user) + sign_in(user) + + visit(project_settings_integrations_path(project)) + + click_link('Prometheus') + end + + it 'activates service' do + check('Active') + fill_in('API URL', with: 'http://prometheus.example.com') + click_button('Save changes') + + expect(page).to have_content('Prometheus activated.') + end +end diff --git a/spec/features/projects/show_project_spec.rb b/spec/features/projects/show_project_spec.rb index 0b94c9eae5d..0a014e9f080 100644 --- a/spec/features/projects/show_project_spec.rb +++ b/spec/features/projects/show_project_spec.rb @@ -17,4 +17,321 @@ describe 'Project show page', :feature do expect(page).to have_content("This project was scheduled for deletion, but failed with the following message: #{project.delete_error}") end end + + describe 'stat button existence' do + # For "New file", "Add License" functionality, + # see spec/features/projects/files/project_owner_creates_license_file_spec.rb + # see spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb + + let(:user) { create(:user) } + + describe 'empty project' do + let(:project) { create(:project, :public, :empty_repo) } + let(:presenter) { project.present(current_user: user) } + + describe 'as a normal user' do + before do + sign_in(user) + + visit project_path(project) + end + + it 'no Auto DevOps button if can not manage pipelines' do + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + + it '"Auto DevOps enabled" button not linked' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_text('Auto DevOps enabled') + end + end + end + + describe 'as a master' do + before do + project.add_master(user) + sign_in(user) + + visit project_path(project) + end + + it '"New file" button linked to new file page' do + page.within('.project-stats') do + expect(page).to have_link('New file', href: project_new_blob_path(project, project.default_branch || 'master')) + end + end + + it '"Add Readme" button linked to new file populated for a readme' do + page.within('.project-stats') do + expect(page).to have_link('Add Readme', href: presenter.add_readme_path) + end + end + + it '"Add License" button linked to new file populated for a license' do + page.within('.project-stats') do + expect(page).to have_link('Add License', href: presenter.add_license_path) + end + end + + describe 'Auto DevOps button' do + it '"Enable Auto DevOps" button linked to settings page' do + page.within('.project-stats') do + expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + + it '"Auto DevOps enabled" anchor linked to settings page' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + end + + describe 'Kubernetes cluster button' do + it '"Add Kubernetes cluster" button linked to clusters page' do + 
page.within('.project-stats') do + expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project)) + end + end + + it '"Kubernetes cluster" anchor linked to cluster page' do + cluster = create(:cluster, :provided_by_gcp, projects: [project]) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster)) + end + end + end + end + end + + describe 'populated project' do + let(:project) { create(:project, :public, :repository) } + let(:presenter) { project.present(current_user: user) } + + describe 'as a normal user' do + before do + sign_in(user) + + visit project_path(project) + end + + it 'no Auto DevOps button if can not manage pipelines' do + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + + it '"Auto DevOps enabled" button not linked' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_text('Auto DevOps enabled') + end + end + + it 'no Kubernetes cluster button if can not manage clusters' do + page.within('.project-stats') do + expect(page).not_to have_link('Add Kubernetes cluster') + expect(page).not_to have_link('Kubernetes configured') + end + end + end + + describe 'as a master' do + before do + allow_any_instance_of(AutoDevopsHelper).to receive(:show_auto_devops_callout?).and_return(false) + project.add_master(user) + sign_in(user) + + visit project_path(project) + end + + it 'no "Add Changelog" button if the project already has a changelog' do + expect(project.repository.changelog).not_to be_nil + + page.within('.project-stats') do + expect(page).not_to have_link('Add Changelog') + end + end + + it 'no "Add License" button if the project already has a license' do + expect(project.repository.license_blob).not_to be_nil + + page.within('.project-stats') do + expect(page).not_to have_link('Add License') + end + end + + it 'no "Add Contribution guide" button if the project already has a contribution guide' do + expect(project.repository.contribution_guide).not_to be_nil + + page.within('.project-stats') do + expect(page).not_to have_link('Add Contribution guide') + end + end + + describe 'GitLab CI configuration button' do + it '"Set up CI/CD" button linked to new file populated for a .gitlab-ci.yml' do + expect(project.repository.gitlab_ci_yml).to be_nil + + page.within('.project-stats') do + expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_path) + end + end + + it 'no "Set up CI/CD" button if the project already has a .gitlab-ci.yml' do + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add .gitlab-ci.yml", + file_path: '.gitlab-ci.yml', + file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + ).execute + + expect(project.repository.gitlab_ci_yml).not_to be_nil + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Set up CI/CD') + end + end + + it 'no "Set up CI/CD" button if the project has Auto DevOps enabled' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Set up CI/CD') + end + end + end + + describe 'Auto DevOps button' do + it '"Enable Auto DevOps" button linked to settings page' 
do + page.within('.project-stats') do + expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + + it '"Enable Auto DevOps" button linked to settings page' do + project.create_auto_devops!(enabled: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings')) + end + end + + it 'no Auto DevOps button if Auto DevOps callout is shown' do + allow_any_instance_of(AutoDevopsHelper).to receive(:show_auto_devops_callout?).and_return(true) + + visit project_path(project) + + expect(page).to have_selector('.js-autodevops-banner') + + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + + it 'no "Enable Auto DevOps" button when .gitlab-ci.yml already exists' do + Files::CreateService.new( + project, + project.creator, + start_branch: 'master', + branch_name: 'master', + commit_message: "Add .gitlab-ci.yml", + file_path: '.gitlab-ci.yml', + file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + ).execute + + expect(project.repository.gitlab_ci_yml).not_to be_nil + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Enable Auto DevOps') + expect(page).not_to have_link('Auto DevOps enabled') + end + end + end + + describe 'Kubernetes cluster button' do + it '"Add Kubernetes cluster" button linked to clusters page' do + page.within('.project-stats') do + expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project)) + end + end + + it '"Kubernetes cluster" button linked to cluster page' do + cluster = create(:cluster, :provided_by_gcp, projects: [project]) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster)) + end + end + end + + describe '"Set up Koding" button' do + it 'no "Set up Koding" button if Koding disabled' do + stub_application_setting(koding_enabled?: false) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Set up Koding') + end + end + + it 'no "Set up Koding" button if the project already has a .koding.yml' do + stub_application_setting(koding_enabled?: true) + allow(Gitlab::CurrentSettings.current_application_settings).to receive(:koding_url).and_return('http://koding.example.com') + expect(project.repository.changelog).not_to be_nil + allow_any_instance_of(Repository).to receive(:koding_yml).and_return(project.repository.changelog) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).not_to have_link('Set up Koding') + end + end + + it '"Set up Koding" button linked to new file populated for a .koding.yml' do + stub_application_setting(koding_enabled?: true) + + visit project_path(project) + + page.within('.project-stats') do + expect(page).to have_link('Set up Koding', href: presenter.add_koding_stack_path) + end + end + end + end + end + end end diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb index 4d2a08afecc..ef1bb712846 100644 --- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb +++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb @@ -1,226 
+1,234 @@ require 'spec_helper' -# Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages -describe 'User updates wiki page', :skip_gitaly_mock do - let(:user) { create(:user) } - - before do - project.add_master(user) - sign_in(user) - end - - context 'when wiki is empty' do +describe 'User updates wiki page' do + shared_examples 'wiki page user update' do + let(:user) { create(:user) } before do - visit(project_wikis_path(project)) + project.add_master(user) + sign_in(user) end - context 'in a user namespace' do - let(:project) { create(:project, namespace: user.namespace) } - - it 'redirects back to the home edit page' do - page.within(:css, '.wiki-form .form-actions') do - click_on('Cancel') - end - - expect(current_path).to eq project_wiki_path(project, :home) + context 'when wiki is empty' do + before do + visit(project_wikis_path(project)) end - it 'updates a page that has a path', :js do - click_on('New page') + context 'in a user namespace' do + let(:project) { create(:project, namespace: user.namespace) } - page.within('#modal-new-wiki') do - fill_in(:new_wiki_path, with: 'one/two/three-test') - click_on('Create page') - end + it 'redirects back to the home edit page' do + page.within(:css, '.wiki-form .form-actions') do + click_on('Cancel') + end - page.within '.wiki-form' do - fill_in(:wiki_content, with: 'wiki content') - click_on('Create page') + expect(current_path).to eq project_wiki_path(project, :home) end - expect(current_path).to include('one/two/three-test') - expect(find('.wiki-pages')).to have_content('Three') + it 'updates a page that has a path', :js do + click_on('New page') - first(:link, text: 'Three').click + page.within('#modal-new-wiki') do + fill_in(:new_wiki_path, with: 'one/two/three-test') + click_on('Create page') + end - expect(find('.nav-text')).to have_content('Three') + page.within '.wiki-form' do + fill_in(:wiki_content, with: 'wiki content') + click_on('Create page') + end - click_on('Edit') + expect(current_path).to include('one/two/three-test') + expect(find('.wiki-pages')).to have_content('Three') - expect(current_path).to include('one/two/three-test') - expect(page).to have_content('Edit Page') + first(:link, text: 'Three').click - fill_in('Content', with: 'Updated Wiki Content') - click_on('Save changes') + expect(find('.nav-text')).to have_content('Three') - expect(page).to have_content('Updated Wiki Content') - end - end - end + click_on('Edit') - context 'when wiki is not empty' do - let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) } - let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: 'home', content: 'Home page' }) } + expect(current_path).to include('one/two/three-test') + expect(page).to have_content('Edit Page') - before do - visit(project_wikis_path(project)) + fill_in('Content', with: 'Updated Wiki Content') + click_on('Save changes') + + expect(page).to have_content('Updated Wiki Content') + end + end end - context 'in a user namespace' do - let(:project) { create(:project, namespace: user.namespace) } + context 'when wiki is not empty' do + let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) } + let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: 'home', content: 'Home page' }) } - it 'updates a page' do - click_link('Edit') + before do + visit(project_wikis_path(project)) + end - # Commit message field should have correct value. 
- expect(page).to have_field('wiki[message]', with: 'Update home') + context 'in a user namespace' do + let(:project) { create(:project, namespace: user.namespace) } - fill_in(:wiki_content, with: 'My awesome wiki!') - click_button('Save changes') + it 'updates a page' do + click_link('Edit') - expect(page).to have_content('Home') - expect(page).to have_content("Last edited by #{user.name}") - expect(page).to have_content('My awesome wiki!') - end + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Update home') - it 'shows a validation error message' do - click_link('Edit') + fill_in(:wiki_content, with: 'My awesome wiki!') + click_button('Save changes') - fill_in(:wiki_content, with: '') - click_button('Save changes') + expect(page).to have_content('Home') + expect(page).to have_content("Last edited by #{user.name}") + expect(page).to have_content('My awesome wiki!') + end - expect(page).to have_selector('.wiki-form') - expect(page).to have_content('Edit Page') - expect(page).to have_content('The form contains the following error:') - expect(page).to have_content("Content can't be blank") - expect(find('textarea#wiki_content').value).to eq('') - end + it 'shows a validation error message' do + click_link('Edit') - it 'shows the autocompletion dropdown', :js do - click_link('Edit') + fill_in(:wiki_content, with: '') + click_button('Save changes') - find('#wiki_content').native.send_keys('') - fill_in(:wiki_content, with: '@') + expect(page).to have_selector('.wiki-form') + expect(page).to have_content('Edit Page') + expect(page).to have_content('The form contains the following error:') + expect(page).to have_content("Content can't be blank") + expect(find('textarea#wiki_content').value).to eq('') + end - expect(page).to have_selector('.atwho-view') - end + it 'shows the autocompletion dropdown', :js do + click_link('Edit') - it 'shows the error message' do - click_link('Edit') + find('#wiki_content').native.send_keys('') + fill_in(:wiki_content, with: '@') - wiki_page.update(content: 'Update') + expect(page).to have_selector('.atwho-view') + end - click_button('Save changes') + it 'shows the error message' do + click_link('Edit') - expect(page).to have_content('Someone edited the page the same time you did.') - end + wiki_page.update(content: 'Update') - it 'updates a page' do - click_on('Edit') - fill_in('Content', with: 'Updated Wiki Content') - click_on('Save changes') + click_button('Save changes') - expect(page).to have_content('Updated Wiki Content') - end + expect(page).to have_content('Someone edited the page the same time you did.') + end - it 'cancels edititng of a page' do - click_on('Edit') + it 'updates a page' do + click_on('Edit') + fill_in('Content', with: 'Updated Wiki Content') + click_on('Save changes') - page.within(:css, '.wiki-form .form-actions') do - click_on('Cancel') + expect(page).to have_content('Updated Wiki Content') end - expect(current_path).to eq(project_wiki_path(project, wiki_page)) + it 'cancels edititng of a page' do + click_on('Edit') + + page.within(:css, '.wiki-form .form-actions') do + click_on('Cancel') + end + + expect(current_path).to eq(project_wiki_path(project, wiki_page)) + end end - end - context 'in a group namespace' do - let(:project) { create(:project, namespace: create(:group, :public)) } + context 'in a group namespace' do + let(:project) { create(:project, namespace: create(:group, :public)) } - it 'updates a page' do - click_link('Edit') + it 'updates a page' do + 
click_link('Edit') - # Commit message field should have correct value. - expect(page).to have_field('wiki[message]', with: 'Update home') + # Commit message field should have correct value. + expect(page).to have_field('wiki[message]', with: 'Update home') - fill_in(:wiki_content, with: 'My awesome wiki!') + fill_in(:wiki_content, with: 'My awesome wiki!') - click_button('Save changes') + click_button('Save changes') - expect(page).to have_content('Home') - expect(page).to have_content("Last edited by #{user.name}") - expect(page).to have_content('My awesome wiki!') + expect(page).to have_content('Home') + expect(page).to have_content("Last edited by #{user.name}") + expect(page).to have_content('My awesome wiki!') + end end end - end - context 'when the page is in a subdir' do - let!(:project) { create(:project, namespace: user.namespace) } - let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) } - let(:page_name) { 'page_name' } - let(:page_dir) { "foo/bar/#{page_name}" } - let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: page_dir, content: 'Home page' }) } + context 'when the page is in a subdir' do + let!(:project) { create(:project, namespace: user.namespace) } + let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) } + let(:page_name) { 'page_name' } + let(:page_dir) { "foo/bar/#{page_name}" } + let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: page_dir, content: 'Home page' }) } - before do - visit(project_wiki_edit_path(project, wiki_page)) - end + before do + visit(project_wiki_edit_path(project, wiki_page)) + end - it 'moves the page to the root folder' do - fill_in(:wiki_title, with: "/#{page_name}") + it 'moves the page to the root folder', :skip_gitaly_mock do + fill_in(:wiki_title, with: "/#{page_name}") - click_button('Save changes') + click_button('Save changes') - expect(current_path).to eq(project_wiki_path(project, page_name)) - end + expect(current_path).to eq(project_wiki_path(project, page_name)) + end - it 'moves the page to other dir' do - new_page_dir = "foo1/bar1/#{page_name}" + it 'moves the page to other dir' do + new_page_dir = "foo1/bar1/#{page_name}" - fill_in(:wiki_title, with: new_page_dir) + fill_in(:wiki_title, with: new_page_dir) - click_button('Save changes') + click_button('Save changes') - expect(current_path).to eq(project_wiki_path(project, new_page_dir)) - end + expect(current_path).to eq(project_wiki_path(project, new_page_dir)) + end - it 'remains in the same place if title has not changed' do - original_path = project_wiki_path(project, wiki_page) + it 'remains in the same place if title has not changed' do + original_path = project_wiki_path(project, wiki_page) - fill_in(:wiki_title, with: page_name) + fill_in(:wiki_title, with: page_name) - click_button('Save changes') + click_button('Save changes') - expect(current_path).to eq(original_path) - end + expect(current_path).to eq(original_path) + end - it 'can be moved to a different dir with a different name' do - new_page_dir = "foo1/bar1/new_page_name" + it 'can be moved to a different dir with a different name' do + new_page_dir = "foo1/bar1/new_page_name" - fill_in(:wiki_title, with: new_page_dir) + fill_in(:wiki_title, with: new_page_dir) - click_button('Save changes') + click_button('Save changes') - expect(current_path).to eq(project_wiki_path(project, new_page_dir)) - end + expect(current_path).to eq(project_wiki_path(project, new_page_dir)) + end - it 'can be renamed and moved 
to the root folder' do - new_name = 'new_page_name' + it 'can be renamed and moved to the root folder' do + new_name = 'new_page_name' - fill_in(:wiki_title, with: "/#{new_name}") + fill_in(:wiki_title, with: "/#{new_name}") - click_button('Save changes') + click_button('Save changes') - expect(current_path).to eq(project_wiki_path(project, new_name)) - end + expect(current_path).to eq(project_wiki_path(project, new_name)) + end - it 'squishes the title before creating the page' do - new_page_dir = " foo1 / bar1 / #{page_name} " + it 'squishes the title before creating the page' do + new_page_dir = " foo1 / bar1 / #{page_name} " - fill_in(:wiki_title, with: new_page_dir) + fill_in(:wiki_title, with: new_page_dir) - click_button('Save changes') + click_button('Save changes') - expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}")) + expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}")) + end end end + + context 'when Gitaly is enabled' do + it_behaves_like 'wiki page user update' + end + + context 'when Gitaly is disabled', :skip_gitaly_mock do + it_behaves_like 'wiki page user update' + end end diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_page_spec.rb index e37436838fd..306e382119a 100644 --- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb +++ b/spec/features/projects/wiki/user_views_wiki_page_spec.rb @@ -1,146 +1,155 @@ require 'spec_helper' -# Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages -describe 'User views a wiki page', :skip_gitaly_mock do - let(:user) { create(:user) } - let(:project) { create(:project, namespace: user.namespace) } - let(:wiki_page) do - create(:wiki_page, - wiki: project.wiki, - attrs: { title: 'home', content: 'Look at this [image](image.jpg)\n\n ![alt text](image.jpg)' }) - end - - before do - project.add_master(user) - sign_in(user) - end +describe 'User views a wiki page' do + shared_examples 'wiki page user view' do + let(:user) { create(:user) } + let(:project) { create(:project, namespace: user.namespace) } + let(:wiki_page) do + create(:wiki_page, + wiki: project.wiki, + attrs: { title: 'home', content: 'Look at this [image](image.jpg)\n\n ![alt text](image.jpg)' }) + end - context 'when wiki is empty' do before do - visit(project_wikis_path(project)) + project.add_master(user) + sign_in(user) + end - click_on('New page') + context 'when wiki is empty' do + before do + visit(project_wikis_path(project)) - page.within('#modal-new-wiki') do - fill_in(:new_wiki_path, with: 'one/two/three-test') - click_on('Create page') - end + click_on('New page') - page.within('.wiki-form') do - fill_in(:wiki_content, with: 'wiki content') - click_on('Create page') + page.within('#modal-new-wiki') do + fill_in(:new_wiki_path, with: 'one/two/three-test') + click_on('Create page') + end + + page.within('.wiki-form') do + fill_in(:wiki_content, with: 'wiki content') + click_on('Create page') + end end - end - it 'shows the history of a page that has a path', :js do - expect(current_path).to include('one/two/three-test') + it 'shows the history of a page that has a path', :js do + expect(current_path).to include('one/two/three-test') - first(:link, text: 'Three').click - click_on('Page history') + first(:link, text: 'Three').click + click_on('Page history') - expect(current_path).to include('one/two/three-test') + expect(current_path).to include('one/two/three-test') - page.within(:css, '.nav-text') do - 
expect(page).to have_content('History') + page.within(:css, '.nav-text') do + expect(page).to have_content('History') + end end - end - it 'shows an old version of a page', :js do - expect(current_path).to include('one/two/three-test') - expect(find('.wiki-pages')).to have_content('Three') + it 'shows an old version of a page', :js do + expect(current_path).to include('one/two/three-test') + expect(find('.wiki-pages')).to have_content('Three') - first(:link, text: 'Three').click + first(:link, text: 'Three').click - expect(find('.nav-text')).to have_content('Three') + expect(find('.nav-text')).to have_content('Three') - click_on('Edit') + click_on('Edit') - expect(current_path).to include('one/two/three-test') - expect(page).to have_content('Edit Page') + expect(current_path).to include('one/two/three-test') + expect(page).to have_content('Edit Page') - fill_in('Content', with: 'Updated Wiki Content') + fill_in('Content', with: 'Updated Wiki Content') - click_on('Save changes') - click_on('Page history') + click_on('Save changes') + click_on('Page history') - page.within(:css, '.nav-text') do - expect(page).to have_content('History') - end + page.within(:css, '.nav-text') do + expect(page).to have_content('History') + end - find('a[href*="?version_id"]') + find('a[href*="?version_id"]') + end end - end - context 'when a page does not have history' do - before do - visit(project_wiki_path(project, wiki_page)) - end + context 'when a page does not have history' do + before do + visit(project_wiki_path(project, wiki_page)) + end - it 'shows all the pages' do - expect(page).to have_content(user.name) - expect(find('.wiki-pages')).to have_content(wiki_page.title.capitalize) - end + it 'shows all the pages' do + expect(page).to have_content(user.name) + expect(find('.wiki-pages')).to have_content(wiki_page.title.capitalize) + end - it 'shows a file stored in a page' do - gollum_file_double = double('Gollum::File', - mime_type: 'image/jpeg', - name: 'images/image.jpg', - path: 'images/image.jpg', - raw_data: '') - wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double) + it 'shows a file stored in a page' do + gollum_file_double = double('Gollum::File', + mime_type: 'image/jpeg', + name: 'images/image.jpg', + path: 'images/image.jpg', + raw_data: '') + wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double) - allow(wiki_file).to receive(:mime_type).and_return('image/jpeg') - allow_any_instance_of(ProjectWiki).to receive(:find_file).with('image.jpg', nil).and_return(wiki_file) + allow(wiki_file).to receive(:mime_type).and_return('image/jpeg') + allow_any_instance_of(ProjectWiki).to receive(:find_file).with('image.jpg', nil).and_return(wiki_file) - expect(page).to have_xpath('//img[@data-src="image.jpg"]') - expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg") + expect(page).to have_xpath('//img[@data-src="image.jpg"]') + expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg") - click_on('image') + click_on('image') - expect(current_path).to match('wikis/image.jpg') - expect(page).not_to have_xpath('/html') # Page should render the image which means there is no html involved - end + expect(current_path).to match('wikis/image.jpg') + expect(page).not_to have_xpath('/html') # Page should render the image which means there is no html involved + end - it 'shows the creation page if file does not exist' do - expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg") + it 'shows the creation page if file does not 
exist' do + expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg") - click_on('image') + click_on('image') - expect(current_path).to match('wikis/image.jpg') - expect(page).to have_content('New Wiki Page') - expect(page).to have_content('Create page') + expect(current_path).to match('wikis/image.jpg') + expect(page).to have_content('New Wiki Page') + expect(page).to have_content('Create page') + end end - end - context 'when a page has history' do - before do - wiki_page.update(message: 'updated home', content: 'updated [some link](other-page)') - end + context 'when a page has history' do + before do + wiki_page.update(message: 'updated home', content: 'updated [some link](other-page)') + end - it 'shows the page history' do - visit(project_wiki_path(project, wiki_page)) + it 'shows the page history' do + visit(project_wiki_path(project, wiki_page)) - expect(page).to have_selector('a.btn', text: 'Edit') + expect(page).to have_selector('a.btn', text: 'Edit') - click_on('Page history') + click_on('Page history') - expect(page).to have_content(user.name) - expect(page).to have_content("#{user.username} created page: home") - expect(page).to have_content('updated home') + expect(page).to have_content(user.name) + expect(page).to have_content("#{user.username} created page: home") + expect(page).to have_content('updated home') + end + + it 'does not show the "Edit" button' do + visit(project_wiki_path(project, wiki_page, version_id: wiki_page.versions.last.id)) + + expect(page).not_to have_selector('a.btn', text: 'Edit') + end end - it 'does not show the "Edit" button' do - visit(project_wiki_path(project, wiki_page, version_id: wiki_page.versions.last.id)) + it 'opens a default wiki page', :js do + visit(project_path(project)) - expect(page).not_to have_selector('a.btn', text: 'Edit') + find('.shortcuts-wiki').click + + expect(page).to have_content('Home · Create Page') end end - it 'opens a default wiki page', :js do - visit(project_path(project)) - - find('.shortcuts-wiki').click + context 'when Gitaly is enabled' do + it_behaves_like 'wiki page user view' + end - expect(page).to have_content('Home · Create Page') + context 'when Gitaly is disabled', :skip_gitaly_mock do + it_behaves_like 'wiki page user view' end end diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb index b66a7dea598..cfe979a8647 100644 --- a/spec/features/projects_spec.rb +++ b/spec/features/projects_spec.rb @@ -25,6 +25,24 @@ feature 'Project' do end end + describe 'shows tip about push to create git command' do + let(:user) { create(:user) } + + before do + sign_in user + visit new_project_path + end + + it 'shows the command in a popover', :js do + page.within '.profile-settings-sidebar' do + click_link 'Show command' + end + + expect(page).to have_css('.popover .push-to-create-popover #push_to_create_tip') + expect(page).to have_content 'Private projects can be created in your personal namespace with:' + end + end + describe 'description' do let(:project) { create(:project, :repository) } let(:path) { project_path(project) } @@ -128,8 +146,8 @@ feature 'Project' do end describe 'removal', :js do - let(:user) { create(:user, username: 'test', name: 'test') } - let(:project) { create(:project, namespace: user.namespace, name: 'project1') } + let(:user) { create(:user) } + let(:project) { create(:project, namespace: user.namespace) } before do sign_in(user) @@ -138,8 +156,8 @@ feature 'Project' do end it 'removes a project' do - expect { remove_with_confirm('Remove 
project', project.path) }.to change {Project.count}.by(-1) - expect(page).to have_content "Project 'test / project1' is in the process of being deleted." + expect { remove_with_confirm('Remove project', project.path) }.to change { Project.count }.by(-1) + expect(page).to have_content "Project '#{project.full_name}' is in the process of being deleted." expect(Project.all.count).to be_zero expect(project.issues).to be_empty expect(project.merge_requests).to be_empty diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb index aec9de6c7ca..df65c2d2f83 100644 --- a/spec/features/runners_spec.rb +++ b/spec/features/runners_spec.rb @@ -7,6 +7,20 @@ feature 'Runners' do sign_in(user) end + context 'when user opens runners page' do + given(:project) { create(:project) } + + background do + project.add_master(user) + end + + scenario 'user can see a button to install runners on kubernetes clusters' do + visit runners_path(project) + + expect(page).to have_link('Install Runner on Kubernetes', href: project_clusters_path(project)) + end + end + context 'when a project has enabled shared_runners' do given(:project) { create(:project) } diff --git a/spec/features/signup_spec.rb b/spec/features/signup_spec.rb deleted file mode 100644 index 917fad74ef1..00000000000 --- a/spec/features/signup_spec.rb +++ /dev/null @@ -1,103 +0,0 @@ -require 'spec_helper' - -feature 'Signup' do - describe 'signup with no errors' do - context "when sending confirmation email" do - before do - stub_application_setting(send_user_confirmation_email: true) - end - - it 'creates the user account and sends a confirmation email' do - user = build(:user) - - visit root_path - - fill_in 'new_user_name', with: user.name - fill_in 'new_user_username', with: user.username - fill_in 'new_user_email', with: user.email - fill_in 'new_user_email_confirmation', with: user.email - fill_in 'new_user_password', with: user.password - click_button "Register" - - expect(current_path).to eq users_almost_there_path - expect(page).to have_content("Please check your email to confirm your account") - end - end - - context "when sigining up with different cased emails" do - it "creates the user successfully" do - user = build(:user) - - visit root_path - - fill_in 'new_user_name', with: user.name - fill_in 'new_user_username', with: user.username - fill_in 'new_user_email', with: user.email - fill_in 'new_user_email_confirmation', with: user.email.capitalize - fill_in 'new_user_password', with: user.password - click_button "Register" - - expect(current_path).to eq dashboard_projects_path - expect(page).to have_content("Welcome! You have signed up successfully.") - end - end - - context "when not sending confirmation email" do - before do - stub_application_setting(send_user_confirmation_email: false) - end - - it 'creates the user account and goes to dashboard' do - user = build(:user) - - visit root_path - - fill_in 'new_user_name', with: user.name - fill_in 'new_user_username', with: user.username - fill_in 'new_user_email', with: user.email - fill_in 'new_user_email_confirmation', with: user.email - fill_in 'new_user_password', with: user.password - click_button "Register" - - expect(current_path).to eq dashboard_projects_path - expect(page).to have_content("Welcome! 
You have signed up successfully.") - end - end - end - - describe 'signup with errors' do - it "displays the errors" do - existing_user = create(:user) - user = build(:user) - - visit root_path - - fill_in 'new_user_name', with: user.name - fill_in 'new_user_username', with: user.username - fill_in 'new_user_email', with: existing_user.email - fill_in 'new_user_password', with: user.password - click_button "Register" - - expect(current_path).to eq user_registration_path - expect(page).to have_content("errors prohibited this user from being saved") - expect(page).to have_content("Email has already been taken") - expect(page).to have_content("Email confirmation doesn't match") - end - - it 'does not redisplay the password' do - existing_user = create(:user) - user = build(:user) - - visit root_path - - fill_in 'new_user_name', with: user.name - fill_in 'new_user_username', with: user.username - fill_in 'new_user_email', with: existing_user.email - fill_in 'new_user_password', with: user.password - click_button "Register" - - expect(current_path).to eq user_registration_path - expect(page.body).not_to match(/#{user.password}/) - end - end -end diff --git a/spec/features/tags/master_views_tags_spec.rb b/spec/features/tags/master_views_tags_spec.rb index 4662367d843..b625e7065cc 100644 --- a/spec/features/tags/master_views_tags_spec.rb +++ b/spec/features/tags/master_views_tags_spec.rb @@ -13,7 +13,7 @@ feature 'Master views tags' do before do visit project_path(project) - click_on 'README' + click_on 'Add Readme' fill_in :commit_message, with: 'Add a README file', visible: true click_button 'Commit changes' visit project_tags_path(project) diff --git a/spec/features/u2f_spec.rb b/spec/features/u2f_spec.rb index 50ee1656e10..fb65b570dd6 100644 --- a/spec/features/u2f_spec.rb +++ b/spec/features/u2f_spec.rb @@ -1,10 +1,6 @@ require 'spec_helper' feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', :js do - before do - allow_any_instance_of(U2fHelper).to receive(:inject_u2f_api?).and_return(true) - end - def manage_two_factor_authentication click_on 'Manage two-factor authentication' expect(page).to have_content("Setup new U2F device") diff --git a/spec/features/login_spec.rb b/spec/features/users/login_spec.rb index 6dfabcc7225..6ef235cf870 100644 --- a/spec/features/login_spec.rb +++ b/spec/features/users/login_spec.rb @@ -1,6 +1,26 @@ require 'spec_helper' feature 'Login' do + scenario 'Successful user signin invalidates password reset token' do + user = create(:user) + + expect(user.reset_password_token).to be_nil + + visit new_user_password_path + fill_in 'user_email', with: user.email + click_button 'Reset password' + + user.reload + expect(user.reset_password_token).not_to be_nil + + find('a[href="#login-pane"]').click + gitlab_sign_in(user) + expect(current_path).to eq root_path + + user.reload + expect(user.reset_password_token).to be_nil + end + describe 'initial login after setup' do it 'allows the initial admin to create a password' do # This behavior is dependent on there only being one user diff --git a/spec/features/logout_spec.rb b/spec/features/users/logout_spec.rb index 635729efa53..635729efa53 100644 --- a/spec/features/logout_spec.rb +++ b/spec/features/users/logout_spec.rb diff --git a/spec/features/users/projects_spec.rb b/spec/features/users/projects_spec.rb deleted file mode 100644 index f079771cee1..00000000000 --- a/spec/features/users/projects_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -require 'spec_helper' - -describe 'Projects tab on a user profile', 
:js do - let(:user) { create(:user) } - let!(:project) { create(:project, namespace: user.namespace) } - let!(:project2) { create(:project, namespace: user.namespace) } - - before do - allow(Project).to receive(:default_per_page).and_return(1) - - sign_in(user) - - visit user_path(user) - - page.within('.user-profile-nav') do - click_link('Personal projects') - end - - wait_for_requests - end - - it 'paginates results' do - expect(page).to have_content(project2.name) - - click_link('Next') - - expect(page).to have_content(project.name) - end -end diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb new file mode 100644 index 00000000000..b5bbb2c0ea5 --- /dev/null +++ b/spec/features/users/show_spec.rb @@ -0,0 +1,17 @@ +require 'spec_helper' + +describe 'User page' do + let(:user) { create(:user) } + + it 'shows all the tabs' do + visit(user_path(user)) + + page.within '.nav-links' do + expect(page).to have_link('Activity') + expect(page).to have_link('Groups') + expect(page).to have_link('Contributed projects') + expect(page).to have_link('Personal projects') + expect(page).to have_link('Snippets') + end + end +end diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb new file mode 100644 index 00000000000..5d539f0ccbe --- /dev/null +++ b/spec/features/users/signup_spec.rb @@ -0,0 +1,135 @@ +require 'spec_helper' + +describe 'Signup' do + let(:new_user) { build_stubbed(:user) } + + describe 'username validation', :js do + before do + visit root_path + click_link 'Register' + end + + it 'does not show an error border if the username is available' do + fill_in 'new_user_username', with: 'new-user' + wait_for_requests + + expect(find('.username')).not_to have_css '.gl-field-error-outline' + end + + it 'does not show an error border if the username contains dots (.)' do + fill_in 'new_user_username', with: 'new.user.username' + wait_for_requests + + expect(find('.username')).not_to have_css '.gl-field-error-outline' + end + + it 'shows an error border if the username already exists' do + existing_user = create(:user) + + fill_in 'new_user_username', with: existing_user.username + wait_for_requests + + expect(find('.username')).to have_css '.gl-field-error-outline' + end + + it 'shows an error border if the username contains special characters' do + fill_in 'new_user_username', with: 'new$user!username' + wait_for_requests + + expect(find('.username')).to have_css '.gl-field-error-outline' + end + end + + context 'with no errors' do + context "when sending confirmation email" do + before do + stub_application_setting(send_user_confirmation_email: true) + end + + it 'creates the user account and sends a confirmation email' do + visit root_path + + fill_in 'new_user_name', with: new_user.name + fill_in 'new_user_username', with: new_user.username + fill_in 'new_user_email', with: new_user.email + fill_in 'new_user_email_confirmation', with: new_user.email + fill_in 'new_user_password', with: new_user.password + + expect { click_button 'Register' }.to change { User.count }.by(1) + + expect(current_path).to eq users_almost_there_path + expect(page).to have_content("Please check your email to confirm your account") + end + end + + context "when sigining up with different cased emails" do + it "creates the user successfully" do + visit root_path + + fill_in 'new_user_name', with: new_user.name + fill_in 'new_user_username', with: new_user.username + fill_in 'new_user_email', with: new_user.email + fill_in 'new_user_email_confirmation', with: 
new_user.email.capitalize + fill_in 'new_user_password', with: new_user.password + click_button "Register" + + expect(current_path).to eq dashboard_projects_path + expect(page).to have_content("Welcome! You have signed up successfully.") + end + end + + context "when not sending confirmation email" do + before do + stub_application_setting(send_user_confirmation_email: false) + end + + it 'creates the user account and goes to dashboard' do + visit root_path + + fill_in 'new_user_name', with: new_user.name + fill_in 'new_user_username', with: new_user.username + fill_in 'new_user_email', with: new_user.email + fill_in 'new_user_email_confirmation', with: new_user.email + fill_in 'new_user_password', with: new_user.password + click_button "Register" + + expect(current_path).to eq dashboard_projects_path + expect(page).to have_content("Welcome! You have signed up successfully.") + end + end + end + + context 'with errors' do + it "displays the errors" do + existing_user = create(:user) + + visit root_path + + fill_in 'new_user_name', with: new_user.name + fill_in 'new_user_username', with: new_user.username + fill_in 'new_user_email', with: existing_user.email + fill_in 'new_user_password', with: new_user.password + click_button "Register" + + expect(current_path).to eq user_registration_path + expect(page).to have_content("errors prohibited this user from being saved") + expect(page).to have_content("Email has already been taken") + expect(page).to have_content("Email confirmation doesn't match") + end + + it 'does not redisplay the password' do + existing_user = create(:user) + + visit root_path + + fill_in 'new_user_name', with: new_user.name + fill_in 'new_user_username', with: new_user.username + fill_in 'new_user_email', with: existing_user.email + fill_in 'new_user_password', with: new_user.password + click_button "Register" + + expect(current_path).to eq user_registration_path + expect(page.body).not_to match(/#{new_user.password}/) + end + end +end diff --git a/spec/features/user_page_spec.rb b/spec/features/users/user_browses_projects_on_user_page_spec.rb index 19c587e53c8..a70637c8370 100644 --- a/spec/features/user_page_spec.rb +++ b/spec/features/users/user_browses_projects_on_user_page_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'User page', :js do +describe 'Users > User browses projects on user page', :js do let!(:user) { create :user } let!(:private_project) do create :project, :private, name: 'private', namespace: user.namespace do |project| @@ -26,6 +26,28 @@ describe 'User page', :js do end end + it 'paginates projects', :js do + project = create(:project, namespace: user.namespace) + project2 = create(:project, namespace: user.namespace) + allow(Project).to receive(:default_per_page).and_return(1) + + sign_in(user) + + visit user_path(user) + + page.within('.user-profile-nav') do + click_link('Personal projects') + end + + wait_for_requests + + expect(page).to have_content(project2.name) + + click_link('Next') + + expect(page).to have_content(project.name) + end + context 'when not signed in' do it 'renders user public project' do visit user_path(user) diff --git a/spec/features/users_spec.rb b/spec/features/users_spec.rb deleted file mode 100644 index a9973cdf214..00000000000 --- a/spec/features/users_spec.rb +++ /dev/null @@ -1,114 +0,0 @@ -require 'spec_helper' - -feature 'Users', :js do - let(:user) { create(:user, username: 'user1', name: 'User 1', email: 'user1@gitlab.com') } - - scenario 'GET /users/sign_in creates a new user account' do - visit 
new_user_session_path - click_link 'Register' - fill_in 'new_user_name', with: 'Name Surname' - fill_in 'new_user_username', with: 'Great' - fill_in 'new_user_email', with: 'name@mail.com' - fill_in 'new_user_email_confirmation', with: 'name@mail.com' - fill_in 'new_user_password', with: 'password1234' - expect { click_button 'Register' }.to change { User.count }.by(1) - end - - scenario 'Successful user signin invalidates password reset token' do - expect(user.reset_password_token).to be_nil - - visit new_user_password_path - fill_in 'user_email', with: user.email - click_button 'Reset password' - - user.reload - expect(user.reset_password_token).not_to be_nil - - find('a[href="#login-pane"]').click - gitlab_sign_in(user) - expect(current_path).to eq root_path - - user.reload - expect(user.reset_password_token).to be_nil - end - - scenario 'Should show one error if email is already taken' do - visit new_user_session_path - click_link 'Register' - fill_in 'new_user_name', with: 'Another user name' - fill_in 'new_user_username', with: 'anotheruser' - fill_in 'new_user_email', with: user.email - fill_in 'new_user_email_confirmation', with: user.email - fill_in 'new_user_password', with: '12341234' - expect { click_button 'Register' }.to change { User.count }.by(0) - expect(page).to have_text('Email has already been taken') - expect(number_of_errors_on_page(page)).to be(1), 'errors on page:\n #{errors_on_page page}' - end - - describe 'redirect alias routes' do - before do - expect(user).to be_persisted - end - - scenario '/u/user1 redirects to user page' do - visit '/u/user1' - - expect(current_path).to eq user_path(user) - expect(page).to have_text(user.name) - end - - scenario '/u/user1/groups redirects to user groups page' do - visit '/u/user1/groups' - - expect(current_path).to eq user_groups_path(user) - end - - scenario '/u/user1/projects redirects to user projects page' do - visit '/u/user1/projects' - - expect(current_path).to eq user_projects_path(user) - end - end - - feature 'username validation' do - let(:loading_icon) { '.fa.fa-spinner' } - let(:username_input) { 'new_user_username' } - - before do - visit new_user_session_path - click_link 'Register' - end - - scenario 'doesn\'t show an error border if the username is available' do - fill_in username_input, with: 'new-user' - wait_for_requests - expect(find('.username')).not_to have_css '.gl-field-error-outline' - end - - scenario 'does not show an error border if the username contains dots (.)' do - fill_in username_input, with: 'new.user.username' - wait_for_requests - expect(find('.username')).not_to have_css '.gl-field-error-outline' - end - - scenario 'shows an error border if the username already exists' do - fill_in username_input, with: user.username - wait_for_requests - expect(find('.username')).to have_css '.gl-field-error-outline' - end - - scenario 'shows an error border if the username contains special characters' do - fill_in username_input, with: 'new$user!username' - wait_for_requests - expect(find('.username')).to have_css '.gl-field-error-outline' - end - end - - def errors_on_page(page) - page.find('#error_explanation').find('ul').all('li').map { |item| item.text }.join("\n") - end - - def number_of_errors_on_page(page) - page.find('#error_explanation').find('ul').all('li').count - end -end diff --git a/spec/finders/concerns/finder_methods_spec.rb b/spec/finders/concerns/finder_methods_spec.rb new file mode 100644 index 00000000000..a4ad331f613 --- /dev/null +++ b/spec/finders/concerns/finder_methods_spec.rb 
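For orientation, the new spec below exercises a finder concern whose find / find_by / find_by! lookups re-check the current user's access before returning a record. A minimal sketch of such a concern — assuming only the Ability.allowed? API already used elsewhere in these specs; the helper names are illustrative, not the actual GitLab implementation — could look like:

module FinderMethods
  def find_by!(*args)
    raise_not_found_unless_authorized(execute.find_by!(*args))
  end

  def find(*args)
    raise_not_found_unless_authorized(execute.find(*args))
  end

  def find_by(*args)
    if_authorized(execute.find_by(*args))
  end

  private

  def raise_not_found_unless_authorized(result)
    result = if_authorized(result)
    raise ActiveRecord::RecordNotFound unless result

    result
  end

  def if_authorized(result)
    # Only hand back records the current user can read (e.g. :read_project for
    # a Project); otherwise behave as if the record did not exist.
    return unless result

    ability = :"read_#{result.class.model_name.singular}"
    result if Ability.allowed?(current_user, ability, result)
  end
end

Used with the spec's anonymous finder class, find_by(id: unauthorized_project.id) would return nil while find_by!(id: unauthorized_project.id) would raise ActiveRecord::RecordNotFound, which is what the examples below assert.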
@@ -0,0 +1,70 @@ +require 'spec_helper' + +describe FinderMethods do + let(:finder_class) do + Class.new do + include FinderMethods + + attr_reader :current_user + + def initialize(user) + @current_user = user + end + + def execute + Project.all + end + end + end + + let(:user) { create(:user) } + let(:finder) { finder_class.new(user) } + let(:authorized_project) { create(:project) } + let(:unauthorized_project) { create(:project) } + + before do + authorized_project.add_developer(user) + end + + describe '#find_by!' do + it 'returns the project if the user has access' do + expect(finder.find_by!(id: authorized_project.id)).to eq(authorized_project) + end + + it 'raises not found when the project is not found' do + expect { finder.find_by!(id: 0) }.to raise_error(ActiveRecord::RecordNotFound) + end + + it 'raises not found the user does not have access' do + expect { finder.find_by!(id: unauthorized_project.id) }.to raise_error(ActiveRecord::RecordNotFound) + end + end + + describe '#find' do + it 'returns the project if the user has access' do + expect(finder.find(authorized_project.id)).to eq(authorized_project) + end + + it 'raises not found when the project is not found' do + expect { finder.find(0) }.to raise_error(ActiveRecord::RecordNotFound) + end + + it 'raises not found the user does not have access' do + expect { finder.find(unauthorized_project.id) }.to raise_error(ActiveRecord::RecordNotFound) + end + end + + describe '#find_by' do + it 'returns the project if the user has access' do + expect(finder.find_by(id: authorized_project.id)).to eq(authorized_project) + end + + it 'returns nil when the project is not found' do + expect(finder.find_by(id: 0)).to be_nil + end + + it 'returns nil when the user does not have access' do + expect(finder.find_by(id: unauthorized_project.id)).to be_nil + end + end +end diff --git a/spec/finders/concerns/finder_with_cross_project_access_spec.rb b/spec/finders/concerns/finder_with_cross_project_access_spec.rb new file mode 100644 index 00000000000..c784fb87972 --- /dev/null +++ b/spec/finders/concerns/finder_with_cross_project_access_spec.rb @@ -0,0 +1,118 @@ +require 'spec_helper' + +describe FinderWithCrossProjectAccess do + let(:finder_class) do + Class.new do + prepend FinderWithCrossProjectAccess + include FinderMethods + + requires_cross_project_access if: -> { requires_access? } + + attr_reader :current_user + + def initialize(user) + @current_user = user + end + + def execute + Issue.all + end + end + end + + let(:user) { create(:user) } + subject(:finder) { finder_class.new(user) } + let!(:result) { create(:issue) } + + before do + result.project.add_master(user) + end + + def expect_access_check_on_result + expect(finder).not_to receive(:requires_access?) 
+ expect(Ability).to receive(:allowed?).with(user, :read_issue, result).and_call_original + end + + context 'when the user cannot read cross project' do + before do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(user, :read_cross_project) + .and_return(false) + end + + describe '#execute' do + it 'returns a issue if the check is disabled' do + expect(finder).to receive(:requires_access?).and_return(false) + + expect(finder.execute).to include(result) + end + + it 'returns an empty relation when the check is enabled' do + expect(finder).to receive(:requires_access?).and_return(true) + + expect(finder.execute).to be_empty + end + + it 'only queries once when check is enabled' do + expect(finder).to receive(:requires_access?).and_return(true) + + expect { finder.execute }.not_to exceed_query_limit(1) + end + + it 'only queries once when check is disabled' do + expect(finder).to receive(:requires_access?).and_return(false) + + expect { finder.execute }.not_to exceed_query_limit(1) + end + end + + describe '#find' do + it 'checks the accessibility of the subject directly' do + expect_access_check_on_result + + finder.find(result.id) + end + + it 'returns the issue' do + expect(finder.find(result.id)).to eq(result) + end + end + + describe '#find_by' do + it 'checks the accessibility of the subject directly' do + expect_access_check_on_result + + finder.find_by(id: result.id) + end + end + + describe '#find_by!' do + it 'checks the accessibility of the subject directly' do + expect_access_check_on_result + + finder.find_by!(id: result.id) + end + + it 're-enables the check after the find failed' do + finder.find_by!(id: 9999) rescue ActiveRecord::RecordNotFound + + expect(finder.instance_variable_get(:@should_skip_cross_project_check)) + .to eq(false) + end + end + end + + context 'when the user can read cross project' do + before do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(user, :read_cross_project) + .and_return(true) + end + + it 'returns the result' do + expect(finder).not_to receive(:requires_access?) 
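# With :read_cross_project allowed, the prepended check is skipped entirely:
# the `if:` condition (requires_access?) is never evaluated and execute
# returns the unrestricted relation, which is what this example asserts.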
+ + expect(finder.execute).to include(result) + end + end +end diff --git a/spec/finders/events_finder_spec.rb b/spec/finders/events_finder_spec.rb index 18d6c0cfd74..62968e83292 100644 --- a/spec/finders/events_finder_spec.rb +++ b/spec/finders/events_finder_spec.rb @@ -26,6 +26,14 @@ describe EventsFinder do expect(events).not_to include(opened_merge_request_event) end + + it 'returns nothing when the current user cannot read cross project' do + expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + + events = described_class.new(source: user, current_user: user).execute + + expect(events).to be_empty + end end context 'when targeting a project' do diff --git a/spec/finders/labels_finder_spec.rb b/spec/finders/labels_finder_spec.rb index 06031aee217..d434c501110 100644 --- a/spec/finders/labels_finder_spec.rb +++ b/spec/finders/labels_finder_spec.rb @@ -5,6 +5,8 @@ describe LabelsFinder do let(:group_1) { create(:group) } let(:group_2) { create(:group) } let(:group_3) { create(:group) } + let(:private_group_1) { create(:group, :private) } + let(:private_subgroup_1) { create(:group, :private, parent: private_group_1) } let(:project_1) { create(:project, namespace: group_1) } let(:project_2) { create(:project, namespace: group_2) } @@ -20,6 +22,8 @@ describe LabelsFinder do let!(:group_label_1) { create(:group_label, group: group_1, title: 'Label 1 (group)') } let!(:group_label_2) { create(:group_label, group: group_1, title: 'Group Label 2') } let!(:group_label_3) { create(:group_label, group: group_2, title: 'Group Label 3') } + let!(:private_group_label_1) { create(:group_label, group: private_group_1, title: 'Private Group Label 1') } + let!(:private_subgroup_label_1) { create(:group_label, group: private_subgroup_1, title: 'Private Sub Group Label 1') } let(:user) { create(:user) } @@ -66,6 +70,44 @@ describe LabelsFinder do expect(finder.execute).to eq [group_label_2, group_label_1] end end + + context 'when including labels from group ancestors', :nested_groups do + it 'returns labels from group and its ancestors' do + private_group_1.add_developer(user) + private_subgroup_1.add_developer(user) + + finder = described_class.new(user, group_id: private_subgroup_1.id, only_group_labels: true, include_ancestor_groups: true) + + expect(finder.execute).to eq [private_group_label_1, private_subgroup_label_1] + end + + it 'ignores labels from groups which the user cannot read' do + private_subgroup_1.add_developer(user) + + finder = described_class.new(user, group_id: private_subgroup_1.id, only_group_labels: true, include_ancestor_groups: true) + + expect(finder.execute).to eq [private_subgroup_label_1] + end + end + + context 'when including labels from group descendants', :nested_groups do + it 'returns labels from group and its descendants' do + private_group_1.add_developer(user) + private_subgroup_1.add_developer(user) + + finder = described_class.new(user, group_id: private_group_1.id, only_group_labels: true, include_descendant_groups: true) + + expect(finder.execute).to eq [private_group_label_1, private_subgroup_label_1] + end + + it 'ignores labels from groups which the user cannot read' do + private_subgroup_1.add_developer(user) + + finder = described_class.new(user, group_id: private_group_1.id, only_group_labels: true, include_descendant_groups: true) + + expect(finder.execute).to eq [private_subgroup_label_1] + end + end end context 'filtering by project_id' do diff --git a/spec/finders/merge_requests_finder_spec.rb
b/spec/finders/merge_requests_finder_spec.rb index 9385c892c9e..7917a00fc50 100644 --- a/spec/finders/merge_requests_finder_spec.rb +++ b/spec/finders/merge_requests_finder_spec.rb @@ -18,7 +18,7 @@ describe MergeRequestsFinder do let(:project4) { create(:project, :public, group: subgroup) } let!(:merge_request1) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1) } - let!(:merge_request2) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1, state: 'closed') } + let!(:merge_request2) { create(:merge_request, :conflict, author: user, source_project: project2, target_project: project1, state: 'closed') } let!(:merge_request3) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project2) } let!(:merge_request4) { create(:merge_request, :simple, author: user, source_project: project3, target_project: project3) } let!(:merge_request5) { create(:merge_request, :simple, author: user, source_project: project4, target_project: project4) } @@ -74,6 +74,22 @@ describe MergeRequestsFinder do expect(merge_requests).to contain_exactly(merge_request1) end + it 'filters by source branch' do + params = { source_branch: merge_request2.source_branch } + + merge_requests = described_class.new(user, params).execute + + expect(merge_requests).to contain_exactly(merge_request2) + end + + it 'filters by target branch' do + params = { target_branch: merge_request2.target_branch } + + merge_requests = described_class.new(user, params).execute + + expect(merge_requests).to contain_exactly(merge_request2) + end + context 'filtering by group milestone' do let!(:group) { create(:group, :public) } let(:group_milestone) { create(:milestone, group: group) } diff --git a/spec/finders/milestones_finder_spec.rb b/spec/finders/milestones_finder_spec.rb index 0b3cf7ece5f..656d120311a 100644 --- a/spec/finders/milestones_finder_spec.rb +++ b/spec/finders/milestones_finder_spec.rb @@ -70,4 +70,12 @@ describe MilestonesFinder do expect(result.to_a).to contain_exactly(milestone_1) end end + + describe '#find_by' do + it 'finds a single milestone' do + finder = described_class.new(project_ids: [project_1.id], state: 'all') + + expect(finder.find_by(iid: milestone_3.iid)).to eq(milestone_3) + end + end end diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb index 0a018d2b417..1ae0bd988f2 100644 --- a/spec/finders/snippets_finder_spec.rb +++ b/spec/finders/snippets_finder_spec.rb @@ -1,57 +1,8 @@ require 'spec_helper' describe SnippetsFinder do - let(:user) { create :user } - let(:user1) { create :user } - let(:group) { create :group, :public } - - let(:project1) { create(:project, :public, group: group) } - let(:project2) { create(:project, :private, group: group) } - - context 'all snippets visible to a user' do - let!(:snippet1) { create(:personal_snippet, :private) } - let!(:snippet2) { create(:personal_snippet, :internal) } - let!(:snippet3) { create(:personal_snippet, :public) } - let!(:project_snippet1) { create(:project_snippet, :private) } - let!(:project_snippet2) { create(:project_snippet, :internal) } - let!(:project_snippet3) { create(:project_snippet, :public) } - - it "returns all private and internal snippets" do - snippets = described_class.new(user, scope: :all).execute - expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3) - expect(snippets).not_to include(snippet1, project_snippet1) - end - - it "returns all public 
snippets" do - snippets = described_class.new(nil, scope: :all).execute - expect(snippets).to include(snippet3, project_snippet3) - expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2) - end - - it "returns all public and internal snippets for normal user" do - snippets = described_class.new(user).execute - - expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3) - expect(snippets).not_to include(snippet1, project_snippet1) - end - - it "returns all public snippets for non authorized user" do - snippets = described_class.new(nil).execute - - expect(snippets).to include(snippet3, project_snippet3) - expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2) - end - - it "returns all public and authored snippets for external user" do - external_user = create(:user, :external) - authored_snippet = create(:personal_snippet, :internal, author: external_user) - - snippets = described_class.new(external_user).execute - - expect(snippets).to include(snippet3, project_snippet3, authored_snippet) - expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2) - end - end + include Gitlab::Allowable + using RSpec::Parameterized::TableSyntax context 'filter by visibility' do let!(:snippet1) { create(:personal_snippet, :private) } @@ -67,6 +18,7 @@ describe SnippetsFinder do end context 'filter by scope' do + let(:user) { create :user } let!(:snippet1) { create(:personal_snippet, :private, author: user) } let!(:snippet2) { create(:personal_snippet, :internal, author: user) } let!(:snippet3) { create(:personal_snippet, :public, author: user) } @@ -84,7 +36,7 @@ describe SnippetsFinder do expect(snippets).not_to include(snippet2, snippet3) end - it "returns all snippets for 'are_interna;' scope" do + it "returns all snippets for 'are_internal' scope" do snippets = described_class.new(user, scope: :are_internal).execute expect(snippets).to include(snippet2) @@ -100,6 +52,8 @@ describe SnippetsFinder do end context 'filter by author' do + let(:user) { create :user } + let(:user1) { create :user } let!(:snippet1) { create(:personal_snippet, :private, author: user) } let!(:snippet2) { create(:personal_snippet, :internal, author: user) } let!(:snippet3) { create(:personal_snippet, :public, author: user) } @@ -147,6 +101,10 @@ describe SnippetsFinder do end context 'filter by project' do + let(:user) { create :user } + let(:group) { create :group, :public } + let(:project1) { create(:project, :public, group: group) } + before do @snippet1 = create(:project_snippet, :private, project: project1) @snippet2 = create(:project_snippet, :internal, project: project1) @@ -203,4 +161,27 @@ describe SnippetsFinder do expect(snippets).to include(@snippet1) end end + + describe '#execute' do + let(:project) { create(:project, :public) } + let!(:project_snippet) { create(:project_snippet, :public, project: project) } + let!(:personal_snippet) { create(:personal_snippet, :public) } + let(:user) { create(:user) } + subject(:finder) { described_class.new(user) } + + it 'returns project- and personal snippets' do + expect(finder.execute).to contain_exactly(project_snippet, personal_snippet) + end + + context 'when the user cannot read cross project' do + before do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + end + + it 'returns only personal snippets when the user cannot read cross project' do + 
expect(finder.execute).to contain_exactly(personal_snippet) + end + end + end end diff --git a/spec/finders/user_recent_events_finder_spec.rb b/spec/finders/user_recent_events_finder_spec.rb new file mode 100644 index 00000000000..3ca0f7c3c89 --- /dev/null +++ b/spec/finders/user_recent_events_finder_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe UserRecentEventsFinder do + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:project_owner) { project.creator } + let!(:event) { create(:event, project: project, author: project_owner) } + + subject(:finder) { described_class.new(user, project_owner) } + + describe '#execute' do + it 'does not include the event when a user does not have access to the project' do + expect(finder.execute).to be_empty + end + + context 'when the user has access to a project' do + before do + project.add_developer(user) + end + + it 'includes the event' do + expect(finder.execute).to include(event) + end + + it 'does not include the event if the user cannot read cross project' do + expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + expect(finder.execute).to be_empty + end + end + end +end diff --git a/spec/fixtures/api/schemas/cluster_status.json b/spec/fixtures/api/schemas/cluster_status.json index 489d563be2b..d27c12e43f2 100644 --- a/spec/fixtures/api/schemas/cluster_status.json +++ b/spec/fixtures/api/schemas/cluster_status.json @@ -30,7 +30,8 @@ ] } }, - "status_reason": { "type": ["string", "null"] } + "status_reason": { "type": ["string", "null"] }, + "external_ip": { "type": ["string", "null"] } }, "required" : [ "name", "status" ] } diff --git a/spec/fixtures/api/schemas/deployment.json b/spec/fixtures/api/schemas/deployment.json new file mode 100644 index 00000000000..536e6475c23 --- /dev/null +++ b/spec/fixtures/api/schemas/deployment.json @@ -0,0 +1,45 @@ +{ + "additionalProperties": false, + "properties": { + "created_at": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "iid": { + "type": "integer" + }, + "last?": { + "type": "boolean" + }, + "ref": { + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + } + }, + "required": [ + "name" + ], + "type": "object" + }, + "sha": { + "type": "string" + }, + "tag": { + "type": "boolean" + } + }, + "required": [ + "sha", + "created_at", + "iid", + "tag", + "last?", + "ref", + "id" + ], + "type": "object" +} diff --git a/spec/fixtures/api/schemas/deployments.json b/spec/fixtures/api/schemas/deployments.json index 1112f23aab2..7bf50e4f859 100644 --- a/spec/fixtures/api/schemas/deployments.json +++ b/spec/fixtures/api/schemas/deployments.json @@ -3,49 +3,7 @@ "properties": { "deployments": { "items": { - "additionalProperties": false, - "properties": { - "created_at": { - "type": "string" - }, - "id": { - "type": "integer" - }, - "iid": { - "type": "integer" - }, - "last?": { - "type": "boolean" - }, - "ref": { - "additionalProperties": false, - "properties": { - "name": { - "type": "string" - } - }, - "required": [ - "name" - ], - "type": "object" - }, - "sha": { - "type": "string" - }, - "tag": { - "type": "boolean" - } - }, - "required": [ - "sha", - "created_at", - "iid", - "tag", - "last?", - "ref", - "id" - ], - "type": "object" + "$ref": "deployment.json" }, "minItems": 1, "type": "array" diff --git a/spec/fixtures/api/schemas/entities/merge_request_widget.json b/spec/fixtures/api/schemas/entities/merge_request_widget.json index 05461787f06..cfbeec58a45 100644 --- 
a/spec/fixtures/api/schemas/entities/merge_request_widget.json +++ b/spec/fixtures/api/schemas/entities/merge_request_widget.json @@ -75,7 +75,9 @@ "properties": { "can_remove_source_branch": { "type": "boolean" }, "can_revert_on_current_merge_request": { "type": ["boolean", "null"] }, - "can_cherry_pick_on_current_merge_request": { "type": ["boolean", "null"] } + "can_cherry_pick_on_current_merge_request": { "type": ["boolean", "null"] }, + "can_create_note": { "type": "boolean" }, + "can_update": { "type": "boolean" } }, "additionalProperties": false }, @@ -103,6 +105,7 @@ "merge_ongoing": { "type": "boolean" }, "ff_only_enabled": { "type": ["boolean", false] }, "should_be_rebased": { "type": "boolean" }, + "create_note_path": { "type": ["string", "null"] }, "rebase_commit_sha": { "type": ["string", "null"] }, "rebase_in_progress": { "type": "boolean" }, "can_push_to_source_branch": { "type": "boolean" }, diff --git a/spec/fixtures/api/schemas/public_api/v4/blobs.json b/spec/fixtures/api/schemas/public_api/v4/blobs.json index 9cb1eae3762..a812815838f 100644 --- a/spec/fixtures/api/schemas/public_api/v4/blobs.json +++ b/spec/fixtures/api/schemas/public_api/v4/blobs.json @@ -7,11 +7,12 @@ "data": { "type": "string" }, "filename": { "type": ["string"] }, "id": { "type": ["string", "null"] }, + "project_id": { "type": "integer" }, "ref": { "type": "string" }, "startline": { "type": "integer" } }, "required": [ - "basename", "data", "filename", "id", "ref", "startline" + "basename", "data", "filename", "id", "ref", "startline", "project_id" ], "additionalProperties": false } diff --git a/spec/fixtures/api/schemas/public_api/v4/commit/detail.json b/spec/fixtures/api/schemas/public_api/v4/commit/detail.json index 88a3cad62f6..477e776a804 100644 --- a/spec/fixtures/api/schemas/public_api/v4/commit/detail.json +++ b/spec/fixtures/api/schemas/public_api/v4/commit/detail.json @@ -4,9 +4,9 @@ { "$ref": "basic.json" }, { "required" : [ - "stats", "status", - "last_pipeline" + "last_pipeline", + "project_id" ], "properties": { "stats": { "$ref": "../commit_stats.json" }, @@ -16,7 +16,8 @@ { "type": "null" }, { "$ref": "../pipeline/basic.json" } ] - } + }, + "project_id": { "type": "integer" } } } ] diff --git a/spec/fixtures/api/schemas/public_api/v4/commits_details.json b/spec/fixtures/api/schemas/public_api/v4/commits_details.json new file mode 100644 index 00000000000..1f5b1ad86ef --- /dev/null +++ b/spec/fixtures/api/schemas/public_api/v4/commits_details.json @@ -0,0 +1,4 @@ +{ + "type": "array", + "items": { "$ref": "commit/detail.json" } +} diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json index e8c17298b43..ed8ed9085c0 100644 --- a/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json +++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json @@ -4,6 +4,9 @@ "domain": { "type": "string" }, "url": { "type": "uri" }, "project_id": { "type": "integer" }, + "verified": { "type": "boolean" }, + "verification_code": { "type": ["string", "null"] }, + "enabled_until": { "type": ["date", "null"] }, "certificate_expiration": { "type": "object", "properties": { @@ -14,6 +17,6 @@ "additionalProperties": false } }, - "required": ["domain", "url", "project_id"], + "required": ["domain", "url", "project_id", "verified", "verification_code", "enabled_until"], "additionalProperties": false } diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json 
b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json index 08db8d47050..b57d544f896 100644 --- a/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json +++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json @@ -3,6 +3,9 @@ "properties": { "domain": { "type": "string" }, "url": { "type": "uri" }, + "verified": { "type": "boolean" }, + "verification_code": { "type": ["string", "null"] }, + "enabled_until": { "type": ["date", "null"] }, "certificate": { "type": "object", "properties": { @@ -15,6 +18,6 @@ "additionalProperties": false } }, - "required": ["domain", "url"], + "required": ["domain", "url", "verified", "verification_code", "enabled_until"], "additionalProperties": false } diff --git a/spec/fixtures/api/schemas/variable.json b/spec/fixtures/api/schemas/variable.json index 78977118b0a..6f6b044115b 100644 --- a/spec/fixtures/api/schemas/variable.json +++ b/spec/fixtures/api/schemas/variable.json @@ -10,7 +10,8 @@ "id": { "type": "integer" }, "key": { "type": "string" }, "value": { "type": "string" }, - "protected": { "type": "boolean" } + "protected": { "type": "boolean" }, + "environment_scope": { "type": "string", "optional": true } }, "additionalProperties": false } diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb index f7a4a7afced..43cb0dfe163 100644 --- a/spec/helpers/application_helper_spec.rb +++ b/spec/helpers/application_helper_spec.rb @@ -63,13 +63,29 @@ describe ApplicationHelper do end end - describe 'avatar_icon' do + describe 'avatar_icon_for' do + let!(:user) { create(:user, avatar: File.open(uploaded_image_temp_path), email: 'bar@example.com') } + let(:email) { 'foo@example.com' } + let!(:another_user) { create(:user, avatar: File.open(uploaded_image_temp_path), email: email) } + + it 'prefers the user to retrieve the avatar_url' do + expect(helper.avatar_icon_for(user, email).to_s) + .to eq(user.avatar.url) + end + + it 'falls back to email lookup if no user given' do + expect(helper.avatar_icon_for(nil, email).to_s) + .to eq(another_user.avatar.url) + end + end + + describe 'avatar_icon_for_email' do let(:user) { create(:user, avatar: File.open(uploaded_image_temp_path)) } context 'using an email' do context 'when there is a matching user' do it 'returns a relative URL for the avatar' do - expect(helper.avatar_icon(user.email).to_s) + expect(helper.avatar_icon_for_email(user.email).to_s) .to eq(user.avatar.url) end end @@ -78,17 +94,37 @@ describe ApplicationHelper do it 'calls gravatar_icon' do expect(helper).to receive(:gravatar_icon).with('foo@example.com', 20, 2) - helper.avatar_icon('foo@example.com', 20, 2) + helper.avatar_icon_for_email('foo@example.com', 20, 2) + end + end + + context 'without an email passed' do + it 'calls gravatar_icon' do + expect(helper).to receive(:gravatar_icon).with(nil, 20, 2) + + helper.avatar_icon_for_email(nil, 20, 2) end end end + end + + describe 'avatar_icon_for_user' do + let(:user) { create(:user, avatar: File.open(uploaded_image_temp_path)) } - describe 'using a user' do + context 'with a user object passed' do it 'returns a relative URL for the avatar' do - expect(helper.avatar_icon(user).to_s) + expect(helper.avatar_icon_for_user(user).to_s) .to eq(user.avatar.url) end end + + context 'without a user object passed' do + it 'calls gravatar_icon' do + expect(helper).to receive(:gravatar_icon).with(nil, 20, 2) + + helper.avatar_icon_for_user(nil, 20, 2) + end + end end describe 'gravatar_icon' do diff --git 
a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb index f44e7ef6843..04c6d259135 100644 --- a/spec/helpers/avatars_helper_spec.rb +++ b/spec/helpers/avatars_helper_spec.rb @@ -29,7 +29,7 @@ describe AvatarsHelper do is_expected.to eq tag( :img, alt: "#{user.name}'s avatar", - src: avatar_icon(user, 16), + src: avatar_icon_for_user(user, 16), data: { container: 'body' }, class: 'avatar s16 has-tooltip', title: user.name @@ -43,7 +43,7 @@ describe AvatarsHelper do is_expected.to eq tag( :img, alt: "#{user.name}'s avatar", - src: avatar_icon(user, 16), + src: avatar_icon_for_user(user, 16), data: { container: 'body' }, class: "avatar s16 #{options[:css_class]} has-tooltip", title: user.name @@ -58,7 +58,7 @@ describe AvatarsHelper do is_expected.to eq tag( :img, alt: "#{user.name}'s avatar", - src: avatar_icon(user, options[:size]), + src: avatar_icon_for_user(user, options[:size]), data: { container: 'body' }, class: "avatar s#{options[:size]} has-tooltip", title: user.name @@ -89,7 +89,7 @@ describe AvatarsHelper do :img, alt: "#{user.name}'s avatar", src: LazyImageTagHelper.placeholder_image, - data: { container: 'body', src: avatar_icon(user, 16) }, + data: { container: 'body', src: avatar_icon_for_user(user, 16) }, class: "avatar s16 has-tooltip lazy", title: user.name ) @@ -104,7 +104,7 @@ describe AvatarsHelper do is_expected.to eq tag( :img, alt: "#{user.name}'s avatar", - src: avatar_icon(user, 16), + src: avatar_icon_for_user(user, 16), data: { container: 'body' }, class: "avatar s16 has-tooltip", title: user.name @@ -119,7 +119,7 @@ describe AvatarsHelper do is_expected.to eq tag( :img, alt: "#{user.name}'s avatar", - src: avatar_icon(user, 16), + src: avatar_icon_for_user(user, 16), class: "avatar s16", title: user.name ) @@ -137,7 +137,7 @@ describe AvatarsHelper do is_expected.to eq tag( :img, alt: "#{user.name}'s avatar", - src: avatar_icon(user, 16), + src: avatar_icon_for_user(user, 16), data: { container: 'body' }, class: "avatar s16 has-tooltip", title: user.name @@ -149,7 +149,7 @@ describe AvatarsHelper do is_expected.to eq tag( :img, alt: "#{options[:user_name]}'s avatar", - src: avatar_icon(options[:user_email], 16), + src: avatar_icon_for_email(options[:user_email], 16), data: { container: 'body' }, class: "avatar s16 has-tooltip", title: options[:user_name] diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb index a030796c54e..1fa194fe1b8 100644 --- a/spec/helpers/blob_helper_spec.rb +++ b/spec/helpers/blob_helper_spec.rb @@ -86,7 +86,7 @@ describe BlobHelper do it 'verifies blob is text' do expect(helper).not_to receive(:blob_text_viewable?) 
- button = edit_blob_link(project, 'refs/heads/master', 'README.md') + button = edit_blob_button(project, 'refs/heads/master', 'README.md') expect(button).to start_with('<button') end @@ -96,17 +96,17 @@ describe BlobHelper do expect(project.repository).not_to receive(:blob_at) - edit_blob_link(project, 'refs/heads/master', 'README.md', blob: blob) + edit_blob_button(project, 'refs/heads/master', 'README.md', blob: blob) end it 'returns a link with the proper route' do - link = edit_blob_link(project, 'master', 'README.md') + link = edit_blob_button(project, 'master', 'README.md') expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/edit/master/README.md") end it 'returns a link with the passed link_opts on the expected route' do - link = edit_blob_link(project, 'master', 'README.md', link_opts: { mr_id: 10 }) + link = edit_blob_button(project, 'master', 'README.md', link_opts: { mr_id: 10 }) expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/edit/master/README.md?mr_id=10") end diff --git a/spec/helpers/dashboard_helper_spec.rb b/spec/helpers/dashboard_helper_spec.rb new file mode 100644 index 00000000000..7ba24ba2956 --- /dev/null +++ b/spec/helpers/dashboard_helper_spec.rb @@ -0,0 +1,24 @@ +require 'spec_helper' + +describe DashboardHelper do + let(:user) { build(:user) } + + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:can?) { true } + end + + describe '#dashboard_nav_links' do + it 'has all the expected links by default' do + menu_items = [:projects, :groups, :activity, :milestones, :snippets] + + expect(helper.dashboard_nav_links).to contain_exactly(*menu_items) + end + + it 'does not contain cross project elements when the user cannot read cross project' do + expect(helper).to receive(:can?).with(user, :read_cross_project) { false } + + expect(helper.dashboard_nav_links).not_to include(:activity, :milestones) + end + end +end diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb index 8a80b88da5d..fccde8b7eba 100644 --- a/spec/helpers/events_helper_spec.rb +++ b/spec/helpers/events_helper_spec.rb @@ -20,5 +20,9 @@ describe EventsHelper do it 'handles nil values' do expect(helper.event_commit_title(nil)).to eq('') end + + it 'does not escape HTML entities' do + expect(helper.event_commit_title("foo & bar")).to eq("foo & bar") + end end end diff --git a/spec/helpers/explore_helper_spec.rb b/spec/helpers/explore_helper_spec.rb new file mode 100644 index 00000000000..12651d80e36 --- /dev/null +++ b/spec/helpers/explore_helper_spec.rb @@ -0,0 +1,18 @@ +require 'spec_helper' + +describe ExploreHelper do + let(:user) { build(:user) } + + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:can?) 
{ true } + end + + describe '#explore_nav_links' do + it 'has all the expected links by default' do + menu_items = [:projects, :groups, :snippets] + + expect(helper.explore_nav_links).to contain_exactly(*menu_items) + end + end +end diff --git a/spec/helpers/graph_helper_spec.rb b/spec/helpers/graph_helper_spec.rb index 400635abdde..1f8a38dc697 100644 --- a/spec/helpers/graph_helper_spec.rb +++ b/spec/helpers/graph_helper_spec.rb @@ -7,10 +7,10 @@ describe GraphHelper do let(:graph) { Network::Graph.new(project, 'master', commit, '') } it 'filters our refs used by GitLab' do - allow(commit).to receive(:ref_names).and_return(['refs/merge-requests/abc', 'master', 'refs/tmp/xyz']) self.instance_variable_set(:@graph, graph) - refs = get_refs(project.repository, commit) - expect(refs).to eq('master') + refs = refs(project.repository, commit) + + expect(refs).to match('master') end end end diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb index 5f608fe18d9..b48c252acd3 100644 --- a/spec/helpers/groups_helper_spec.rb +++ b/spec/helpers/groups_helper_spec.rb @@ -201,4 +201,39 @@ describe GroupsHelper do end end end + + describe '#group_sidebar_links' do + let(:group) { create(:group, :public) } + let(:user) { create(:user) } + before do + allow(helper).to receive(:current_user) { user } + allow(helper).to receive(:can?) { true } + helper.instance_variable_set(:@group, group) + end + + it 'returns all the expected links' do + links = [ + :overview, :activity, :issues, :labels, :milestones, :merge_requests, + :group_members, :settings + ] + + expect(helper.group_sidebar_links).to include(*links) + end + + it 'includes settings when the user can admin the group' do + expect(helper).to receive(:current_user) { user } + expect(helper).to receive(:can?).with(user, :admin_group, group) { false } + + expect(helper.group_sidebar_links).not_to include(:settings) + end + + it 'excludes cross project features when the user cannot read cross project' do + cross_project_features = [:activity, :issues, :labels, :milestones, + :merge_requests] + + expect(helper).to receive(:can?).with(user, :read_cross_project) { false } + + expect(helper.group_sidebar_links).not_to include(*cross_project_features) + end + end end diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb index 7fa665aecdc..2fecd1a3d27 100644 --- a/spec/helpers/issuables_helper_spec.rb +++ b/spec/helpers/issuables_helper_spec.rb @@ -173,23 +173,23 @@ describe IssuablesHelper do @project = issue.project expected_data = { - 'endpoint' => "/#{@project.full_path}/issues/#{issue.iid}", - 'updateEndpoint' => "/#{@project.full_path}/issues/#{issue.iid}.json", - 'canUpdate' => true, - 'canDestroy' => true, - 'issuableRef' => "##{issue.iid}", - 'markdownPreviewPath' => "/#{@project.full_path}/preview_markdown", - 'markdownDocsPath' => '/help/user/markdown', - 'issuableTemplates' => [], - 'projectPath' => @project.path, - 'projectNamespace' => @project.namespace.path, - 'initialTitleHtml' => issue.title, - 'initialTitleText' => issue.title, - 'initialDescriptionHtml' => '<p dir="auto">issue text</p>', - 'initialDescriptionText' => 'issue text', - 'initialTaskStatus' => '0 of 0 tasks completed' + endpoint: "/#{@project.full_path}/issues/#{issue.iid}", + updateEndpoint: "/#{@project.full_path}/issues/#{issue.iid}.json", + canUpdate: true, + canDestroy: true, + issuableRef: "##{issue.iid}", + markdownPreviewPath: "/#{@project.full_path}/preview_markdown", + markdownDocsPath: 
'/help/user/markdown', + issuableTemplates: [], + projectPath: @project.path, + projectNamespace: @project.namespace.path, + initialTitleHtml: issue.title, + initialTitleText: issue.title, + initialDescriptionHtml: '<p dir="auto">issue text</p>', + initialDescriptionText: 'issue text', + initialTaskStatus: '0 of 0 tasks completed' } - expect(JSON.parse(helper.issuable_initial_data(issue))).to eq(expected_data) + expect(helper.issuable_initial_data(issue)).to eq(expected_data) end end diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb index ddf881a7b6f..aeef5352333 100644 --- a/spec/helpers/issues_helper_spec.rb +++ b/spec/helpers/issues_helper_spec.rb @@ -113,21 +113,6 @@ describe IssuesHelper do end end - describe "milestone_options" do - it "gets closed milestone from current issue" do - closed_milestone = create(:closed_milestone, project: project) - milestone1 = create(:milestone, project: project) - milestone2 = create(:milestone, project: project) - issue.update_attributes(milestone_id: closed_milestone.id) - - options = milestone_options(issue) - - expect(options).to have_selector('option[selected]', text: closed_milestone.title) - expect(options).to have_selector('option', text: milestone1.title) - expect(options).to have_selector('option', text: milestone2.title) - end - end - describe "#link_to_discussions_to_resolve" do describe "passing only a merge request" do let(:merge_request) { create(:merge_request) } diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb new file mode 100644 index 00000000000..e840c927d59 --- /dev/null +++ b/spec/helpers/nav_helper_spec.rb @@ -0,0 +1,53 @@ +require 'spec_helper' + +describe NavHelper do + describe '#header_links' do + before do + allow(helper).to receive(:session) { {} } + end + + context 'when the user is logged in' do + let(:user) { build(:user) } + + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:can?) 
{ true } + end + + it 'has all the expected links by default' do + menu_items = [:user_dropdown, :search, :issues, :merge_requests, :todos] + + expect(helper.header_links).to contain_exactly(*menu_items) + end + + it 'contains the impersonation link while impersonating' do + expect(helper).to receive(:session) { { impersonator_id: 1 } } + + expect(helper.header_links).to include(:admin_impersonation) + end + + context 'when the user cannot read cross project' do + before do + allow(helper).to receive(:can?).with(user, :read_cross_project) { false } + end + + it 'does not contain cross project elements when the user cannot read cross project' do + expect(helper.header_links).not_to include(:issues, :merge_requests, :todos, :search) + end + + it 'shows the search box when the user cannot read cross project and he is visiting a project' do + helper.instance_variable_set(:@project, create(:project)) + + expect(helper.header_links).to include(:search) + end + end + end + + it 'returns only the sign in and search when the user is not logged in' do + allow(helper).to receive(:current_user).and_return(nil) + allow(helper).to receive(:can?).with(nil, :read_cross_project) { true } + + expect(helper.header_links).to contain_exactly(:sign_in, :search) + end + end +end diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb index 749aa25e632..e2a0c4322ff 100644 --- a/spec/helpers/preferences_helper_spec.rb +++ b/spec/helpers/preferences_helper_spec.rb @@ -77,103 +77,6 @@ describe PreferencesHelper do end end - describe '#default_project_view' do - context 'user not signed in' do - before do - helper.instance_variable_set(:@project, project) - stub_user - end - - context 'when repository is empty' do - let(:project) { create(:project_empty_repo, :public) } - - it 'returns activity if user has repository access' do - allow(helper).to receive(:can?).with(nil, :download_code, project).and_return(true) - - expect(helper.default_project_view).to eq('activity') - end - - it 'returns activity if user does not have repository access' do - allow(helper).to receive(:can?).with(nil, :download_code, project).and_return(false) - - expect(helper.default_project_view).to eq('activity') - end - end - - context 'when repository is not empty' do - let(:project) { create(:project, :public, :repository) } - - it 'returns files and readme if user has repository access' do - allow(helper).to receive(:can?).with(nil, :download_code, project).and_return(true) - - expect(helper.default_project_view).to eq('files') - end - - it 'returns activity if user does not have repository access' do - allow(helper).to receive(:can?).with(nil, :download_code, project).and_return(false) - - expect(helper.default_project_view).to eq('activity') - end - end - end - - context 'user signed in' do - let(:user) { create(:user, :readme) } - let(:project) { create(:project, :public, :repository) } - - before do - helper.instance_variable_set(:@project, project) - allow(helper).to receive(:current_user).and_return(user) - end - - context 'when the user is allowed to see the code' do - it 'returns the project view' do - allow(helper).to receive(:can?).with(user, :download_code, project).and_return(true) - - expect(helper.default_project_view).to eq('readme') - end - end - - context 'with wikis enabled and the right policy for the user' do - before do - project.project_feature.update_attribute(:issues_access_level, 0) - allow(helper).to receive(:can?).with(user, :download_code, project).and_return(false) - end - - 
it 'returns wiki if the user has the right policy' do - allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(true) - - expect(helper.default_project_view).to eq('wiki') - end - - it 'returns customize_workflow if the user does not have the right policy' do - allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(false) - - expect(helper.default_project_view).to eq('customize_workflow') - end - end - - context 'with issues as a feature available' do - it 'return issues' do - allow(helper).to receive(:can?).with(user, :download_code, project).and_return(false) - allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(false) - - expect(helper.default_project_view).to eq('projects/issues/issues') - end - end - - context 'with no activity, no wikies and no issues' do - it 'returns customize_workflow as default' do - project.project_feature.update_attribute(:issues_access_level, 0) - allow(helper).to receive(:can?).with(user, :download_code, project).and_return(false) - allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(false) - - expect(helper.default_project_view).to eq('customize_workflow') - end - end - end - end - - def stub_user(messages = {}) if messages.empty? allow(helper).to receive(:current_user).and_return(nil) diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb index c0251bf7dc0..ce96e90e2d7 100644 --- a/spec/helpers/projects_helper_spec.rb +++ b/spec/helpers/projects_helper_spec.rb @@ -75,6 +75,12 @@ describe ProjectsHelper do describe "#project_list_cache_key", :clean_gitlab_redis_shared_state do let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:can?).with(user, :read_cross_project) { true } + end it "includes the route" do expect(helper.project_list_cache_key(project)).to include(project.route.cache_key) @@ -106,6 +112,10 @@ describe ProjectsHelper do expect(helper.project_list_cache_key(project).last).to start_with('v') end + it 'includes whether or not the user can read cross project' do + expect(helper.project_list_cache_key(project)).to include('cross-project:true') + end + it "includes the pipeline status when there is a status" do create(:ci_pipeline, :success, project: project, sha: project.commit.sha) @@ -215,7 +225,7 @@ describe ProjectsHelper do let(:expected) { double } before do - expect(helper).to receive(:avatar_icon).with(user, 16).and_return(expected) + expect(helper).to receive(:avatar_icon_for_user).with(user, 16).and_return(expected) end it 'returns image tag for member avatar' do @@ -264,32 +274,6 @@ describe ProjectsHelper do end end - describe '#license_short_name' do - let(:project) { create(:project) } - - context 'when project.repository has a license_key' do - it 'returns the nickname of the license if present' do - allow(project.repository).to receive(:license_key).and_return('agpl-3.0') - - expect(helper.license_short_name(project)).to eq('GNU AGPLv3') - end - - it 'returns the name of the license if nickname is not present' do - allow(project.repository).to receive(:license_key).and_return('mit') - - expect(helper.license_short_name(project)).to eq('MIT License') - end - end - - context 'when project.repository has no license_key but a license_blob' do - it 'returns LICENSE' do - allow(project.repository).to receive(:license_key).and_return(nil) - - expect(helper.license_short_name(project)).to
eq('LICENSE') - end - end - end - describe '#sanitized_import_error' do let(:project) { create(:project, :repository) } @@ -462,6 +446,22 @@ describe ProjectsHelper do end end + describe('#push_to_create_project_command') do + let(:user) { create(:user, username: 'john') } + + it 'returns the command to push to create project over HTTP' do + allow(Gitlab::CurrentSettings.current_application_settings).to receive(:enabled_git_access_protocol) { 'http' } + + expect(helper.push_to_create_project_command(user)).to eq('git push --set-upstream http://test.host/john/$(git rev-parse --show-toplevel | xargs basename).git $(git rev-parse --abbrev-ref HEAD)') + end + + it 'returns the command to push to create project over SSH' do + allow(Gitlab::CurrentSettings.current_application_settings).to receive(:enabled_git_access_protocol) { 'ssh' } + + expect(helper.push_to_create_project_command(user)).to eq('git push --set-upstream git@localhost:john/$(git rev-parse --show-toplevel | xargs basename).git $(git rev-parse --abbrev-ref HEAD)') + end + end + describe '#any_projects?' do let!(:project) { create(:project) } diff --git a/spec/helpers/u2f_helper_spec.rb b/spec/helpers/u2f_helper_spec.rb deleted file mode 100644 index 0d65b4fe0b8..00000000000 --- a/spec/helpers/u2f_helper_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -require 'spec_helper' - -describe U2fHelper do - describe 'when not on mobile' do - it 'does not inject u2f on chrome 40' do - device = double(mobile?: false) - browser = double(chrome?: true, opera?: false, version: 40, device: device) - allow(helper).to receive(:browser).and_return(browser) - expect(helper.inject_u2f_api?).to eq false - end - - it 'injects u2f on chrome 41' do - device = double(mobile?: false) - browser = double(chrome?: true, opera?: false, version: 41, device: device) - allow(helper).to receive(:browser).and_return(browser) - expect(helper.inject_u2f_api?).to eq true - end - - it 'does not inject u2f on opera 39' do - device = double(mobile?: false) - browser = double(chrome?: false, opera?: true, version: 39, device: device) - allow(helper).to receive(:browser).and_return(browser) - expect(helper.inject_u2f_api?).to eq false - end - - it 'injects u2f on opera 40' do - device = double(mobile?: false) - browser = double(chrome?: false, opera?: true, version: 40, device: device) - allow(helper).to receive(:browser).and_return(browser) - expect(helper.inject_u2f_api?).to eq true - end - end - - describe 'when on mobile' do - it 'does not inject u2f on chrome 41' do - device = double(mobile?: true) - browser = double(chrome?: true, opera?: false, version: 41, device: device) - allow(helper).to receive(:browser).and_return(browser) - expect(helper.inject_u2f_api?).to eq false - end - - it 'does not inject u2f on opera 40' do - device = double(mobile?: true) - browser = double(chrome?: false, opera?: true, version: 40, device: device) - allow(helper).to receive(:browser).and_return(browser) - expect(helper.inject_u2f_api?).to eq false - end - end -end diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb index 03f78de8e91..6332217b920 100644 --- a/spec/helpers/users_helper_spec.rb +++ b/spec/helpers/users_helper_spec.rb @@ -14,4 +14,17 @@ describe UsersHelper do is_expected.to include("title=\"#{user.email}\"") end end + + describe '#profile_tabs' do + subject(:tabs) { helper.profile_tabs } + + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:can?).and_return(true) + end + + it 'includes all the 
expected tabs' do + expect(tabs).to include(:activity, :groups, :contributed, :projects, :snippets) + end + end end diff --git a/spec/javascripts/ajax_loading_spinner_spec.js b/spec/javascripts/ajax_loading_spinner_spec.js index c93b7cc6cac..95c2c122403 100644 --- a/spec/javascripts/ajax_loading_spinner_spec.js +++ b/spec/javascripts/ajax_loading_spinner_spec.js @@ -1,5 +1,3 @@ -import 'jquery'; -import 'jquery-ujs'; import AjaxLoadingSpinner from '~/ajax_loading_spinner'; describe('Ajax Loading Spinner', () => { diff --git a/spec/javascripts/autosave_spec.js b/spec/javascripts/autosave_spec.js index 9f9acc392c2..b568d7fa8b0 100644 --- a/spec/javascripts/autosave_spec.js +++ b/spec/javascripts/autosave_spec.js @@ -3,28 +3,24 @@ import AccessorUtilities from '~/lib/utils/accessor'; describe('Autosave', () => { let autosave; + const field = $('<textarea></textarea>'); + const key = 'key'; describe('class constructor', () => { - const key = 'key'; - const field = jasmine.createSpyObj('field', ['data', 'on']); - beforeEach(() => { spyOn(AccessorUtilities, 'isLocalStorageAccessSafe').and.returnValue(true); spyOn(Autosave.prototype, 'restore'); - - autosave = new Autosave(field, key); }); it('should set .isLocalStorageAvailable', () => { + autosave = new Autosave(field, key); + expect(AccessorUtilities.isLocalStorageAccessSafe).toHaveBeenCalled(); expect(autosave.isLocalStorageAvailable).toBe(true); }); }); describe('restore', () => { - const key = 'key'; - const field = jasmine.createSpyObj('field', ['trigger']); - beforeEach(() => { autosave = { field, @@ -49,24 +45,53 @@ describe('Autosave', () => { describe('if .isLocalStorageAvailable is `true`', () => { beforeEach(() => { autosave.isLocalStorageAvailable = true; - - Autosave.prototype.restore.call(autosave); }); it('should call .getItem', () => { + Autosave.prototype.restore.call(autosave); + expect(window.localStorage.getItem).toHaveBeenCalledWith(key); }); + + it('triggers jquery event', () => { + spyOn(autosave.field, 'trigger').and.callThrough(); + + Autosave.prototype.restore.call(autosave); + + expect( + field.trigger, + ).toHaveBeenCalled(); + }); + + it('triggers native event', (done) => { + autosave.field.get(0).addEventListener('change', () => { + done(); + }); + + Autosave.prototype.restore.call(autosave); + }); + }); + + describe('if field gets deleted from DOM', () => { + beforeEach(() => { + autosave.field = $('.not-a-real-element'); + }); + + it('does not trigger event', () => { + spyOn(field, 'trigger').and.callThrough(); + + expect( + field.trigger, + ).not.toHaveBeenCalled(); + }); }); }); describe('save', () => { - const field = jasmine.createSpyObj('field', ['val']); - beforeEach(() => { autosave = jasmine.createSpyObj('autosave', ['reset']); autosave.field = field; - - field.val.and.returnValue('value'); + field.val('value'); spyOn(window.localStorage, 'setItem'); }); @@ -97,8 +122,6 @@ describe('Autosave', () => { }); describe('reset', () => { - const key = 'key'; - beforeEach(() => { autosave = { key, diff --git a/spec/javascripts/awards_handler_spec.js b/spec/javascripts/awards_handler_spec.js index 268b5b83b73..8e4bbb90ccb 100644 --- a/spec/javascripts/awards_handler_spec.js +++ b/spec/javascripts/awards_handler_spec.js @@ -79,7 +79,7 @@ import '~/lib/utils/common_utils'; return expect($emojiMenu.length).toBe(1); }); }); - return it('should remove emoji menu when body is clicked', function(done) { + it('should remove emoji menu when body is clicked', function(done) { $('.js-add-award').eq(0).click(); return 
lazyAssert(done, function() { var $emojiMenu; @@ -90,6 +90,17 @@ import '~/lib/utils/common_utils'; return expect($('.js-awards-block.current').length).toBe(0); }); }); + it('should not remove emoji menu when search is clicked', function(done) { + $('.js-add-award').eq(0).click(); + return lazyAssert(done, function() { + var $emojiMenu; + $emojiMenu = $('.emoji-menu'); + $('.emoji-search').click(); + expect($emojiMenu.length).toBe(1); + expect($emojiMenu.hasClass('is-visible')).toBe(true); + return expect($('.js-awards-block.current').length).toBe(1); + }); + }); }); describe('::addAwardToEmojiBar', function() { it('should add emoji to votes block', function() { @@ -127,7 +138,7 @@ import '~/lib/utils/common_utils'; $thumbsUpEmoji = $votesBlock.find('[data-name=thumbsup]').parent(); $thumbsUpEmoji.attr('data-title', 'sam'); awardsHandler.userAuthored($thumbsUpEmoji); - return expect($thumbsUpEmoji.data("original-title")).toBe("You cannot vote on your own issue, MR and note"); + return expect($thumbsUpEmoji.data("originalTitle")).toBe("You cannot vote on your own issue, MR and note"); }); it('should restore tooltip back to initial vote list', function() { var $thumbsUpEmoji, $votesBlock; @@ -138,7 +149,7 @@ import '~/lib/utils/common_utils'; awardsHandler.userAuthored($thumbsUpEmoji); jasmine.clock().tick(2801); jasmine.clock().uninstall(); - return expect($thumbsUpEmoji.data("original-title")).toBe("sam"); + return expect($thumbsUpEmoji.data("originalTitle")).toBe("sam"); }); }); describe('::getAwardUrl', function() { @@ -183,7 +194,7 @@ import '~/lib/utils/common_utils'; $thumbsUpEmoji.attr('data-title', 'sam, jerry, max, and andy'); awardsHandler.addAward($votesBlock, awardUrl, 'thumbsup', false); $thumbsUpEmoji.tooltip(); - return expect($thumbsUpEmoji.data("original-title")).toBe('You, sam, jerry, max, and andy'); + return expect($thumbsUpEmoji.data("originalTitle")).toBe('You, sam, jerry, max, and andy'); }); return it('handles the special case where "You" is not cleanly comma seperated', function() { var $thumbsUpEmoji, $votesBlock, awardUrl; @@ -193,7 +204,7 @@ import '~/lib/utils/common_utils'; $thumbsUpEmoji.attr('data-title', 'sam'); awardsHandler.addAward($votesBlock, awardUrl, 'thumbsup', false); $thumbsUpEmoji.tooltip(); - return expect($thumbsUpEmoji.data("original-title")).toBe('You and sam'); + return expect($thumbsUpEmoji.data("originalTitle")).toBe('You and sam'); }); }); describe('::removeYouToUserList', function() { @@ -206,7 +217,7 @@ import '~/lib/utils/common_utils'; $thumbsUpEmoji.addClass('active'); awardsHandler.addAward($votesBlock, awardUrl, 'thumbsup', false); $thumbsUpEmoji.tooltip(); - return expect($thumbsUpEmoji.data("original-title")).toBe('sam, jerry, max, and andy'); + return expect($thumbsUpEmoji.data("originalTitle")).toBe('sam, jerry, max, and andy'); }); return it('handles the special case where "You" is not cleanly comma seperated', function() { var $thumbsUpEmoji, $votesBlock, awardUrl; @@ -217,7 +228,7 @@ import '~/lib/utils/common_utils'; $thumbsUpEmoji.addClass('active'); awardsHandler.addAward($votesBlock, awardUrl, 'thumbsup', false); $thumbsUpEmoji.tooltip(); - return expect($thumbsUpEmoji.data("original-title")).toBe('sam'); + return expect($thumbsUpEmoji.data("originalTitle")).toBe('sam'); }); }); describe('::searchEmojis', () => { diff --git a/spec/javascripts/behaviors/requires_input_spec.js b/spec/javascripts/behaviors/requires_input_spec.js index 8287c58ac5a..e500bbe750f 100644 --- a/spec/javascripts/behaviors/requires_input_spec.js +++ 
b/spec/javascripts/behaviors/requires_input_spec.js @@ -15,7 +15,7 @@ describe('requiresInput', () => { }); it('enables submit when no field is required', () => { - $('*[required=required]').removeAttr('required'); + $('*[required=required]').prop('required', false); $('.js-requires-input').requiresInput(); expect(submitButton).not.toBeDisabled(); }); diff --git a/spec/javascripts/boards/board_new_issue_spec.js b/spec/javascripts/boards/board_new_issue_spec.js index e204985f039..d5fbfdeaa91 100644 --- a/spec/javascripts/boards/board_new_issue_spec.js +++ b/spec/javascripts/boards/board_new_issue_spec.js @@ -4,7 +4,7 @@ import Vue from 'vue'; import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; -import boardNewIssue from '~/boards/components/board_new_issue'; +import boardNewIssue from '~/boards/components/board_new_issue.vue'; import '~/boards/models/list'; import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data'; diff --git a/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js b/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js index 5b9cdceee71..ee457a9c48c 100644 --- a/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js +++ b/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js @@ -1,8 +1,10 @@ +import $ from 'jquery'; import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; import AjaxFormVariableList from '~/ci_variable_list/ajax_variable_list'; const VARIABLE_PATCH_ENDPOINT = 'http://test.host/frontend-fixtures/builds-project/variables'; +const HIDE_CLASS = 'hide'; describe('AjaxFormVariableList', () => { preloadFixtures('projects/ci_cd_settings.html.raw'); @@ -45,16 +47,16 @@ describe('AjaxFormVariableList', () => { const loadingIcon = saveButton.querySelector('.js-secret-variables-save-loading-icon'); mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(() => { - expect(loadingIcon.classList.contains('hide')).toEqual(false); + expect(loadingIcon.classList.contains(HIDE_CLASS)).toEqual(false); return [200, {}]; }); - expect(loadingIcon.classList.contains('hide')).toEqual(true); + expect(loadingIcon.classList.contains(HIDE_CLASS)).toEqual(true); ajaxVariableList.onSaveClicked() .then(() => { - expect(loadingIcon.classList.contains('hide')).toEqual(true); + expect(loadingIcon.classList.contains(HIDE_CLASS)).toEqual(true); }) .then(done) .catch(done.fail); @@ -78,11 +80,11 @@ describe('AjaxFormVariableList', () => { it('hides any previous error box', (done) => { mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(200); - expect(errorBox.classList.contains('hide')).toEqual(true); + expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true); ajaxVariableList.onSaveClicked() .then(() => { - expect(errorBox.classList.contains('hide')).toEqual(true); + expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true); }) .then(done) .catch(done.fail); @@ -103,17 +105,39 @@ describe('AjaxFormVariableList', () => { .catch(done.fail); }); + it('hides secret values', (done) => { + mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(200, {}); + + const row = container.querySelector('.js-row:first-child'); + const valueInput = row.querySelector('.js-ci-variable-input-value'); + const valuePlaceholder = row.querySelector('.js-secret-value-placeholder'); + + valueInput.value = 'bar'; + $(valueInput).trigger('input'); + + expect(valuePlaceholder.classList.contains(HIDE_CLASS)).toBe(true); + expect(valueInput.classList.contains(HIDE_CLASS)).toBe(false); + + ajaxVariableList.onSaveClicked() + 
.then(() => { + expect(valuePlaceholder.classList.contains(HIDE_CLASS)).toBe(false); + expect(valueInput.classList.contains(HIDE_CLASS)).toBe(true); + }) + .then(done) + .catch(done.fail); + }); + it('shows error box with validation errors', (done) => { const validationError = 'some validation error'; mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(400, [ validationError, ]); - expect(errorBox.classList.contains('hide')).toEqual(true); + expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true); ajaxVariableList.onSaveClicked() .then(() => { - expect(errorBox.classList.contains('hide')).toEqual(false); + expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(false); expect(errorBox.textContent.trim().replace(/\n+\s+/m, ' ')).toEqual(`Validation failed ${validationError}`); }) .then(done) @@ -123,11 +147,11 @@ describe('AjaxFormVariableList', () => { it('shows flash message when request fails', (done) => { mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(500); - expect(errorBox.classList.contains('hide')).toEqual(true); + expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true); ajaxVariableList.onSaveClicked() .then(() => { - expect(errorBox.classList.contains('hide')).toEqual(true); + expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true); }) .then(done) .catch(done.fail); @@ -170,9 +194,9 @@ describe('AjaxFormVariableList', () => { const valueInput = row.querySelector('.js-ci-variable-input-value'); keyInput.value = 'foo'; - keyInput.dispatchEvent(new Event('input')); + $(keyInput).trigger('input'); valueInput.value = 'bar'; - valueInput.dispatchEvent(new Event('input')); + $(valueInput).trigger('input'); expect(idInput.value).toEqual(''); diff --git a/spec/javascripts/ci_variable_list/ci_variable_list_spec.js b/spec/javascripts/ci_variable_list/ci_variable_list_spec.js index 6ab7b50e035..270f925e699 100644 --- a/spec/javascripts/ci_variable_list/ci_variable_list_spec.js +++ b/spec/javascripts/ci_variable_list/ci_variable_list_spec.js @@ -1,5 +1,7 @@ import VariableList from '~/ci_variable_list/ci_variable_list'; -import getSetTimeoutPromise from '../helpers/set_timeout_promise_helper'; +import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper'; + +const HIDE_CLASS = 'hide'; describe('VariableList', () => { preloadFixtures('pipeline_schedules/edit.html.raw'); @@ -92,14 +94,14 @@ describe('VariableList', () => { const $inputValue = $row.find('.js-ci-variable-input-value'); const $placeholder = $row.find('.js-secret-value-placeholder'); - expect($placeholder.hasClass('hide')).toBe(false); - expect($inputValue.hasClass('hide')).toBe(true); + expect($placeholder.hasClass(HIDE_CLASS)).toBe(false); + expect($inputValue.hasClass(HIDE_CLASS)).toBe(true); // Reveal values $wrapper.find('.js-secret-value-reveal-button').click(); - expect($placeholder.hasClass('hide')).toBe(true); - expect($inputValue.hasClass('hide')).toBe(false); + expect($placeholder.hasClass(HIDE_CLASS)).toBe(true); + expect($inputValue.hasClass(HIDE_CLASS)).toBe(false); }); }); }); @@ -126,7 +128,7 @@ describe('VariableList', () => { // Check for the correct default in the new row const $protectedInput = $wrapper.find('.js-row:last-child').find('.js-ci-variable-input-protected'); - expect($protectedInput.val()).toBe('true'); + expect($protectedInput.val()).toBe('false'); }) .then(done) .catch(done.fail); @@ -179,4 +181,35 @@ describe('VariableList', () => { expect($wrapper.find('.js-ci-variable-input-key:not([disabled])').length).toBe(3); }); }); + + describe('hideValues', () => { + beforeEach(() => { 
+ loadFixtures('projects/ci_cd_settings.html.raw'); + $wrapper = $('.js-ci-variable-list-section'); + + variableList = new VariableList({ + container: $wrapper, + formField: 'variables', + }); + variableList.init(); + }); + + it('should hide value input and show placeholder stars', () => { + const $row = $wrapper.find('.js-row'); + const $inputValue = $row.find('.js-ci-variable-input-value'); + const $placeholder = $row.find('.js-secret-value-placeholder'); + + $row.find('.js-ci-variable-input-value') + .val('foo') + .trigger('input'); + + expect($placeholder.hasClass(HIDE_CLASS)).toBe(true); + expect($inputValue.hasClass(HIDE_CLASS)).toBe(false); + + variableList.hideValues(); + + expect($placeholder.hasClass(HIDE_CLASS)).toBe(false); + expect($inputValue.hasClass(HIDE_CLASS)).toBe(true); + }); + }); }); diff --git a/spec/javascripts/clusters/clusters_bundle_spec.js b/spec/javascripts/clusters/clusters_bundle_spec.js index a9e244e523d..a5cd247b689 100644 --- a/spec/javascripts/clusters/clusters_bundle_spec.js +++ b/spec/javascripts/clusters/clusters_bundle_spec.js @@ -7,7 +7,7 @@ import { REQUEST_SUCCESS, REQUEST_FAILURE, } from '~/clusters/constants'; -import getSetTimeoutPromise from '../helpers/set_timeout_promise_helper'; +import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper'; describe('Clusters', () => { let cluster; diff --git a/spec/javascripts/clusters/components/application_row_spec.js b/spec/javascripts/clusters/components/application_row_spec.js index e671c18e1a5..2c4707bb856 100644 --- a/spec/javascripts/clusters/components/application_row_spec.js +++ b/spec/javascripts/clusters/components/application_row_spec.js @@ -12,7 +12,7 @@ import { REQUEST_FAILURE, } from '~/clusters/constants'; import applicationRow from '~/clusters/components/application_row.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { DEFAULT_APPLICATION_STATE } from '../services/mock_data'; describe('Application Row', () => { diff --git a/spec/javascripts/clusters/components/applications_spec.js b/spec/javascripts/clusters/components/applications_spec.js index 1a8affad4e3..d546543d273 100644 --- a/spec/javascripts/clusters/components/applications_spec.js +++ b/spec/javascripts/clusters/components/applications_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import applications from '~/clusters/components/applications.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Applications', () => { let vm; @@ -38,10 +38,75 @@ describe('Applications', () => { expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).toBeDefined(); }); - /* * / it('renders a row for GitLab Runner', () => { expect(vm.$el.querySelector('.js-cluster-application-row-runner')).toBeDefined(); }); - /* */ + }); + + describe('Ingress application', () => { + describe('when installed', () => { + describe('with ip address', () => { + it('renders ip address with a clipboard button', () => { + vm = mountComponent(Applications, { + applications: { + ingress: { + title: 'Ingress', + status: 'installed', + externalIp: '0.0.0.0', + }, + helm: { title: 'Helm Tiller' }, + runner: { title: 'GitLab Runner' }, + prometheus: { title: 'Prometheus' }, + }, + }); + + expect( + vm.$el.querySelector('.js-ip-address').value, + ).toEqual('0.0.0.0'); + + expect( + 
vm.$el.querySelector('.js-clipboard-btn').getAttribute('data-clipboard-text'), + ).toEqual('0.0.0.0'); + }); + }); + + describe('without ip address', () => { + it('renders an input text with a question mark and an alert text', () => { + vm = mountComponent(Applications, { + applications: { + ingress: { + title: 'Ingress', + status: 'installed', + }, + helm: { title: 'Helm Tiller' }, + runner: { title: 'GitLab Runner' }, + prometheus: { title: 'Prometheus' }, + }, + }); + + expect( + vm.$el.querySelector('.js-ip-address').value, + ).toEqual('?'); + + expect(vm.$el.querySelector('.js-no-ip-message')).not.toBe(null); + }); + }); + }); + + describe('before installing', () => { + it('does not render the IP address', () => { + vm = mountComponent(Applications, { + applications: { + helm: { title: 'Helm Tiller' }, + ingress: { title: 'Ingress' }, + runner: { title: 'GitLab Runner' }, + prometheus: { title: 'Prometheus' }, + }, + }); + + expect(vm.$el.textContent).not.toContain('Ingress IP Address'); + expect(vm.$el.querySelector('.js-ip-address')).toBe(null); + }); + }); }); }); diff --git a/spec/javascripts/clusters/services/mock_data.js b/spec/javascripts/clusters/services/mock_data.js index 253b3c45243..6ae7a792329 100644 --- a/spec/javascripts/clusters/services/mock_data.js +++ b/spec/javascripts/clusters/services/mock_data.js @@ -18,6 +18,7 @@ const CLUSTERS_MOCK_DATA = { name: 'ingress', status: APPLICATION_ERROR, status_reason: 'Cannot connect', + external_ip: null, }, { name: 'runner', status: APPLICATION_INSTALLING, diff --git a/spec/javascripts/clusters/stores/clusters_store_spec.js b/spec/javascripts/clusters/stores/clusters_store_spec.js index 726a4ed30de..8028faf2f02 100644 --- a/spec/javascripts/clusters/stores/clusters_store_spec.js +++ b/spec/javascripts/clusters/stores/clusters_store_spec.js @@ -75,6 +75,7 @@ describe('Clusters Store', () => { statusReason: mockResponseData.applications[1].status_reason, requestStatus: null, requestReason: null, + externalIp: null, }, runner: { title: 'GitLab Runner', diff --git a/spec/javascripts/commit/commit_pipeline_status_component_spec.js b/spec/javascripts/commit/commit_pipeline_status_component_spec.js index 90f290e845e..421fe62a1e7 100644 --- a/spec/javascripts/commit/commit_pipeline_status_component_spec.js +++ b/spec/javascripts/commit/commit_pipeline_status_component_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Commit pipeline status component', () => { let vm; diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js index 44ec9e4eabf..1daccc8dd02 100644 --- a/spec/javascripts/commits_spec.js +++ b/spec/javascripts/commits_spec.js @@ -4,6 +4,8 @@ import axios from '~/lib/utils/axios_utils'; import CommitsList from '~/commits'; describe('Commits List', () => { + let commitsList; + beforeEach(() => { setFixtures(` <form class="commits-search-form" action="/h5bp/html5-boilerplate/commits/master"> @@ -11,6 +13,7 @@ describe('Commits List', () => { </form> <ol id="commits-list"></ol> `); + commitsList = new CommitsList(25); }); it('should be defined', () => { @@ -19,7 +22,7 @@ describe('Commits List', () => { describe('processCommits', () => { it('should join commit headers', () 
=> { - CommitsList.$contentList = $(` + commitsList.$contentList = $(` <div> <li class="commit-header" data-day="2016-09-20"> <span class="day">20 Sep, 2016</span> @@ -39,7 +42,7 @@ describe('Commits List', () => { // The last commit header should be removed // since the previous one has the same data-day value. - expect(CommitsList.processCommits(data).find('li.commit-header').length).toBe(0); + expect(commitsList.processCommits(data).find('li.commit-header').length).toBe(0); }); }); @@ -48,8 +51,7 @@ describe('Commits List', () => { let mock; beforeEach(() => { - CommitsList.init(25); - CommitsList.searchField.val(''); + commitsList.searchField.val(''); spyOn(history, 'replaceState').and.stub(); mock = new MockAdapter(axios); @@ -66,11 +68,11 @@ describe('Commits List', () => { }); it('should save the last search string', (done) => { - CommitsList.searchField.val('GitLab'); - CommitsList.filterResults() + commitsList.searchField.val('GitLab'); + commitsList.filterResults() .then(() => { expect(ajaxSpy).toHaveBeenCalled(); - expect(CommitsList.lastSearch).toEqual('GitLab'); + expect(commitsList.lastSearch).toEqual('GitLab'); done(); }) @@ -78,10 +80,10 @@ describe('Commits List', () => { }); it('should not make ajax call if the input does not change', (done) => { - CommitsList.filterResults() + commitsList.filterResults() .then(() => { expect(ajaxSpy).not.toHaveBeenCalled(); - expect(CommitsList.lastSearch).toEqual(''); + expect(commitsList.lastSearch).toEqual(''); done(); }) diff --git a/spec/javascripts/cycle_analytics/banner_spec.js b/spec/javascripts/cycle_analytics/banner_spec.js index 64a76a6ee5f..2815bdba0c2 100644 --- a/spec/javascripts/cycle_analytics/banner_spec.js +++ b/spec/javascripts/cycle_analytics/banner_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import banner from '~/cycle_analytics/components/banner.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Cycle analytics banner', () => { let vm; diff --git a/spec/javascripts/cycle_analytics/total_time_component_spec.js b/spec/javascripts/cycle_analytics/total_time_component_spec.js index ad0fc38a856..691e03cb8a6 100644 --- a/spec/javascripts/cycle_analytics/total_time_component_spec.js +++ b/spec/javascripts/cycle_analytics/total_time_component_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import component from '~/cycle_analytics/components/total_time_component.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Total time component', () => { let vm; diff --git a/spec/javascripts/datetime_utility_spec.js b/spec/javascripts/datetime_utility_spec.js index 2e5b65f5610..a8d09202154 100644 --- a/spec/javascripts/datetime_utility_spec.js +++ b/spec/javascripts/datetime_utility_spec.js @@ -105,4 +105,68 @@ describe('dateInWords', () => { it('should return abbreviated month name', () => { expect(datetimeUtility.dateInWords(date, true)).toEqual('Jul 1, 2016'); }); + + it('should return date in words without year', () => { + expect(datetimeUtility.dateInWords(date, true, true)).toEqual('Jul 1'); + }); +}); + +describe('monthInWords', () => { + const date = new Date('2017-01-20'); + + it('returns month name from provided date', () => { + expect(datetimeUtility.monthInWords(date)).toBe('January'); + }); + + it('returns abbreviated month name from provided date', () => { + expect(datetimeUtility.monthInWords(date, 
true)).toBe('Jan'); + }); +}); + +describe('totalDaysInMonth', () => { + it('returns number of days in a month for given date', () => { + // 1st Feb, 2016 (leap year) + expect(datetimeUtility.totalDaysInMonth(new Date(2016, 1, 1))).toBe(29); + + // 1st Feb, 2017 + expect(datetimeUtility.totalDaysInMonth(new Date(2017, 1, 1))).toBe(28); + + // 1st Jan, 2017 + expect(datetimeUtility.totalDaysInMonth(new Date(2017, 0, 1))).toBe(31); + }); +}); + +describe('getSundays', () => { + it('returns array of dates representing all Sundays of the month', () => { + // December, 2017 (it has 5 Sundays) + const dateOfSundays = [3, 10, 17, 24, 31]; + const sundays = datetimeUtility.getSundays(new Date(2017, 11, 1)); + + expect(sundays.length).toBe(5); + sundays.forEach((sunday, index) => { + expect(sunday.getDate()).toBe(dateOfSundays[index]); + }); + }); +}); + +describe('getTimeframeWindow', () => { + it('returns array of dates representing a timeframe based on provided length and date', () => { + const date = new Date(2018, 0, 1); + const mockTimeframe = [ + new Date(2017, 9, 1), + new Date(2017, 10, 1), + new Date(2017, 11, 1), + new Date(2018, 0, 1), + new Date(2018, 1, 1), + new Date(2018, 2, 31), + ]; + const timeframe = datetimeUtility.getTimeframeWindow(6, date); + + expect(timeframe.length).toBe(6); + timeframe.forEach((timeframeItem, index) => { + expect(timeframeItem.getFullYear() === mockTimeframe[index].getFullYear()).toBeTruthy(); + expect(timeframeItem.getMonth() === mockTimeframe[index].getMonth()).toBeTruthy(); + expect(timeframeItem.getDate() === mockTimeframe[index].getDate()).toBeTruthy(); + }); + }); }); diff --git a/spec/javascripts/droplab/drop_down_spec.js b/spec/javascripts/droplab/drop_down_spec.js index 1225fe2cb66..896a04a1a07 100644 --- a/spec/javascripts/droplab/drop_down_spec.js +++ b/spec/javascripts/droplab/drop_down_spec.js @@ -1,8 +1,8 @@ import DropDown from '~/droplab/drop_down'; import utils from '~/droplab/utils'; -import { SELECTED_CLASS, IGNORE_CLASS } from '~/droplab/constants'; +import { SELECTED_CLASS } from '~/droplab/constants'; -describe('DropDown', function () { +describe('DropLab DropDown', function () { describe('class constructor', function () { beforeEach(function () { spyOn(DropDown.prototype, 'getItems'); @@ -128,93 +128,131 @@ describe('DropDown', function () { beforeEach(function () { this.classList = jasmine.createSpyObj('classList', ['contains']); this.list = { dispatchEvent: () => {} }; - this.dropdown = { hide: () => {}, list: this.list, addSelectedClass: () => {} }; - this.event = { preventDefault: () => {}, target: { classList: this.classList } }; + this.dropdown = { + hideOnClick: true, + hide: () => {}, + list: this.list, + addSelectedClass: () => {}, + }; + this.event = { + preventDefault: () => {}, + target: { + classList: this.classList, + closest: () => null, + }, + }; this.customEvent = {}; - this.closestElement = {}; + this.dummyListItem = document.createElement('li'); + spyOn(this.event.target, 'closest').and.callFake((selector) => { + if (selector === 'li') { + return this.dummyListItem; + } + + return null; + }); spyOn(this.dropdown, 'hide'); spyOn(this.dropdown, 'addSelectedClass'); spyOn(this.list, 'dispatchEvent'); spyOn(this.event, 'preventDefault'); spyOn(window, 'CustomEvent').and.returnValue(this.customEvent); - spyOn(utils, 'closest').and.returnValues(this.closestElement, undefined); this.classList.contains.and.returnValue(false); + }); + it('should call event.target.closest', function () { 
DropDown.prototype.clickEvent.call(this.dropdown, this.event); - }); - it('should call utils.closest', function () { - expect(utils.closest).toHaveBeenCalledWith(this.event.target, 'LI'); + expect(this.event.target.closest).toHaveBeenCalledWith('.droplab-item-ignore'); + expect(this.event.target.closest).toHaveBeenCalledWith('li'); }); it('should call addSelectedClass', function () { - expect(this.dropdown.addSelectedClass).toHaveBeenCalledWith(this.closestElement); + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + + expect(this.dropdown.addSelectedClass).toHaveBeenCalledWith(this.dummyListItem); }); it('should call .preventDefault', function () { + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + expect(this.event.preventDefault).toHaveBeenCalled(); }); it('should call .hide', function () { + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + expect(this.dropdown.hide).toHaveBeenCalled(); }); it('should construct CustomEvent', function () { - expect(window.CustomEvent).toHaveBeenCalledWith('click.dl', jasmine.any(Object)); - }); + DropDown.prototype.clickEvent.call(this.dropdown, this.event); - it('should call .classList.contains checking for IGNORE_CLASS', function () { - expect(this.classList.contains).toHaveBeenCalledWith(IGNORE_CLASS); + expect(window.CustomEvent).toHaveBeenCalledWith('click.dl', jasmine.any(Object)); }); it('should call .dispatchEvent with the customEvent', function () { + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + expect(this.list.dispatchEvent).toHaveBeenCalledWith(this.customEvent); }); describe('if the target is a UL element', function () { beforeEach(function () { - this.event = { preventDefault: () => {}, target: { tagName: 'UL', classList: this.classList } }; - - spyOn(this.event, 'preventDefault'); - utils.closest.calls.reset(); + this.event.target = document.createElement('ul'); - DropDown.prototype.clickEvent.call(this.dropdown, this.event); + spyOn(this.event.target, 'closest'); }); it('should return immediately', function () { - expect(utils.closest).not.toHaveBeenCalled(); + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + + expect(this.event.target.closest).not.toHaveBeenCalled(); + expect(this.dropdown.addSelectedClass).not.toHaveBeenCalled(); }); }); - describe('if the target has the IGNORE_CLASS class', function () { + describe('if the target has the droplab-item-ignore class', function () { beforeEach(function () { - this.event = { preventDefault: () => {}, target: { tagName: 'LI', classList: this.classList } }; + this.ignoredButton = document.createElement('button'); + this.ignoredButton.classList.add('droplab-item-ignore'); + this.event.target = this.ignoredButton; - spyOn(this.event, 'preventDefault'); - this.classList.contains.and.returnValue(true); - utils.closest.calls.reset(); + spyOn(this.ignoredButton, 'closest').and.callThrough(); + }); + it('does not select element', function () { DropDown.prototype.clickEvent.call(this.dropdown, this.event); - }); - it('should return immediately', function () { - expect(utils.closest).not.toHaveBeenCalled(); + expect(this.ignoredButton.closest.calls.count()).toBe(1); + expect(this.ignoredButton.closest).toHaveBeenCalledWith('.droplab-item-ignore'); + expect(this.dropdown.addSelectedClass).not.toHaveBeenCalled(); }); }); describe('if no selected element exists', function () { beforeEach(function () { this.event.preventDefault.calls.reset(); - this.clickEvent = DropDown.prototype.clickEvent.call(this.dropdown, this.event); 
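The drop_down_spec changes above replace the custom utils.closest helper and the IGNORE_CLASS check with the native Element.closest() API. The following is a sketch of a click handler that would satisfy the updated expectations; it is inferred from the spec assertions only and is not the actual implementation in ~/droplab/drop_down, and the CustomEvent detail payload is an assumption:

    // Ignore clicks on the list element itself or on anything flagged with
    // .droplab-item-ignore; otherwise select the closest <li> and notify
    // listeners via a 'click.dl' CustomEvent on the list.
    function clickEvent(event) {
      if (event.target.tagName === 'UL') return;
      if (event.target.closest('.droplab-item-ignore')) return;

      const listItem = event.target.closest('li');
      if (!listItem) return;

      event.preventDefault();
      this.addSelectedClass(listItem);
      if (this.hideOnClick) this.hide();

      this.list.dispatchEvent(
        new CustomEvent('click.dl', { detail: { selected: listItem } }),
      );
    }

Relying on Element.closest() keeps the early-return ordering the specs assert (UL target and ignored items bail out before preventDefault, a missing <li> bails out before any selection) without a bespoke DOM-walking utility.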
- }); - - it('should return undefined', function () { - expect(this.clickEvent).toBe(undefined); + this.dummyListItem = null; }); it('should return before .preventDefault is called', function () { + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + expect(this.event.preventDefault).not.toHaveBeenCalled(); + expect(this.dropdown.addSelectedClass).not.toHaveBeenCalled(); + }); + }); + + describe('if hideOnClick is false', () => { + beforeEach(function () { + this.dropdown.hideOnClick = false; + this.dropdown.hide.calls.reset(); + }); + + it('should not call .hide', function () { + DropDown.prototype.clickEvent.call(this.dropdown, this.event); + + expect(this.dropdown.hide).not.toHaveBeenCalled(); }); }); }); @@ -278,20 +316,23 @@ describe('DropDown', function () { describe('addEvents', function () { beforeEach(function () { - this.list = { addEventListener: () => {} }; + this.list = { + addEventListener: () => {}, + querySelectorAll: () => [], + }; this.dropdown = { list: this.list, clickEvent: () => {}, closeDropdown: () => {}, eventWrapper: {}, }; + }); + it('should call .addEventListener', function () { spyOn(this.list, 'addEventListener'); DropDown.prototype.addEvents.call(this.dropdown); - }); - it('should call .addEventListener', function () { expect(this.list.addEventListener).toHaveBeenCalledWith('click', jasmine.any(Function)); expect(this.list.addEventListener).toHaveBeenCalledWith('keyup', jasmine.any(Function)); }); diff --git a/spec/javascripts/environments/emtpy_state_spec.js b/spec/javascripts/environments/emtpy_state_spec.js index 82de35933f5..10a19af4175 100644 --- a/spec/javascripts/environments/emtpy_state_spec.js +++ b/spec/javascripts/environments/emtpy_state_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import emptyState from '~/environments/components/empty_state.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('environments empty state', () => { let vm; diff --git a/spec/javascripts/environments/environment_table_spec.js b/spec/javascripts/environments/environment_table_spec.js index 9bd42863759..0e5e50a59a5 100644 --- a/spec/javascripts/environments/environment_table_spec.js +++ b/spec/javascripts/environments/environment_table_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import environmentTableComp from '~/environments/components/environments_table.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Environment table', () => { let Component; diff --git a/spec/javascripts/environments/environments_app_spec.js b/spec/javascripts/environments/environments_app_spec.js index a41a4e5a3f7..5bb37304372 100644 --- a/spec/javascripts/environments/environments_app_spec.js +++ b/spec/javascripts/environments/environments_app_spec.js @@ -1,9 +1,9 @@ import _ from 'underscore'; import Vue from 'vue'; import environmentsComponent from '~/environments/components/environments_app.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import { headersInterceptor } from 'spec/helpers/vue_resource_helper'; import { environment, folder } from './mock_data'; -import { headersInterceptor } from '../helpers/vue_resource_helper'; -import mountComponent from '../helpers/vue_mount_component_helper'; describe('Environment', () => { const mockData = { diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js 
b/spec/javascripts/environments/folder/environments_folder_view_spec.js index a085074d312..906a1116974 100644 --- a/spec/javascripts/environments/folder/environments_folder_view_spec.js +++ b/spec/javascripts/environments/folder/environments_folder_view_spec.js @@ -1,9 +1,9 @@ import _ from 'underscore'; import Vue from 'vue'; import environmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue'; +import { headersInterceptor } from 'spec/helpers/vue_resource_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { environmentsList } from '../mock_data'; -import { headersInterceptor } from '../../helpers/vue_resource_helper'; -import mountComponent from '../../helpers/vue_mount_component_helper'; describe('Environments Folder View', () => { let Component; diff --git a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js index 34ffc7b1016..1b1f28f3ddb 100644 --- a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js +++ b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js @@ -8,7 +8,7 @@ import { mouseenter, inserted, } from '~/feature_highlight/feature_highlight_helper'; -import getSetTimeoutPromise from '../helpers/set_timeout_promise_helper'; +import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper'; describe('feature highlight helper', () => { describe('getSelector', () => { diff --git a/spec/javascripts/feature_highlight/feature_highlight_spec.js b/spec/javascripts/feature_highlight/feature_highlight_spec.js index 6e1b0429ab7..f3f80cb3771 100644 --- a/spec/javascripts/feature_highlight/feature_highlight_spec.js +++ b/spec/javascripts/feature_highlight/feature_highlight_spec.js @@ -1,11 +1,13 @@ import * as featureHighlightHelper from '~/feature_highlight/feature_highlight_helper'; import * as featureHighlight from '~/feature_highlight/feature_highlight'; +import axios from '~/lib/utils/axios_utils'; +import MockAdapter from 'axios-mock-adapter'; describe('feature highlight', () => { beforeEach(() => { setFixtures(` <div> - <div class="js-feature-highlight" data-highlight="test" data-highlight-priority="10" disabled> + <div class="js-feature-highlight" data-highlight="test" data-highlight-priority="10" data-dismiss-endpoint="/test" disabled> Trigger </div> </div> @@ -19,13 +21,21 @@ describe('feature highlight', () => { }); describe('setupFeatureHighlightPopover', () => { + let mock; const selector = '.js-feature-highlight[data-highlight=test]'; + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet('/test').reply(200); spyOn(window, 'addEventListener'); spyOn(window, 'removeEventListener'); featureHighlight.setupFeatureHighlightPopover('test', 0); }); + afterEach(() => { + mock.restore(); + }); + it('setup popover content', () => { const $popoverContent = $('.feature-highlight-popover-content'); const outerHTML = $popoverContent.prop('outerHTML'); @@ -51,15 +61,6 @@ describe('feature highlight', () => { }, 0); }); - it('setup inserted.bs.popover', () => { - $(selector).trigger('mouseenter'); - const popoverId = $(selector).attr('aria-describedby'); - const spyEvent = spyOnEvent(`#${popoverId} .dismiss-feature-highlight`, 'click'); - - $(`#${popoverId} .dismiss-feature-highlight`).click(); - expect(spyEvent).toHaveBeenTriggered(); - }); - it('setup show.bs.popover', () => { $(selector).trigger('show.bs.popover'); expect(window.addEventListener).toHaveBeenCalledWith('scroll', 
jasmine.any(Function)); @@ -75,9 +76,19 @@ describe('feature highlight', () => { }); it('displays popover', () => { - expect($(selector).attr('aria-describedby')).toBeFalsy(); + expect(document.querySelector(selector).getAttribute('aria-describedby')).toBeFalsy(); $(selector).trigger('mouseenter'); - expect($(selector).attr('aria-describedby')).toBeTruthy(); + expect(document.querySelector(selector).getAttribute('aria-describedby')).toBeTruthy(); + }); + + it('toggles when clicked', () => { + $(selector).trigger('mouseenter'); + const popoverId = $(selector).attr('aria-describedby'); + const toggleSpy = spyOn(featureHighlightHelper.togglePopover, 'call'); + + $(`#${popoverId} .dismiss-feature-highlight`).click(); + + expect(toggleSpy).toHaveBeenCalled(); }); }); diff --git a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js index 79447787fc9..59bd2650081 100644 --- a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js +++ b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js @@ -1,8 +1,7 @@ import Vue from 'vue'; import eventHub from '~/filtered_search/event_hub'; -import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content'; - -import '~/filtered_search/filtered_search_token_keys'; +import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content.vue'; +import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys'; const createComponent = (propsData) => { const Component = Vue.extend(RecentSearchesDropdownContent); @@ -19,14 +18,14 @@ const trimMarkupWhitespace = text => text.replace(/(\n|\s)+/gm, ' ').trim(); describe('RecentSearchesDropdownContent', () => { const propsDataWithoutItems = { items: [], - allowedKeys: gl.FilteredSearchTokenKeys.getKeys(), + allowedKeys: FilteredSearchTokenKeys.getKeys(), }; const propsDataWithItems = { items: [ 'foo', 'author:@root label:~foo bar', ], - allowedKeys: gl.FilteredSearchTokenKeys.getKeys(), + allowedKeys: FilteredSearchTokenKeys.getKeys(), }; let vm; diff --git a/spec/javascripts/filtered_search/dropdown_user_spec.js b/spec/javascripts/filtered_search/dropdown_user_spec.js index 02415485d19..c37a964975d 100644 --- a/spec/javascripts/filtered_search/dropdown_user_spec.js +++ b/spec/javascripts/filtered_search/dropdown_user_spec.js @@ -1,25 +1,25 @@ -import '~/filtered_search/dropdown_utils'; -import '~/filtered_search/filtered_search_tokenizer'; -import '~/filtered_search/filtered_search_dropdown'; -import '~/filtered_search/dropdown_user'; +import DropdownUtils from '~/filtered_search/dropdown_utils'; +import DropdownUser from '~/filtered_search/dropdown_user'; +import FilteredSearchTokenizer from '~/filtered_search/filtered_search_tokenizer'; +import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys'; describe('Dropdown User', () => { describe('getSearchInput', () => { let dropdownUser; beforeEach(() => { - spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); - spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); - spyOn(gl.DropdownUser.prototype, 'getGroupId').and.callFake(() => {}); - spyOn(gl.DropdownUtils, 'getSearchInput').and.callFake(() => {}); + spyOn(DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); + spyOn(DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); + 
spyOn(DropdownUser.prototype, 'getGroupId').and.callFake(() => {}); + spyOn(DropdownUtils, 'getSearchInput').and.callFake(() => {}); - dropdownUser = new gl.DropdownUser({ - tokenKeys: gl.FilteredSearchTokenKeys, + dropdownUser = new DropdownUser({ + tokenKeys: FilteredSearchTokenKeys, }); }); it('should not return the double quote found in value', () => { - spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.returnValue({ + spyOn(FilteredSearchTokenizer, 'processTokens').and.returnValue({ lastToken: '"johnny appleseed', }); @@ -27,7 +27,7 @@ describe('Dropdown User', () => { }); it('should not return the single quote found in value', () => { - spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.returnValue({ + spyOn(FilteredSearchTokenizer, 'processTokens').and.returnValue({ lastToken: '\'larry boy', }); @@ -37,22 +37,22 @@ describe('Dropdown User', () => { describe('config AjaxFilter\'s endpoint', () => { beforeEach(() => { - spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); - spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); - spyOn(gl.DropdownUser.prototype, 'getGroupId').and.callFake(() => {}); + spyOn(DropdownUser.prototype, 'bindEvents').and.callFake(() => {}); + spyOn(DropdownUser.prototype, 'getProjectId').and.callFake(() => {}); + spyOn(DropdownUser.prototype, 'getGroupId').and.callFake(() => {}); }); it('should return endpoint', () => { window.gon = { relative_url_root: '', }; - const dropdown = new gl.DropdownUser(); + const dropdown = new DropdownUser(); expect(dropdown.config.AjaxFilter.endpoint).toBe('/autocomplete/users.json'); }); it('should return endpoint when relative_url_root is undefined', () => { - const dropdown = new gl.DropdownUser(); + const dropdown = new DropdownUser(); expect(dropdown.config.AjaxFilter.endpoint).toBe('/autocomplete/users.json'); }); @@ -61,7 +61,7 @@ describe('Dropdown User', () => { window.gon = { relative_url_root: '/gitlab_directory', }; - const dropdown = new gl.DropdownUser(); + const dropdown = new DropdownUser(); expect(dropdown.config.AjaxFilter.endpoint).toBe('/gitlab_directory/autocomplete/users.json'); }); @@ -82,7 +82,7 @@ describe('Dropdown User', () => { loadFixtures(fixtureTemplate); authorFilterDropdownElement = document.querySelector('#js-dropdown-author'); const dummyInput = document.createElement('div'); - dropdown = new gl.DropdownUser({ + dropdown = new DropdownUser({ dropdown: authorFilterDropdownElement, input: dummyInput, }); diff --git a/spec/javascripts/filtered_search/dropdown_utils_spec.js b/spec/javascripts/filtered_search/dropdown_utils_spec.js index b1b3d43f241..3d6dec19eca 100644 --- a/spec/javascripts/filtered_search/dropdown_utils_spec.js +++ b/spec/javascripts/filtered_search/dropdown_utils_spec.js @@ -1,6 +1,6 @@ -import '~/filtered_search/dropdown_utils'; -import '~/filtered_search/filtered_search_tokenizer'; -import '~/filtered_search/filtered_search_dropdown_manager'; +import DropdownUtils from '~/filtered_search/dropdown_utils'; +import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager'; +import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys'; import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper'; describe('Dropdown Utils', () => { @@ -9,25 +9,25 @@ describe('Dropdown Utils', () => { describe('getEscapedText', () => { it('should return same word when it has no space', () => { - const escaped = gl.DropdownUtils.getEscapedText('textWithoutSpace'); + const escaped = 
DropdownUtils.getEscapedText('textWithoutSpace'); expect(escaped).toBe('textWithoutSpace'); }); it('should escape with double quotes', () => { - let escaped = gl.DropdownUtils.getEscapedText('text with space'); + let escaped = DropdownUtils.getEscapedText('text with space'); expect(escaped).toBe('"text with space"'); - escaped = gl.DropdownUtils.getEscapedText('won\'t fix'); + escaped = DropdownUtils.getEscapedText('won\'t fix'); expect(escaped).toBe('"won\'t fix"'); }); it('should escape with single quotes', () => { - const escaped = gl.DropdownUtils.getEscapedText('won"t fix'); + const escaped = DropdownUtils.getEscapedText('won"t fix'); expect(escaped).toBe('\'won"t fix\''); }); it('should escape with single quotes by default', () => { - const escaped = gl.DropdownUtils.getEscapedText('won"t\' fix'); + const escaped = DropdownUtils.getEscapedText('won"t\' fix'); expect(escaped).toBe('\'won"t\' fix\''); }); }); @@ -49,14 +49,14 @@ describe('Dropdown Utils', () => { it('should filter without symbol', () => { input.value = 'roo'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('@', input, item); + const updatedItem = DropdownUtils.filterWithSymbol('@', input, item); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with symbol', () => { input.value = '@roo'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('@', input, item); + const updatedItem = DropdownUtils.filterWithSymbol('@', input, item); expect(updatedItem.droplab_hidden).toBe(false); }); @@ -68,56 +68,56 @@ describe('Dropdown Utils', () => { it('should filter with double quote', () => { input.value = '"'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with double quote and symbol', () => { input.value = '~"'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with double quote and multiple words', () => { input.value = '"community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with double quote, symbol and multiple words', () => { input.value = '~"community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with single quote', () => { input.value = '\''; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with single quote and symbol', () => { input.value = '~\''; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with single quote and multiple words', () => { input.value = '\'community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, 
multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); it('should filter with single quote, symbol and multiple words', () => { input.value = '~\'community con'; - const updatedItem = gl.DropdownUtils.filterWithSymbol('~', input, multipleWordItem); + const updatedItem = DropdownUtils.filterWithSymbol('~', input, multipleWordItem); expect(updatedItem.droplab_hidden).toBe(false); }); }); @@ -137,7 +137,7 @@ describe('Dropdown Utils', () => { `); input = document.getElementById('test'); - allowedKeys = gl.FilteredSearchTokenKeys.getKeys(); + allowedKeys = FilteredSearchTokenKeys.getKeys(); }); function config() { @@ -149,26 +149,26 @@ describe('Dropdown Utils', () => { it('should filter', () => { input.value = 'l'; - let updatedItem = gl.DropdownUtils.filterHint(config(), { + let updatedItem = DropdownUtils.filterHint(config(), { hint: 'label', }); expect(updatedItem.droplab_hidden).toBe(false); input.value = 'o'; - updatedItem = gl.DropdownUtils.filterHint(config(), { + updatedItem = DropdownUtils.filterHint(config(), { hint: 'label', }); expect(updatedItem.droplab_hidden).toBe(true); }); it('should return droplab_hidden false when item has no hint', () => { - const updatedItem = gl.DropdownUtils.filterHint(config(), {}, ''); + const updatedItem = DropdownUtils.filterHint(config(), {}, ''); expect(updatedItem.droplab_hidden).toBe(false); }); it('should allow multiple if item.type is array', () => { input.value = 'label:~first la'; - const updatedItem = gl.DropdownUtils.filterHint(config(), { + const updatedItem = DropdownUtils.filterHint(config(), { hint: 'label', type: 'array', }); @@ -177,12 +177,12 @@ describe('Dropdown Utils', () => { it('should prevent multiple if item.type is not array', () => { input.value = 'milestone:~first mile'; - let updatedItem = gl.DropdownUtils.filterHint(config(), { + let updatedItem = DropdownUtils.filterHint(config(), { hint: 'milestone', }); expect(updatedItem.droplab_hidden).toBe(true); - updatedItem = gl.DropdownUtils.filterHint(config(), { + updatedItem = DropdownUtils.filterHint(config(), { hint: 'milestone', type: 'string', }); @@ -204,7 +204,7 @@ describe('Dropdown Utils', () => { color: '#000000', }; - const updated = gl.DropdownUtils.mergeDuplicateLabels(dataMap, newLabel); + const updated = DropdownUtils.mergeDuplicateLabels(dataMap, newLabel); expect(updated[newLabel.title]).toEqual(newLabel); }); @@ -214,36 +214,36 @@ describe('Dropdown Utils', () => { color: '#000000', }; - const updated = gl.DropdownUtils.mergeDuplicateLabels(dataMap, duplicate); + const updated = DropdownUtils.mergeDuplicateLabels(dataMap, duplicate); expect(updated.label.multipleColors).toEqual([dataMap.label.color, duplicate.color]); }); }); describe('duplicateLabelColor', () => { it('should linear-gradient 2 colors', () => { - const gradient = gl.DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000']); + const gradient = DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000']); expect(gradient).toEqual('linear-gradient(#FFFFFF 0%, #FFFFFF 50%, #000000 50%, #000000 100%)'); }); it('should linear-gradient 3 colors', () => { - const gradient = gl.DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000', '#333333']); + const gradient = DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000', '#333333']); expect(gradient).toEqual('linear-gradient(#FFFFFF 0%, #FFFFFF 33%, #000000 33%, #000000 66%, #333333 66%, #333333 100%)'); }); it('should linear-gradient 4 colors', () => { - 
const gradient = gl.DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000', '#333333', '#DDDDDD']); + const gradient = DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000', '#333333', '#DDDDDD']); expect(gradient).toEqual('linear-gradient(#FFFFFF 0%, #FFFFFF 25%, #000000 25%, #000000 50%, #333333 50%, #333333 75%, #DDDDDD 75%, #DDDDDD 100%)'); }); it('should not linear-gradient more than 4 colors', () => { - const gradient = gl.DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000', '#333333', '#DDDDDD', '#EEEEEE']); + const gradient = DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000', '#333333', '#DDDDDD', '#EEEEEE']); expect(gradient.indexOf('#EEEEEE') === -1).toEqual(true); }); }); describe('duplicateLabelPreprocessing', () => { it('should set preprocessed to true', () => { - const results = gl.DropdownUtils.duplicateLabelPreprocessing([]); + const results = DropdownUtils.duplicateLabelPreprocessing([]); expect(results.preprocessed).toEqual(true); }); @@ -255,7 +255,7 @@ describe('Dropdown Utils', () => { title: 'label2', color: '#000000', }]; - const results = gl.DropdownUtils.duplicateLabelPreprocessing(data); + const results = DropdownUtils.duplicateLabelPreprocessing(data); expect(results.length).toEqual(2); expect(results[0]).toEqual(data[0]); @@ -270,14 +270,14 @@ describe('Dropdown Utils', () => { title: 'label', color: '#000000', }]; - const results = gl.DropdownUtils.duplicateLabelPreprocessing(data); + const results = DropdownUtils.duplicateLabelPreprocessing(data); it('should merge duplicate labels', () => { expect(results.length).toEqual(1); }); it('should convert multiple colored labels into linear-gradient', () => { - expect(results[0].color).toEqual(gl.DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000'])); + expect(results[0].color).toEqual(DropdownUtils.duplicateLabelColor(['#FFFFFF', '#000000'])); }); it('should set multiple colored label text color to black', () => { @@ -288,7 +288,7 @@ describe('Dropdown Utils', () => { describe('setDataValueIfSelected', () => { beforeEach(() => { - spyOn(gl.FilteredSearchDropdownManager, 'addWordToInput') + spyOn(FilteredSearchDropdownManager, 'addWordToInput') .and.callFake(() => {}); }); @@ -297,8 +297,8 @@ describe('Dropdown Utils', () => { getAttribute: () => 'value', }; - gl.DropdownUtils.setDataValueIfSelected(null, selected); - expect(gl.FilteredSearchDropdownManager.addWordToInput.calls.count()).toEqual(1); + DropdownUtils.setDataValueIfSelected(null, selected); + expect(FilteredSearchDropdownManager.addWordToInput.calls.count()).toEqual(1); }); it('returns true when dataValue exists', () => { @@ -306,7 +306,7 @@ describe('Dropdown Utils', () => { getAttribute: () => 'value', }; - const result = gl.DropdownUtils.setDataValueIfSelected(null, selected); + const result = DropdownUtils.setDataValueIfSelected(null, selected); expect(result).toBe(true); }); @@ -315,7 +315,7 @@ describe('Dropdown Utils', () => { getAttribute: () => null, }; - const result = gl.DropdownUtils.setDataValueIfSelected(null, selected); + const result = DropdownUtils.setDataValueIfSelected(null, selected); expect(result).toBe(false); }); }); @@ -325,7 +325,7 @@ describe('Dropdown Utils', () => { const value = 'label:none '; it('should return selectionStart when cursor is at the trailing space', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 11, value, }); @@ -335,7 +335,7 @@ describe('Dropdown Utils', () => { }); it('should 
return input when cursor is at the start of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 0, value, }); @@ -345,7 +345,7 @@ describe('Dropdown Utils', () => { }); it('should return input when cursor is at the middle of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 7, value, }); @@ -355,7 +355,7 @@ describe('Dropdown Utils', () => { }); it('should return input when cursor is at the end of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 10, value, }); @@ -369,7 +369,7 @@ describe('Dropdown Utils', () => { const value = 'label:~"Community Contribution"'; it('should return input when cursor is after the first word', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 17, value, }); @@ -379,7 +379,7 @@ describe('Dropdown Utils', () => { }); it('should return input when cursor is before the second word', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 18, value, }); @@ -393,7 +393,7 @@ describe('Dropdown Utils', () => { const value = 'label:~"Community Contribution'; it('should return entire input when cursor is at the start of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 0, value, }); @@ -403,7 +403,7 @@ describe('Dropdown Utils', () => { }); it('should return entire input when cursor is at the end of input', () => { - const { left, right } = gl.DropdownUtils.getInputSelectionPosition({ + const { left, right } = DropdownUtils.getInputSelectionPosition({ selectionStart: 30, value, }); @@ -433,7 +433,7 @@ describe('Dropdown Utils', () => { const valueContainer = authorToken.querySelector('.value-container'); valueContainer.dataset.originalValue = originalValue; - const searchQuery = gl.DropdownUtils.getSearchQuery(); + const searchQuery = DropdownUtils.getSearchQuery(); expect(searchQuery).toBe(' search term author:original dance'); }); diff --git a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js index 5c7e9115aac..71c14582329 100644 --- a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js @@ -1,6 +1,4 @@ -import '~/filtered_search/filtered_search_visual_tokens'; -import '~/filtered_search/filtered_search_tokenizer'; -import '~/filtered_search/filtered_search_dropdown_manager'; +import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager'; describe('Filtered Search Dropdown Manager', () => { beforeEach(() => { @@ -28,7 +26,7 @@ describe('Filtered Search Dropdown Manager', () => { describe('input has no existing value', () => { it('should add just tokenName', () => { - gl.FilteredSearchDropdownManager.addWordToInput('milestone'); + FilteredSearchDropdownManager.addWordToInput('milestone'); const token = document.querySelector('.tokens-container 
.js-visual-token'); @@ -38,7 +36,7 @@ describe('Filtered Search Dropdown Manager', () => { }); it('should add tokenName and tokenValue', () => { - gl.FilteredSearchDropdownManager.addWordToInput('label'); + FilteredSearchDropdownManager.addWordToInput('label'); let token = document.querySelector('.tokens-container .js-visual-token'); @@ -46,9 +44,9 @@ describe('Filtered Search Dropdown Manager', () => { expect(token.querySelector('.name').innerText).toBe('label'); expect(getInputValue()).toBe(''); - gl.FilteredSearchDropdownManager.addWordToInput('label', 'none'); + FilteredSearchDropdownManager.addWordToInput('label', 'none'); // We have to get that reference again - // Because gl.FilteredSearchDropdownManager deletes the previous token + // Because FilteredSearchDropdownManager deletes the previous token token = document.querySelector('.tokens-container .js-visual-token'); expect(token.classList.contains('filtered-search-token')).toEqual(true); @@ -61,7 +59,7 @@ describe('Filtered Search Dropdown Manager', () => { describe('input has existing value', () => { it('should be able to just add tokenName', () => { setInputValue('a'); - gl.FilteredSearchDropdownManager.addWordToInput('author'); + FilteredSearchDropdownManager.addWordToInput('author'); const token = document.querySelector('.tokens-container .js-visual-token'); @@ -71,10 +69,10 @@ describe('Filtered Search Dropdown Manager', () => { }); it('should replace tokenValue', () => { - gl.FilteredSearchDropdownManager.addWordToInput('author'); + FilteredSearchDropdownManager.addWordToInput('author'); setInputValue('roo'); - gl.FilteredSearchDropdownManager.addWordToInput(null, '@root'); + FilteredSearchDropdownManager.addWordToInput(null, '@root'); const token = document.querySelector('.tokens-container .js-visual-token'); @@ -85,10 +83,10 @@ describe('Filtered Search Dropdown Manager', () => { }); it('should add tokenValues containing spaces', () => { - gl.FilteredSearchDropdownManager.addWordToInput('label'); + FilteredSearchDropdownManager.addWordToInput('label'); setInputValue('"test '); - gl.FilteredSearchDropdownManager.addWordToInput('label', '~\'"test me"\''); + FilteredSearchDropdownManager.addWordToInput('label', '~\'"test me"\''); const token = document.querySelector('.tokens-container .js-visual-token'); diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js index b8890e4cda1..95d02974bdc 100644 --- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js @@ -3,17 +3,19 @@ import * as recentSearchesStoreSrc from '~/filtered_search/stores/recent_searche import RecentSearchesService from '~/filtered_search/services/recent_searches_service'; import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error'; import RecentSearchesRoot from '~/filtered_search/recent_searches_root'; +import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys'; import '~/lib/utils/common_utils'; -import '~/filtered_search/filtered_search_token_keys'; -import '~/filtered_search/filtered_search_tokenizer'; -import '~/filtered_search/filtered_search_dropdown_manager'; -import '~/filtered_search/filtered_search_manager'; +import DropdownUtils from '~/filtered_search/dropdown_utils'; +import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual_tokens'; +import FilteredSearchDropdownManager from 
'~/filtered_search/filtered_search_dropdown_manager'; +import FilteredSearchManager from '~/filtered_search/filtered_search_manager'; import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper'; describe('Filtered Search Manager', () => { let input; let manager; let tokensContainer; + const page = 'issues'; const placeholder = 'Search or filter results...'; function dispatchBackspaceEvent(element, eventType) { @@ -48,21 +50,21 @@ describe('Filtered Search Manager', () => { </div> `); - spyOn(gl.FilteredSearchDropdownManager.prototype, 'setDropdown').and.callFake(() => {}); + spyOn(FilteredSearchDropdownManager.prototype, 'setDropdown').and.callFake(() => {}); }); const initializeManager = () => { /* eslint-disable jasmine/no-unsafe-spy */ - spyOn(gl.FilteredSearchManager.prototype, 'loadSearchParamsFromURL').and.callFake(() => {}); - spyOn(gl.FilteredSearchManager.prototype, 'tokenChange').and.callFake(() => {}); - spyOn(gl.FilteredSearchDropdownManager.prototype, 'updateDropdownOffset').and.callFake(() => {}); + spyOn(FilteredSearchManager.prototype, 'loadSearchParamsFromURL').and.callFake(() => {}); + spyOn(FilteredSearchManager.prototype, 'tokenChange').and.callFake(() => {}); + spyOn(FilteredSearchDropdownManager.prototype, 'updateDropdownOffset').and.callFake(() => {}); spyOn(gl.utils, 'getParameterByName').and.returnValue(null); - spyOn(gl.FilteredSearchVisualTokens, 'unselectTokens').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'unselectTokens').and.callThrough(); /* eslint-enable jasmine/no-unsafe-spy */ input = document.querySelector('.filtered-search'); tokensContainer = document.querySelector('.tokens-container'); - manager = new gl.FilteredSearchManager(); + manager = new FilteredSearchManager({ page }); manager.setup(); }; @@ -80,19 +82,19 @@ describe('Filtered Search Manager', () => { }); it('should instantiate RecentSearchesStore with isLocalStorageAvailable', () => { - manager = new gl.FilteredSearchManager(); + manager = new FilteredSearchManager({ page }); expect(RecentSearchesService.isAvailable).toHaveBeenCalled(); expect(recentSearchesStoreSrc.default).toHaveBeenCalledWith({ isLocalStorageAvailable, - allowedKeys: gl.FilteredSearchTokenKeys.getKeys(), + allowedKeys: FilteredSearchTokenKeys.getKeys(), }); }); }); describe('setup', () => { beforeEach(() => { - manager = new gl.FilteredSearchManager(); + manager = new FilteredSearchManager({ page }); }); it('should not instantiate Flash if an RecentSearchesServiceError is caught', () => { @@ -107,7 +109,7 @@ describe('Filtered Search Manager', () => { describe('searchState', () => { beforeEach(() => { - spyOn(gl.FilteredSearchManager.prototype, 'search').and.callFake(() => {}); + spyOn(FilteredSearchManager.prototype, 'search').and.callFake(() => {}); initializeManager(); }); @@ -133,7 +135,7 @@ describe('Filtered Search Manager', () => { }; manager.searchState(e); - expect(gl.FilteredSearchManager.prototype.search).not.toHaveBeenCalled(); + expect(FilteredSearchManager.prototype.search).not.toHaveBeenCalled(); }); it('should call search when there is state', () => { @@ -148,7 +150,7 @@ describe('Filtered Search Manager', () => { }; manager.searchState(e); - expect(gl.FilteredSearchManager.prototype.search).toHaveBeenCalledWith('opened'); + expect(FilteredSearchManager.prototype.search).toHaveBeenCalledWith('opened'); }); }); @@ -250,44 +252,44 @@ describe('Filtered Search Manager', () => { }); it('removes last token', () => { - spyOn(gl.FilteredSearchVisualTokens, 
'removeLastTokenPartial').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); dispatchBackspaceEvent(input, 'keyup'); dispatchBackspaceEvent(input, 'keyup'); - expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled(); + expect(FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled(); }); it('sets the input', () => { - spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); dispatchDeleteEvent(input, 'keyup'); dispatchDeleteEvent(input, 'keyup'); - expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).toHaveBeenCalled(); + expect(FilteredSearchVisualTokens.getLastTokenPartial).toHaveBeenCalled(); expect(input.value).toEqual('~bug'); }); }); it('does not remove token or change input when there is existing input', () => { - spyOn(gl.FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); - spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); input.value = 'text'; dispatchDeleteEvent(input, 'keyup'); - expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled(); - expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled(); + expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled(); + expect(FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled(); expect(input.value).toEqual('text'); }); it('does not remove previous token on single backspace press', () => { - spyOn(gl.FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); - spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough(); input.value = 't'; dispatchDeleteEvent(input, 'keyup'); - expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled(); - expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled(); + expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled(); + expect(FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled(); expect(input.value).toEqual('t'); }); }); @@ -308,7 +310,7 @@ describe('Filtered Search Manager', () => { describe('unselected token', () => { beforeEach(() => { - spyOn(gl.FilteredSearchManager.prototype, 'removeSelectedToken').and.callThrough(); + spyOn(FilteredSearchManager.prototype, 'removeSelectedToken').and.callThrough(); tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML( FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none'), @@ -379,16 +381,16 @@ describe('Filtered Search Manager', () => { describe('removeSelectedToken', () => { beforeEach(() => { - spyOn(gl.FilteredSearchVisualTokens, 'removeSelectedToken').and.callThrough(); - spyOn(gl.FilteredSearchManager.prototype, 'handleInputPlaceholder').and.callThrough(); - spyOn(gl.FilteredSearchManager.prototype, 'toggleClearSearchButton').and.callThrough(); + spyOn(FilteredSearchVisualTokens, 'removeSelectedToken').and.callThrough(); + spyOn(FilteredSearchManager.prototype, 'handleInputPlaceholder').and.callThrough(); + spyOn(FilteredSearchManager.prototype, 
'toggleClearSearchButton').and.callThrough(); initializeManager(); }); it('calls FilteredSearchVisualTokens.removeSelectedToken', () => { manager.removeSelectedToken(); - expect(gl.FilteredSearchVisualTokens.removeSelectedToken).toHaveBeenCalled(); + expect(FilteredSearchVisualTokens.removeSelectedToken).toHaveBeenCalled(); }); it('calls handleInputPlaceholder', () => { @@ -420,12 +422,12 @@ describe('Filtered Search Manager', () => { manager.filteredSearchInput.value = inputValue; manager.filteredSearchInput.dispatchEvent(new Event('input')); - expect(gl.DropdownUtils.getSearchQuery()).toEqual(inputValue); + expect(DropdownUtils.getSearchQuery()).toEqual(inputValue); manager.clearSearchButton.click(); expect(manager.filteredSearchInput.value).toEqual(''); - expect(gl.DropdownUtils.getSearchQuery()).toEqual(''); + expect(DropdownUtils.getSearchQuery()).toEqual(''); }); }); diff --git a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js index 69b424c3af5..fbc3926d332 100644 --- a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js @@ -1,11 +1,11 @@ -import '~/filtered_search/filtered_search_token_keys'; +import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys'; describe('Filtered Search Token Keys', () => { describe('get', () => { let tokenKeys; beforeEach(() => { - tokenKeys = gl.FilteredSearchTokenKeys.get(); + tokenKeys = FilteredSearchTokenKeys.get(); }); it('should return tokenKeys', () => { @@ -21,7 +21,7 @@ describe('Filtered Search Token Keys', () => { let conditions; beforeEach(() => { - conditions = gl.FilteredSearchTokenKeys.getConditions(); + conditions = FilteredSearchTokenKeys.getConditions(); }); it('should return conditions', () => { @@ -35,71 +35,71 @@ describe('Filtered Search Token Keys', () => { describe('searchByKey', () => { it('should return null when key not found', () => { - const tokenKey = gl.FilteredSearchTokenKeys.searchByKey('notakey'); + const tokenKey = FilteredSearchTokenKeys.searchByKey('notakey'); expect(tokenKey === null).toBe(true); }); it('should return tokenKey when found by key', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.get(); - const result = gl.FilteredSearchTokenKeys.searchByKey(tokenKeys[0].key); + const tokenKeys = FilteredSearchTokenKeys.get(); + const result = FilteredSearchTokenKeys.searchByKey(tokenKeys[0].key); expect(result).toEqual(tokenKeys[0]); }); }); describe('searchBySymbol', () => { it('should return null when symbol not found', () => { - const tokenKey = gl.FilteredSearchTokenKeys.searchBySymbol('notasymbol'); + const tokenKey = FilteredSearchTokenKeys.searchBySymbol('notasymbol'); expect(tokenKey === null).toBe(true); }); it('should return tokenKey when found by symbol', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.get(); - const result = gl.FilteredSearchTokenKeys.searchBySymbol(tokenKeys[0].symbol); + const tokenKeys = FilteredSearchTokenKeys.get(); + const result = FilteredSearchTokenKeys.searchBySymbol(tokenKeys[0].symbol); expect(result).toEqual(tokenKeys[0]); }); }); describe('searchByKeyParam', () => { it('should return null when key param not found', () => { - const tokenKey = gl.FilteredSearchTokenKeys.searchByKeyParam('notakeyparam'); + const tokenKey = FilteredSearchTokenKeys.searchByKeyParam('notakeyparam'); expect(tokenKey === null).toBe(true); }); it('should return tokenKey when found by key 
param', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.get(); - const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); + const tokenKeys = FilteredSearchTokenKeys.get(); + const result = FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); expect(result).toEqual(tokenKeys[0]); }); it('should return alternative tokenKey when found by key param', () => { - const tokenKeys = gl.FilteredSearchTokenKeys.getAlternatives(); - const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); + const tokenKeys = FilteredSearchTokenKeys.getAlternatives(); + const result = FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`); expect(result).toEqual(tokenKeys[0]); }); }); describe('searchByConditionUrl', () => { it('should return null when condition url not found', () => { - const condition = gl.FilteredSearchTokenKeys.searchByConditionUrl(null); + const condition = FilteredSearchTokenKeys.searchByConditionUrl(null); expect(condition === null).toBe(true); }); it('should return condition when found by url', () => { - const conditions = gl.FilteredSearchTokenKeys.getConditions(); - const result = gl.FilteredSearchTokenKeys.searchByConditionUrl(conditions[0].url); + const conditions = FilteredSearchTokenKeys.getConditions(); + const result = FilteredSearchTokenKeys.searchByConditionUrl(conditions[0].url); expect(result).toBe(conditions[0]); }); }); describe('searchByConditionKeyValue', () => { it('should return null when condition tokenKey and value not found', () => { - const condition = gl.FilteredSearchTokenKeys.searchByConditionKeyValue(null, null); + const condition = FilteredSearchTokenKeys.searchByConditionKeyValue(null, null); expect(condition === null).toBe(true); }); it('should return condition when found by tokenKey and value', () => { - const conditions = gl.FilteredSearchTokenKeys.getConditions(); - const result = gl.FilteredSearchTokenKeys + const conditions = FilteredSearchTokenKeys.getConditions(); + const result = FilteredSearchTokenKeys .searchByConditionKeyValue(conditions[0].tokenKey, conditions[0].value); expect(result).toEqual(conditions[0]); }); diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js index 585bea9b499..465f5f79931 100644 --- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js @@ -1,19 +1,19 @@ -import '~/filtered_search/filtered_search_token_keys'; -import '~/filtered_search/filtered_search_tokenizer'; +import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys'; +import FilteredSearchTokenizer from '~/filtered_search/filtered_search_tokenizer'; describe('Filtered Search Tokenizer', () => { - const allowedKeys = gl.FilteredSearchTokenKeys.getKeys(); + const allowedKeys = FilteredSearchTokenKeys.getKeys(); describe('processTokens', () => { it('returns for input containing only search value', () => { - const results = gl.FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys); + const results = FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys); expect(results.searchToken).toBe('searchTerm'); expect(results.tokens.length).toBe(0); expect(results.lastToken).toBe(results.searchToken); }); it('returns for input containing only tokens', () => { - const results = 
gl.FilteredSearchTokenizer + const results = FilteredSearchTokenizer .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none', allowedKeys); expect(results.searchToken).toBe(''); expect(results.tokens.length).toBe(4); @@ -37,7 +37,7 @@ describe('Filtered Search Tokenizer', () => { }); it('returns for input starting with search value and ending with tokens', () => { - const results = gl.FilteredSearchTokenizer + const results = FilteredSearchTokenizer .processTokens('searchTerm anotherSearchTerm milestone:none', allowedKeys); expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); expect(results.tokens.length).toBe(1); @@ -48,7 +48,7 @@ describe('Filtered Search Tokenizer', () => { }); it('returns for input starting with tokens and ending with search value', () => { - const results = gl.FilteredSearchTokenizer + const results = FilteredSearchTokenizer .processTokens('assignee:@user searchTerm', allowedKeys); expect(results.searchToken).toBe('searchTerm'); @@ -60,7 +60,7 @@ describe('Filtered Search Tokenizer', () => { }); it('returns for input containing search value wrapped between tokens', () => { - const results = gl.FilteredSearchTokenizer + const results = FilteredSearchTokenizer .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none', allowedKeys); expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); @@ -81,7 +81,7 @@ describe('Filtered Search Tokenizer', () => { }); it('returns for input containing search value in between tokens', () => { - const results = gl.FilteredSearchTokenizer + const results = FilteredSearchTokenizer .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing', allowedKeys); expect(results.searchToken).toBe('searchTerm anotherSearchTerm'); expect(results.tokens.length).toBe(3); @@ -101,14 +101,14 @@ describe('Filtered Search Tokenizer', () => { }); it('returns search value for invalid tokens', () => { - const results = gl.FilteredSearchTokenizer.processTokens('fake:token', allowedKeys); + const results = FilteredSearchTokenizer.processTokens('fake:token', allowedKeys); expect(results.lastToken).toBe('fake:token'); expect(results.searchToken).toBe('fake:token'); expect(results.tokens.length).toEqual(0); }); it('returns search value and token for mix of valid and invalid tokens', () => { - const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys); + const results = FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys); expect(results.tokens.length).toEqual(1); expect(results.tokens[0].key).toBe('label'); expect(results.tokens[0].value).toBe('real'); @@ -118,13 +118,13 @@ describe('Filtered Search Tokenizer', () => { }); it('returns search value for invalid symbols', () => { - const results = gl.FilteredSearchTokenizer.processTokens('std::includes', allowedKeys); + const results = FilteredSearchTokenizer.processTokens('std::includes', allowedKeys); expect(results.lastToken).toBe('std::includes'); expect(results.searchToken).toBe('std::includes'); }); it('removes duplicated values', () => { - const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys); + const results = FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys); expect(results.tokens.length).toBe(1); expect(results.tokens[0].key).toBe('label'); expect(results.tokens[0].value).toBe('foo'); diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js 
b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js index 0684c3498a2..f1da5f81c0f 100644 --- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js @@ -2,11 +2,12 @@ import _ from 'underscore'; import AjaxCache from '~/lib/utils/ajax_cache'; import UsersCache from '~/lib/utils/users_cache'; -import '~/filtered_search/filtered_search_visual_tokens'; +import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual_tokens'; +import DropdownUtils from '~/filtered_search//dropdown_utils'; import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper'; describe('Filtered Search Visual Tokens', () => { - const subject = gl.FilteredSearchVisualTokens; + const subject = FilteredSearchVisualTokens; const findElements = (tokenElement) => { const tokenNameElement = tokenElement.querySelector('.name'); @@ -860,25 +861,25 @@ describe('Filtered Search Visual Tokens', () => { it('does not preprocess more than once', () => { let labels = []; - spyOn(gl.DropdownUtils, 'duplicateLabelPreprocessing').and.callFake(() => []); + spyOn(DropdownUtils, 'duplicateLabelPreprocessing').and.callFake(() => []); - labels = gl.FilteredSearchVisualTokens.preprocessLabel(endpoint, labels); - gl.FilteredSearchVisualTokens.preprocessLabel(endpoint, labels); + labels = FilteredSearchVisualTokens.preprocessLabel(endpoint, labels); + FilteredSearchVisualTokens.preprocessLabel(endpoint, labels); - expect(gl.DropdownUtils.duplicateLabelPreprocessing.calls.count()).toEqual(1); + expect(DropdownUtils.duplicateLabelPreprocessing.calls.count()).toEqual(1); }); describe('not preprocessed before', () => { it('returns preprocessed labels', () => { let labels = []; expect(labels.preprocessed).not.toEqual(true); - labels = gl.FilteredSearchVisualTokens.preprocessLabel(endpoint, labels); + labels = FilteredSearchVisualTokens.preprocessLabel(endpoint, labels); expect(labels.preprocessed).toEqual(true); }); it('overrides AjaxCache with preprocessed results', () => { spyOn(AjaxCache, 'override').and.callFake(() => {}); - gl.FilteredSearchVisualTokens.preprocessLabel(endpoint, []); + FilteredSearchVisualTokens.preprocessLabel(endpoint, []); expect(AjaxCache.override.calls.count()).toEqual(1); }); }); @@ -926,7 +927,7 @@ describe('Filtered Search Visual Tokens', () => { }; const findLabel = tokenValue => labelData.find( - label => tokenValue === `~${gl.DropdownUtils.getEscapedText(label.title)}`, + label => tokenValue === `~${DropdownUtils.getEscapedText(label.title)}`, ); it('updates the color of a label token', (done) => { diff --git a/spec/javascripts/fixtures/merge_requests.rb b/spec/javascripts/fixtures/merge_requests.rb index 3fd16d76f51..ee60489eb7c 100644 --- a/spec/javascripts/fixtures/merge_requests.rb +++ b/spec/javascripts/fixtures/merge_requests.rb @@ -70,8 +70,50 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont render_merge_request(example.description, merge_request) end + it 'merge_requests/discussions.json' do |example| + create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request) + render_discussions_json(merge_request, example.description) + end + + it 'merge_requests/diff_discussion.json' do |example| + create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request) + render_discussions_json(merge_request, example.description) + end + + context 
'with image diff' do + let(:merge_request2) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project, title: "Added images") } + let(:image_path) { "files/images/ee_repo_logo.png" } + let(:image_position) do + Gitlab::Diff::Position.new( + old_path: image_path, + new_path: image_path, + width: 100, + height: 100, + x: 1, + y: 1, + position_type: "image", + diff_refs: merge_request2.diff_refs + ) + end + + it 'merge_requests/image_diff_discussion.json' do |example| + create(:diff_note_on_merge_request, project: project, noteable: merge_request2, position: image_position) + render_discussions_json(merge_request2, example.description) + end + end + private + def render_discussions_json(merge_request, fixture_file_name) + get :discussions, + namespace_id: project.namespace.to_param, + project_id: project, + id: merge_request.to_param, + format: :json + + store_frontend_fixture(response, fixture_file_name) + end + def render_merge_request(fixture_file_name, merge_request) get :show, namespace_id: project.namespace.to_param, diff --git a/spec/javascripts/gl_dropdown_spec.js b/spec/javascripts/gl_dropdown_spec.js index b13d1bf8dff..67b854f61c0 100644 --- a/spec/javascripts/gl_dropdown_spec.js +++ b/spec/javascripts/gl_dropdown_spec.js @@ -64,8 +64,8 @@ describe('glDropdown', function describeDropdown() { }); afterEach(() => { - $('body').unbind('keydown'); - this.dropdownContainerElement.unbind('keyup'); + $('body').off('keydown'); + this.dropdownContainerElement.off('keyup'); }); it('should open on click', () => { diff --git a/spec/javascripts/gl_form_spec.js b/spec/javascripts/gl_form_spec.js index 5a8009e57fd..9c1fc0fda9e 100644 --- a/spec/javascripts/gl_form_spec.js +++ b/spec/javascripts/gl_form_spec.js @@ -1,10 +1,8 @@ -import Autosize from 'autosize'; +import autosize from 'autosize'; import GLForm from '~/gl_form'; import '~/lib/utils/text_utility'; import '~/lib/utils/common_utils'; -window.autosize = Autosize; - describe('GLForm', () => { describe('when instantiated', function () { beforeEach((done) => { @@ -13,14 +11,12 @@ describe('GLForm', () => { spyOn($.prototype, 'off').and.returnValue(this.textarea); spyOn($.prototype, 'on').and.returnValue(this.textarea); spyOn($.prototype, 'css'); - spyOn(window, 'autosize'); - this.glForm = new GLForm(this.form); + this.glForm = new GLForm(this.form, false); setTimeout(() => { $.prototype.off.calls.reset(); $.prototype.on.calls.reset(); $.prototype.css.calls.reset(); - window.autosize.calls.reset(); done(); }); }); @@ -43,10 +39,6 @@ describe('GLForm', () => { expect($.prototype.on).toHaveBeenCalledWith('mouseup.autosize', jasmine.any(Function)); }); - it('should autosize the textarea', () => { - expect(window.autosize).toHaveBeenCalledWith(jasmine.any(Object)); - }); - it('should set the resize css property to vertical', () => { expect($.prototype.css).toHaveBeenCalledWith('resize', 'vertical'); }); @@ -74,7 +66,7 @@ describe('GLForm', () => { spyOn($.prototype, 'data'); spyOn($.prototype, 'outerHeight').and.returnValue(200); spyOn(window, 'outerHeight').and.returnValue(400); - spyOn(window.autosize, 'destroy'); + spyOn(autosize, 'destroy'); this.glForm.destroyAutosize(); }); @@ -88,7 +80,7 @@ describe('GLForm', () => { }); it('should call autosize destroy', () => { - expect(window.autosize.destroy).toHaveBeenCalledWith(this.textarea); + expect(autosize.destroy).toHaveBeenCalledWith(this.textarea); }); it('should set the data-height attribute', () => { @@ -107,9 +99,9 @@ describe('GLForm', () => { it('should return 
undefined if the data-height equals the outerHeight', () => { spyOn($.prototype, 'outerHeight').and.returnValue(200); spyOn($.prototype, 'data').and.returnValue(200); - spyOn(window.autosize, 'destroy'); + spyOn(autosize, 'destroy'); expect(this.glForm.destroyAutosize()).toBeUndefined(); - expect(window.autosize.destroy).not.toHaveBeenCalled(); + expect(autosize.destroy).not.toHaveBeenCalled(); }); }); }); diff --git a/spec/javascripts/gpg_badges_spec.js b/spec/javascripts/gpg_badges_spec.js index 7a826487bf9..5decb5e6bbd 100644 --- a/spec/javascripts/gpg_badges_spec.js +++ b/spec/javascripts/gpg_badges_spec.js @@ -1,6 +1,9 @@ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import GpgBadges from '~/gpg_badges'; describe('GpgBadges', () => { + let mock; const dummyCommitSha = 'n0m0rec0ffee'; const dummyBadgeHtml = 'dummy html'; const dummyResponse = { @@ -11,38 +14,43 @@ describe('GpgBadges', () => { }; beforeEach(() => { + mock = new MockAdapter(axios); setFixtures(` + <form + class="commits-search-form" data-signatures-path="/hello" action="/hello" + method="get"> + <input name="utf8" type="hidden" value="✓"> + <input type="search" name="search" id="commits-search"class="form-control search-text-input input-short"> + </form> <div class="parent-container"> <div class="js-loading-gpg-badge" data-commit-sha="${dummyCommitSha}"></div> </div> `); }); - it('displays a loading spinner', () => { - spyOn($, 'get').and.returnValue({ - done() { - // intentionally left blank - }, - }); + afterEach(() => { + mock.restore(); + }); - GpgBadges.fetch(); + it('displays a loading spinner', (done) => { + mock.onGet('/hello').reply(200); - expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null); - const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin'); - expect(spinners.length).toBe(1); + GpgBadges.fetch().then(() => { + expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null); + const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin'); + expect(spinners.length).toBe(1); + done(); + }).catch(done.fail); }); - it('replaces the loading spinner', () => { - spyOn($, 'get').and.returnValue({ - done(callback) { - callback(dummyResponse); - }, - }); - - GpgBadges.fetch(); + it('replaces the loading spinner', (done) => { + mock.onGet('/hello').reply(200, dummyResponse); - expect(document.querySelector('.js-loading-gpg-badge')).toBe(null); - const parentContainer = document.querySelector('.parent-container'); - expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml); + GpgBadges.fetch().then(() => { + expect(document.querySelector('.js-loading-gpg-badge')).toBe(null); + const parentContainer = document.querySelector('.parent-container'); + expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml); + done(); + }).catch(done.fail); }); }); diff --git a/spec/javascripts/graphs/stat_graph_contributors_graph_spec.js b/spec/javascripts/graphs/stat_graph_contributors_graph_spec.js index 6599839a526..d8a8c8cc260 100644 --- a/spec/javascripts/graphs/stat_graph_contributors_graph_spec.js +++ b/spec/javascripts/graphs/stat_graph_contributors_graph_spec.js @@ -1,7 +1,7 @@ /* eslint-disable quotes, jasmine/no-suite-dupes, vars-on-top, no-var */ import { scaleLinear, scaleTime } from 'd3-scale'; import { timeParse } from 'd3-time-format'; -import { ContributorsGraph, ContributorsMasterGraph } from '~/graphs/stat_graph_contributors_graph'; +import { ContributorsGraph, 
ContributorsMasterGraph } from '~/pages/projects/graphs/show/stat_graph_contributors_graph'; const d3 = { scaleLinear, scaleTime, timeParse }; diff --git a/spec/javascripts/graphs/stat_graph_contributors_spec.js b/spec/javascripts/graphs/stat_graph_contributors_spec.js index 962423462e7..e03114c1cc5 100644 --- a/spec/javascripts/graphs/stat_graph_contributors_spec.js +++ b/spec/javascripts/graphs/stat_graph_contributors_spec.js @@ -1,5 +1,5 @@ -import ContributorsStatGraph from '~/graphs/stat_graph_contributors'; -import { ContributorsGraph } from '~/graphs/stat_graph_contributors_graph'; +import ContributorsStatGraph from '~/pages/projects/graphs/show/stat_graph_contributors'; +import { ContributorsGraph } from '~/pages/projects/graphs/show/stat_graph_contributors_graph'; import { setLanguage } from '../helpers/locale_helper'; diff --git a/spec/javascripts/graphs/stat_graph_contributors_util_spec.js b/spec/javascripts/graphs/stat_graph_contributors_util_spec.js index 9b47ab62181..22a9afe1a9d 100644 --- a/spec/javascripts/graphs/stat_graph_contributors_util_spec.js +++ b/spec/javascripts/graphs/stat_graph_contributors_util_spec.js @@ -1,6 +1,6 @@ /* eslint-disable quotes, no-var, camelcase, object-property-newline, comma-dangle, max-len, vars-on-top, quote-props */ -import ContributorsStatGraphUtil from '~/graphs/stat_graph_contributors_util'; +import ContributorsStatGraphUtil from '~/pages/projects/graphs/show/stat_graph_contributors_util'; describe("ContributorsStatGraphUtil", function () { describe("#parse_log", function () { diff --git a/spec/javascripts/groups/components/app_spec.js b/spec/javascripts/groups/components/app_spec.js index 8338efe915b..3adc29262f3 100644 --- a/spec/javascripts/groups/components/app_spec.js +++ b/spec/javascripts/groups/components/app_spec.js @@ -268,10 +268,10 @@ describe('AppComponent', () => { it('updates props which show modal confirmation dialog', () => { const group = Object.assign({}, mockParentGroupItem); - expect(vm.showModal).toBeFalsy(); + expect(vm.updateModal).toBeFalsy(); expect(vm.groupLeaveConfirmationMessage).toBe(''); vm.showLeaveGroupModal(group, mockParentGroupItem); - expect(vm.showModal).toBeTruthy(); + expect(vm.updateModal).toBeTruthy(); expect(vm.groupLeaveConfirmationMessage).toBe(`Are you sure you want to leave the "${group.fullName}" group?`); }); }); @@ -280,9 +280,9 @@ describe('AppComponent', () => { it('hides modal confirmation which is shown before leaving the group', () => { const group = Object.assign({}, mockParentGroupItem); vm.showLeaveGroupModal(group, mockParentGroupItem); - expect(vm.showModal).toBeTruthy(); + expect(vm.updateModal).toBeTruthy(); vm.hideLeaveGroupModal(); - expect(vm.showModal).toBeFalsy(); + expect(vm.updateModal).toBeFalsy(); }); }); @@ -307,7 +307,7 @@ describe('AppComponent', () => { spyOn($, 'scrollTo'); vm.leaveGroup(); - expect(vm.showModal).toBeFalsy(); + expect(vm.updateModal).toBeFalsy(); expect(vm.targetGroup.isBeingRemoved).toBeTruthy(); expect(vm.service.leaveGroup).toHaveBeenCalledWith(vm.targetGroup.leavePath); setTimeout(() => { @@ -475,7 +475,7 @@ describe('AppComponent', () => { it('renders modal confirmation dialog', () => { vm.groupLeaveConfirmationMessage = 'Are you sure you want to leave the "foo" group?'; - vm.showModal = true; + vm.updateModal = true; const modalDialogEl = vm.$el.querySelector('.modal'); expect(modalDialogEl).not.toBe(null); expect(modalDialogEl.querySelector('.modal-title').innerText.trim()).toBe('Are you sure?'); diff --git 
a/spec/javascripts/groups/components/group_item_spec.js b/spec/javascripts/groups/components/group_item_spec.js index 618d0022e4f..e3c942597a3 100644 --- a/spec/javascripts/groups/components/group_item_spec.js +++ b/spec/javascripts/groups/components/group_item_spec.js @@ -3,10 +3,9 @@ import * as urlUtils from '~/lib/utils/url_utility'; import groupItemComponent from '~/groups/components/group_item.vue'; import groupFolderComponent from '~/groups/components/group_folder.vue'; import eventHub from '~/groups/event_hub'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { mockParentGroupItem, mockChildren } from '../mock_data'; -import mountComponent from '../../helpers/vue_mount_component_helper'; - const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren[0]) => { const Component = Vue.extend(groupItemComponent); diff --git a/spec/javascripts/groups/components/groups_spec.js b/spec/javascripts/groups/components/groups_spec.js index 90e818c1545..793c4909d89 100644 --- a/spec/javascripts/groups/components/groups_spec.js +++ b/spec/javascripts/groups/components/groups_spec.js @@ -4,10 +4,9 @@ import groupsComponent from '~/groups/components/groups.vue'; import groupFolderComponent from '~/groups/components/group_folder.vue'; import groupItemComponent from '~/groups/components/group_item.vue'; import eventHub from '~/groups/event_hub'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { mockGroups, mockPageInfo } from '../mock_data'; -import mountComponent from '../../helpers/vue_mount_component_helper'; - const createComponent = (searchEmpty = false) => { const Component = Vue.extend(groupsComponent); diff --git a/spec/javascripts/groups/components/item_actions_spec.js b/spec/javascripts/groups/components/item_actions_spec.js index acccbe639c4..15fd37ebcd2 100644 --- a/spec/javascripts/groups/components/item_actions_spec.js +++ b/spec/javascripts/groups/components/item_actions_spec.js @@ -2,10 +2,9 @@ import Vue from 'vue'; import itemActionsComponent from '~/groups/components/item_actions.vue'; import eventHub from '~/groups/event_hub'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { mockParentGroupItem, mockChildren } from '../mock_data'; -import mountComponent from '../../helpers/vue_mount_component_helper'; - const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren[0]) => { const Component = Vue.extend(itemActionsComponent); diff --git a/spec/javascripts/groups/components/item_caret_spec.js b/spec/javascripts/groups/components/item_caret_spec.js index 8faad455825..36f838a104f 100644 --- a/spec/javascripts/groups/components/item_caret_spec.js +++ b/spec/javascripts/groups/components/item_caret_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import itemCaretComponent from '~/groups/components/item_caret.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const createComponent = (isGroupOpen = false) => { const Component = Vue.extend(itemCaretComponent); diff --git a/spec/javascripts/groups/components/item_stats_spec.js b/spec/javascripts/groups/components/item_stats_spec.js index 55a7a713ca6..ee7ee18259e 100644 --- a/spec/javascripts/groups/components/item_stats_spec.js +++ b/spec/javascripts/groups/components/item_stats_spec.js @@ -1,6 +1,7 @@ import Vue from 'vue'; import itemStatsComponent from '~/groups/components/item_stats.vue'; +import mountComponent from 
'spec/helpers/vue_mount_component_helper'; import { mockParentGroupItem, ITEM_TYPE, @@ -9,8 +10,6 @@ import { PROJECT_VISIBILITY_TYPE, } from '../mock_data'; -import mountComponent from '../../helpers/vue_mount_component_helper'; - const createComponent = (item = mockParentGroupItem) => { const Component = Vue.extend(itemStatsComponent); diff --git a/spec/javascripts/groups/components/item_stats_value_spec.js b/spec/javascripts/groups/components/item_stats_value_spec.js index e990870aaa6..5e35ae4d36c 100644 --- a/spec/javascripts/groups/components/item_stats_value_spec.js +++ b/spec/javascripts/groups/components/item_stats_value_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import itemStatsValueComponent from '~/groups/components/item_stats_value.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const createComponent = ({ title, cssClass, iconName, tooltipPlacement, value }) => { const Component = Vue.extend(itemStatsValueComponent); diff --git a/spec/javascripts/groups/components/item_type_icon_spec.js b/spec/javascripts/groups/components/item_type_icon_spec.js index 495cc97b475..24380689b29 100644 --- a/spec/javascripts/groups/components/item_type_icon_spec.js +++ b/spec/javascripts/groups/components/item_type_icon_spec.js @@ -1,10 +1,9 @@ import Vue from 'vue'; import itemTypeIconComponent from '~/groups/components/item_type_icon.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { ITEM_TYPE } from '../mock_data'; -import mountComponent from '../../helpers/vue_mount_component_helper'; - const createComponent = (itemType = ITEM_TYPE.GROUP, isGroupOpen = false) => { const Component = Vue.extend(itemTypeIconComponent); diff --git a/spec/javascripts/importer_status_spec.js b/spec/javascripts/importer_status_spec.js new file mode 100644 index 00000000000..71a2cd51f63 --- /dev/null +++ b/spec/javascripts/importer_status_spec.js @@ -0,0 +1,107 @@ +import { ImporterStatus } from '~/importer_status'; +import axios from '~/lib/utils/axios_utils'; +import MockAdapter from 'axios-mock-adapter'; + +describe('Importer Status', () => { + let instance; + let mock; + + beforeEach(() => { + mock = new MockAdapter(axios); + }); + + afterEach(() => { + mock.restore(); + }); + + describe('addToImport', () => { + const importUrl = '/import_url'; + + beforeEach(() => { + setFixtures(` + <tr id="repo_123"> + <td class="import-target"></td> + <td class="import-actions job-status"> + <button name="button" type="submit" class="btn btn-import js-add-to-import"> + </button> + </td> + </tr> + `); + spyOn(ImporterStatus.prototype, 'initStatusPage').and.callFake(() => {}); + spyOn(ImporterStatus.prototype, 'setAutoUpdate').and.callFake(() => {}); + instance = new ImporterStatus('', importUrl); + }); + + it('sets table row to active after post request', (done) => { + mock.onPost(importUrl).reply(200, { + id: 1, + full_path: '/full_path', + }); + + instance.addToImport({ + currentTarget: document.querySelector('.js-add-to-import'), + }) + .then(() => { + expect(document.querySelector('tr').classList.contains('active')).toEqual(true); + done(); + }) + .catch(done.fail); + }); + }); + + describe('autoUpdate', () => { + const jobsUrl = '/jobs_url'; + + beforeEach(() => { + const div = document.createElement('div'); + div.innerHTML = ` + <div id="project_1"> + <div class="job-status"> + </div> + </div> + `; + + document.body.appendChild(div); + + spyOn(ImporterStatus.prototype, 
'initStatusPage').and.callFake(() => {}); + spyOn(ImporterStatus.prototype, 'setAutoUpdate').and.callFake(() => {}); + instance = new ImporterStatus(jobsUrl); + }); + + function setupMock(importStatus) { + mock.onGet(jobsUrl).reply(200, [{ + id: 1, + import_status: importStatus, + }]); + } + + function expectJobStatus(done, status) { + instance.autoUpdate() + .then(() => { + expect(document.querySelector('#project_1').innerText.trim()).toEqual(status); + done(); + }) + .catch(done.fail); + } + + it('sets the job status to done', (done) => { + setupMock('finished'); + expectJobStatus(done, 'done'); + }); + + it('sets the job status to scheduled', (done) => { + setupMock('scheduled'); + expectJobStatus(done, 'scheduled'); + }); + + it('sets the job status to started', (done) => { + setupMock('started'); + expectJobStatus(done, 'started'); + }); + + it('sets the job status to custom status', (done) => { + setupMock('custom status'); + expectJobStatus(done, 'custom status'); + }); + }); +}); diff --git a/spec/javascripts/issuable_time_tracker_spec.js b/spec/javascripts/issuable_time_tracker_spec.js index 8ff93c4f918..365e9fe6a4b 100644 --- a/spec/javascripts/issuable_time_tracker_spec.js +++ b/spec/javascripts/issuable_time_tracker_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; -import timeTracker from '~/sidebar/components/time_tracking/time_tracker'; +import timeTracker from '~/sidebar/components/time_tracking/time_tracker.vue'; function initTimeTrackingComponent(opts) { setFixtures(` diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js index 1c9f48028f2..584db6c6632 100644 --- a/spec/javascripts/issue_show/components/app_spec.js +++ b/spec/javascripts/issue_show/components/app_spec.js @@ -6,8 +6,8 @@ import '~/render_gfm'; import * as urlUtils from '~/lib/utils/url_utility'; import issuableApp from '~/issue_show/components/app.vue'; import eventHub from '~/issue_show/event_hub'; +import setTimeoutPromise from 'spec/helpers/set_timeout_promise_helper'; import issueShowData from '../mock_data'; -import setTimeoutPromise from '../../helpers/set_timeout_promise_helper'; function formatText(text) { return text.trim().replace(/\s\s+/g, ' '); diff --git a/spec/javascripts/issue_show/components/description_spec.js b/spec/javascripts/issue_show/components/description_spec.js index 0da25bdca9c..ff7f99eec14 100644 --- a/spec/javascripts/issue_show/components/description_spec.js +++ b/spec/javascripts/issue_show/components/description_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import descriptionComponent from '~/issue_show/components/description.vue'; import * as taskList from '~/task_list'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Description component', () => { let vm; diff --git a/spec/javascripts/job_spec.js b/spec/javascripts/job_spec.js index 03b58e9c1d0..b4599688c6d 100644 --- a/spec/javascripts/job_spec.js +++ b/spec/javascripts/job_spec.js @@ -10,6 +10,7 @@ describe('Job', () => { const JOB_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1`; let mock; let response; + let job; function waitForPromise() { return new Promise(resolve => requestAnimationFrame(resolve)); @@ -22,6 +23,8 @@ describe('Job', () => { spyOn(urlUtils, 'visitUrl'); + response = {}; + mock = new MockAdapter(axios); mock.onGet(new RegExp(`${JOB_URL}/trace.json?(.*)`)).reply(() => [200, response]); @@ -30,7 +33,7 @@ describe('Job', () 
=> { afterEach(() => { mock.restore(); - response = {}; + clearTimeout(job.timeout); }); describe('class constructor', () => { @@ -43,15 +46,19 @@ describe('Job', () => { }); describe('setup', () => { - beforeEach(function () { - this.job = new Job(); + beforeEach(function (done) { + job = new Job(); + + waitForPromise() + .then(done) + .catch(done.fail); }); it('copies build options', function () { - expect(this.job.pagePath).toBe(JOB_URL); - expect(this.job.buildStatus).toBe('success'); - expect(this.job.buildStage).toBe('test'); - expect(this.job.state).toBe(''); + expect(job.pagePath).toBe(JOB_URL); + expect(job.buildStatus).toBe('success'); + expect(job.buildStage).toBe('test'); + expect(job.state).toBe(''); }); it('only shows the jobs matching the current stage', () => { @@ -84,12 +91,12 @@ describe('Job', () => { complete: false, }; - this.job = new Job(); + job = new Job(); waitForPromise() .then(() => { expect($('#build-trace .js-build-output').text()).toMatch(/Update/); - expect(this.job.state).toBe('newstate'); + expect(job.state).toBe('newstate'); response = { html: '<span>More</span>', @@ -103,7 +110,7 @@ describe('Job', () => { .then(waitForPromise) .then(() => { expect($('#build-trace .js-build-output').text()).toMatch(/UpdateMore/); - expect(this.job.state).toBe('finalstate'); + expect(job.state).toBe('finalstate'); }) .then(done) .catch(done.fail); @@ -117,7 +124,7 @@ describe('Job', () => { complete: false, }; - this.job = new Job(); + job = new Job(); waitForPromise() .then(() => { @@ -151,7 +158,7 @@ describe('Job', () => { total: 100, }; - this.job = new Job(); + job = new Job(); waitForPromise() .then(() => { @@ -172,7 +179,7 @@ describe('Job', () => { total: 100, }; - this.job = new Job(); + job = new Job(); waitForPromise() .then(() => { @@ -191,9 +198,10 @@ describe('Job', () => { append: false, size: 50, total: 100, + complete: false, }; - this.job = new Job(); + job = new Job(); waitForPromise() .then(() => { @@ -207,6 +215,7 @@ describe('Job', () => { append: true, size: 10, total: 100, + complete: true, }; }) .then(() => jasmine.clock().tick(4001)) @@ -229,7 +238,7 @@ describe('Job', () => { total: 100, }; - this.job = new Job(); + job = new Job(); expect( document.querySelector('.js-raw-link').textContent.trim(), @@ -247,7 +256,7 @@ describe('Job', () => { total: 100, }; - this.job = new Job(); + job = new Job(); waitForPromise() .then(() => { @@ -269,7 +278,7 @@ describe('Job', () => { total: 100, }; - this.job = new Job(); + job = new Job(); waitForPromise() .then(done) @@ -296,7 +305,7 @@ describe('Job', () => { it('should request build trace with state parameter', (done) => { spyOn(axios, 'get').and.callThrough(); // eslint-disable-next-line no-new - new Job(); + job = new Job(); setTimeout(() => { expect(axios.get).toHaveBeenCalledWith( diff --git a/spec/javascripts/jobs/header_spec.js b/spec/javascripts/jobs/header_spec.js index a9df0418d5d..0961605ce5c 100644 --- a/spec/javascripts/jobs/header_spec.js +++ b/spec/javascripts/jobs/header_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import headerComponent from '~/jobs/components/header.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Job details header', () => { let HeaderComponent; diff --git a/spec/javascripts/labels_select_spec.js b/spec/javascripts/labels_select_spec.js new file mode 100644 index 00000000000..b8f7b1dc855 --- /dev/null +++ b/spec/javascripts/labels_select_spec.js @@ -0,0 +1,43 @@ 
+import LabelsSelect from '~/labels_select'; + +const mockUrl = '/foo/bar/url'; + +const mockLabels = [ + { + id: 26, + title: 'Foo Label', + description: 'Foobar', + color: '#BADA55', + text_color: '#FFFFFF', + }, +]; + +describe('LabelsSelect', () => { + describe('getLabelTemplate', () => { + const label = mockLabels[0]; + let $labelEl; + + beforeEach(() => { + $labelEl = $(LabelsSelect.getLabelTemplate({ + labels: mockLabels, + issueUpdateURL: mockUrl, + })); + }); + + it('generated label item template has correct label URL', () => { + expect($labelEl.attr('href')).toBe('/foo/bar?label_name[]=Foo%20Label'); + }); + + it('generated label item template has correct label title', () => { + expect($labelEl.find('span.label').text()).toBe(label.title); + }); + + it('generated label item template has label description as title attribute', () => { + expect($labelEl.find('span.label').attr('title')).toBe(label.description); + }); + + it('generated label item template has correct label styles', () => { + expect($labelEl.find('span.label').attr('style')).toBe(`background-color: ${label.color}; color: ${label.text_color};`); + }); + }); +}); diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js index 80430011aed..49799c31995 100644 --- a/spec/javascripts/lib/utils/common_utils_spec.js +++ b/spec/javascripts/lib/utils/common_utils_spec.js @@ -480,4 +480,33 @@ describe('common_utils', () => { expect(commonUtils.spriteIcon('test', 'fa fa-test')).toEqual('<svg class="fa fa-test"><use xlink:href="icons.svg#test" /></svg>'); }); }); + + describe('convertObjectPropsToCamelCase', () => { + it('returns new object with camelCase property names by converting object with snake_case names', () => { + const snakeRegEx = /(_\w)/g; + const mockObj = { + id: 1, + group_name: 'GitLab.org', + absolute_web_url: 'https://gitlab.com/gitlab-org/', + }; + const mappings = { + id: 'id', + groupName: 'group_name', + absoluteWebUrl: 'absolute_web_url', + }; + + const convertedObj = commonUtils.convertObjectPropsToCamelCase(mockObj); + + Object.keys(convertedObj).forEach((prop) => { + expect(snakeRegEx.test(prop)).toBeFalsy(); + expect(convertedObj[prop]).toBe(mockObj[mappings[prop]]); + }); + }); + + it('return empty object if method is called with null or undefined', () => { + expect(Object.keys(commonUtils.convertObjectPropsToCamelCase(null)).length).toBe(0); + expect(Object.keys(commonUtils.convertObjectPropsToCamelCase()).length).toBe(0); + expect(Object.keys(commonUtils.convertObjectPropsToCamelCase({})).length).toBe(0); + }); + }); }); diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/javascripts/lib/utils/text_utility_spec.js index 69a23d7b2f3..e57a55fa71a 100644 --- a/spec/javascripts/lib/utils/text_utility_spec.js +++ b/spec/javascripts/lib/utils/text_utility_spec.js @@ -72,4 +72,10 @@ describe('text_utility', () => { expect(textUtils.stripHtml('This is a text with <p>html</p>.', ' ')).toEqual('This is a text with html .'); }); }); + + describe('convertToCamelCase', () => { + it('converts snake_case string to camelCase string', () => { + expect(textUtils.convertToCamelCase('snake_case')).toBe('snakeCase'); + }); + }); }); diff --git a/spec/javascripts/merge_request_notes_spec.js b/spec/javascripts/merge_request_notes_spec.js index 5d0ee91d977..0d16b23302f 100644 --- a/spec/javascripts/merge_request_notes_spec.js +++ b/spec/javascripts/merge_request_notes_spec.js @@ -23,7 +23,7 @@ describe('Merge request notes', () => { 
gl.utils.disableButtonIfEmptyField = _.noop; window.project_uploads_path = 'http://test.host/uploads'; $('body').attr('data-page', 'projects:merge_requests:show'); - window.gon.current_user_id = $('.note:last').data('author-id'); + window.gon.current_user_id = $('.note:last').data('authorId'); return new Notes('', []); }); @@ -76,7 +76,7 @@ describe('Merge request notes', () => { </form>`; setFixtures(diffsResponse.html + noteFormHtml); $('body').attr('data-page', 'projects:merge_requests:show'); - window.gon.current_user_id = $('.note:last').data('author-id'); + window.gon.current_user_id = $('.note:last').data('authorId'); return new Notes('', []); }); diff --git a/spec/javascripts/monitoring/dashboard_state_spec.js b/spec/javascripts/monitoring/dashboard_state_spec.js index 3319eeb3f31..df3198dd3e2 100644 --- a/spec/javascripts/monitoring/dashboard_state_spec.js +++ b/spec/javascripts/monitoring/dashboard_state_spec.js @@ -29,34 +29,6 @@ describe('EmptyState', () => { expect(component.currentState).toBe(component.states.gettingStarted); }); - it('buttonPath returns settings path for the state "gettingStarted"', () => { - const component = createComponent({ - selectedState: 'gettingStarted', - settingsPath: statePaths.settingsPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', - }); - - expect(component.buttonPath).toEqual(statePaths.settingsPath); - expect(component.buttonPath).not.toEqual(statePaths.documentationPath); - }); - - it('buttonPath returns documentation path for any of the other states', () => { - const component = createComponent({ - selectedState: 'loading', - settingsPath: statePaths.settingsPath, - documentationPath: statePaths.documentationPath, - emptyGettingStartedSvgPath: 'foo', - emptyLoadingSvgPath: 'foo', - emptyUnableToConnectSvgPath: 'foo', - }); - - expect(component.buttonPath).toEqual(statePaths.documentationPath); - expect(component.buttonPath).not.toEqual(statePaths.settingsPath); - }); - it('showButtonDescription returns a description with a link for the unableToConnect state', () => { const component = createComponent({ selectedState: 'unableToConnect', @@ -88,6 +60,7 @@ describe('EmptyState', () => { const component = createComponent({ selectedState: 'gettingStarted', settingsPath: statePaths.settingsPath, + clustersPath: statePaths.clustersPath, documentationPath: statePaths.documentationPath, emptyGettingStartedSvgPath: 'foo', emptyLoadingSvgPath: 'foo', diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js index 2bbe963e393..f30208b27b6 100644 --- a/spec/javascripts/monitoring/mock_data.js +++ b/spec/javascripts/monitoring/mock_data.js @@ -2471,6 +2471,7 @@ export const deploymentData = [ export const statePaths = { settingsPath: '/root/hello-prometheus/services/prometheus/edit', + clustersPath: '/root/hello-prometheus/clusters', documentationPath: '/help/administration/monitoring/prometheus/index.md', }; diff --git a/spec/javascripts/notebook/cells/markdown_spec.js b/spec/javascripts/notebook/cells/markdown_spec.js index 02304bf5d7d..8f8ba231ae8 100644 --- a/spec/javascripts/notebook/cells/markdown_spec.js +++ b/spec/javascripts/notebook/cells/markdown_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import MarkdownComponent from '~/notebook/cells/markdown.vue'; -import katex from 'vendor/katex'; +import katex from 'katex'; const Component = Vue.extend(MarkdownComponent); diff --git 
a/spec/javascripts/notes/components/comment_form_spec.js b/spec/javascripts/notes/components/comment_form_spec.js index 104d03377b6..6a7131528a3 100644 --- a/spec/javascripts/notes/components/comment_form_spec.js +++ b/spec/javascripts/notes/components/comment_form_spec.js @@ -1,17 +1,20 @@ import Vue from 'vue'; import Autosize from 'autosize'; import store from '~/notes/stores'; -import issueCommentForm from '~/notes/components/comment_form.vue'; +import CommentForm from '~/notes/components/comment_form.vue'; import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock } from '../mock_data'; import { keyboardDownEvent } from '../../issue_show/helpers'; describe('issue_comment_form component', () => { let vm; - const Component = Vue.extend(issueCommentForm); + const Component = Vue.extend(CommentForm); let mountComponent; beforeEach(() => { - mountComponent = () => new Component({ + mountComponent = (noteableType = 'issue') => new Component({ + propsData: { + noteableType, + }, store, }).$mount(); }); @@ -136,6 +139,11 @@ describe('issue_comment_form component', () => { expect(vm.editCurrentUserLastNote).toHaveBeenCalled(); }); + + it('inits autosave', () => { + expect(vm.autosave).toBeDefined(); + expect(vm.autosave.key).toEqual(`autosave/Note/Issue/${noteableDataMock.id}`); + }); }); describe('event enter', () => { @@ -182,6 +190,15 @@ describe('issue_comment_form component', () => { done(); }); }); + + it('updates button text with noteable type', (done) => { + vm.noteableType = 'merge_request'; + + Vue.nextTick(() => { + expect(vm.$el.querySelector('.btn-comment-and-close').textContent.trim()).toEqual('Close merge request'); + done(); + }); + }); }); describe('issue is confidential', () => { diff --git a/spec/javascripts/notes/components/diff_file_header_spec.js b/spec/javascripts/notes/components/diff_file_header_spec.js new file mode 100644 index 00000000000..aed30a087a6 --- /dev/null +++ b/spec/javascripts/notes/components/diff_file_header_spec.js @@ -0,0 +1,93 @@ +import Vue from 'vue'; +import DiffFileHeader from '~/notes/components/diff_file_header.vue'; +import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; +import mountComponent from '../../helpers/vue_mount_component_helper'; + +const discussionFixture = 'merge_requests/diff_discussion.json'; + +describe('diff_file_header', () => { + let vm; + const diffDiscussionMock = getJSONFixture(discussionFixture)[0]; + const diffFile = convertObjectPropsToCamelCase(diffDiscussionMock.diff_file); + const props = { + diffFile, + }; + const Component = Vue.extend(DiffFileHeader); + const selectors = { + get copyButton() { + return vm.$el.querySelector('button[data-original-title="Copy file path to clipboard"]'); + }, + get fileName() { + return vm.$el.querySelector('.file-title-name'); + }, + get titleWrapper() { + return vm.$refs.titleWrapper; + }, + }; + + describe('submodule', () => { + beforeEach(() => { + props.diffFile.submodule = true; + props.diffFile.submoduleLink = '<a href="/bha">Submodule</a>'; + + vm = mountComponent(Component, props); + }); + + it('shows submoduleLink', () => { + expect(selectors.fileName.innerHTML).toBe(props.diffFile.submoduleLink); + }); + + it('has button to copy blob path', () => { + expect(selectors.copyButton).toExist(); + expect(selectors.copyButton.getAttribute('data-clipboard-text')).toBe(props.diffFile.submoduleLink); + }); + }); + + describe('changed file', () => { + beforeEach(() => { + props.diffFile.submodule = false; + props.diffFile.discussionPath = 
'some/discussion/id'; + + vm = mountComponent(Component, props); + }); + + it('shows file type icon', () => { + expect(vm.$el.innerHTML).toContain('fa-file-text-o'); + }); + + it('links to discussion path', () => { + expect(selectors.titleWrapper).toExist(); + expect(selectors.titleWrapper.tagName).toBe('A'); + expect(selectors.titleWrapper.getAttribute('href')).toBe(props.diffFile.discussionPath); + }); + + it('shows plain title if no link given', () => { + props.diffFile.discussionPath = undefined; + vm = mountComponent(Component, props); + + expect(selectors.titleWrapper.tagName).not.toBe('A'); + expect(selectors.titleWrapper.href).toBeFalsy(); + }); + + it('has button to copy file path', () => { + expect(selectors.copyButton).toExist(); + expect(selectors.copyButton.getAttribute('data-clipboard-text')).toBe(props.diffFile.filePath); + }); + + it('shows file mode change', (done) => { + vm.diffFile = { + ...props.diffFile, + modeChanged: true, + aMode: '100755', + bMode: '100644', + }; + + Vue.nextTick(() => { + expect( + vm.$refs.fileMode.textContent.trim(), + ).toBe('100755 → 100644'); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/notes/components/diff_with_note_spec.js b/spec/javascripts/notes/components/diff_with_note_spec.js new file mode 100644 index 00000000000..7f1f4bf0bcd --- /dev/null +++ b/spec/javascripts/notes/components/diff_with_note_spec.js @@ -0,0 +1,64 @@ +import Vue from 'vue'; +import DiffWithNote from '~/notes/components/diff_with_note.vue'; +import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; +import mountComponent from '../../helpers/vue_mount_component_helper'; + +const discussionFixture = 'merge_requests/diff_discussion.json'; +const imageDiscussionFixture = 'merge_requests/image_diff_discussion.json'; + +describe('diff_with_note', () => { + let vm; + const diffDiscussionMock = getJSONFixture(discussionFixture)[0]; + const diffDiscussion = convertObjectPropsToCamelCase(diffDiscussionMock); + const Component = Vue.extend(DiffWithNote); + const props = { + discussion: diffDiscussion, + }; + const selectors = { + get container() { + return vm.$refs.fileHolder; + }, + get diffTable() { + return this.container.querySelector('.diff-content table'); + }, + get diffRows() { + return this.container.querySelectorAll('.diff-content .line_holder'); + }, + get noteRow() { + return this.container.querySelector('.diff-content .notes_holder'); + }, + }; + + describe('text diff', () => { + beforeEach(() => { + vm = mountComponent(Component, props); + }); + + it('shows text diff', () => { + expect(selectors.container).toHaveClass('text-file'); + expect(selectors.diffTable).toExist(); + }); + + it('shows diff lines', () => { + expect(selectors.diffRows.length).toBe(12); + }); + + it('shows notes row', () => { + expect(selectors.noteRow).toExist(); + }); + }); + + describe('image diff', () => { + beforeEach(() => { + const imageDiffDiscussionMock = getJSONFixture(imageDiscussionFixture)[0]; + props.discussion = convertObjectPropsToCamelCase(imageDiffDiscussionMock); + }); + + it('shows image diff', () => { + vm = mountComponent(Component, props); + + expect(selectors.container).toHaveClass('js-image-file'); + expect(selectors.diffTable).not.toExist(); + }); + }); +}); diff --git a/spec/javascripts/notes/components/note_app_spec.js b/spec/javascripts/notes/components/note_app_spec.js index 36c56cd3862..e1c612f5100 100644 --- a/spec/javascripts/notes/components/note_app_spec.js +++ b/spec/javascripts/notes/components/note_app_spec.js @@ -2,14 
+2,30 @@ import _ from 'underscore'; import Vue from 'vue'; import notesApp from '~/notes/components/notes_app.vue'; import service from '~/notes/services/notes_service'; +import '~/render_gfm'; import * as mockData from '../mock_data'; -import getSetTimeoutPromise from '../../helpers/set_timeout_promise_helper'; + +const vueMatchers = { + toIncludeElement() { + return { + compare(vm, selector) { + const result = { + pass: vm.$el.querySelector(selector) !== null, + }; + return result; + }, + }; + }, +}; describe('note_app', () => { let mountComponent; let vm; beforeEach(() => { + jasmine.addMatchers(vueMatchers); + $('body').attr('data-page', 'projects:merge_requests:show'); + const IssueNotesApp = Vue.extend(notesApp); mountComponent = (data) => { @@ -104,8 +120,8 @@ describe('note_app', () => { vm = mountComponent(); }); - it('should render loading icon', () => { - expect(vm.$el.querySelector('.js-loading')).toBeDefined(); + it('renders skeleton notes', () => { + expect(vm).toIncludeElement('.animation-container'); }); it('should render form', () => { @@ -118,10 +134,14 @@ describe('note_app', () => { describe('update note', () => { describe('individual note', () => { - beforeEach(() => { + beforeEach((done) => { Vue.http.interceptors.push(mockData.individualNoteInterceptor); spyOn(service, 'updateNote').and.callThrough(); vm = mountComponent(); + setTimeout(() => { + vm.$el.querySelector('.js-note-edit').click(); + Vue.nextTick(done); + }, 0); }); afterEach(() => { @@ -131,40 +151,32 @@ describe('note_app', () => { ); }); - it('renders edit form', (done) => { - setTimeout(() => { - vm.$el.querySelector('.js-note-edit').click(); - Vue.nextTick(() => { - expect(vm.$el.querySelector('.js-vue-issue-note-form')).toBeDefined(); - done(); - }); - }, 0); + it('renders edit form', () => { + expect(vm).toIncludeElement('.js-vue-issue-note-form'); }); it('calls the service to update the note', (done) => { - getSetTimeoutPromise() - .then(() => { - vm.$el.querySelector('.js-note-edit').click(); - }) - .then(Vue.nextTick) - .then(() => { - vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note'; - vm.$el.querySelector('.js-vue-issue-save').click(); - - expect(service.updateNote).toHaveBeenCalled(); - }) - // Wait for the requests to finish before destroying - .then(Vue.nextTick) + vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note'; + vm.$el.querySelector('.js-vue-issue-save').click(); + + expect(service.updateNote).toHaveBeenCalled(); + // Wait for the requests to finish before destroying + Vue.nextTick() .then(done) .catch(done.fail); }); }); - describe('dicussion note', () => { - beforeEach(() => { + describe('discussion note', () => { + beforeEach((done) => { Vue.http.interceptors.push(mockData.discussionNoteInterceptor); spyOn(service, 'updateNote').and.callThrough(); vm = mountComponent(); + + setTimeout(() => { + vm.$el.querySelector('.js-note-edit').click(); + Vue.nextTick(done); + }, 0); }); afterEach(() => { @@ -174,30 +186,17 @@ describe('note_app', () => { ); }); - it('renders edit form', (done) => { - setTimeout(() => { - vm.$el.querySelector('.js-note-edit').click(); - Vue.nextTick(() => { - expect(vm.$el.querySelector('.js-vue-issue-note-form')).toBeDefined(); - done(); - }); - }, 0); + it('renders edit form', () => { + expect(vm).toIncludeElement('.js-vue-issue-note-form'); }); it('updates the note and resets the edit form', (done) => { - getSetTimeoutPromise() - .then(() => { - vm.$el.querySelector('.js-note-edit').click(); - }) - 
.then(Vue.nextTick) - .then(() => { - vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note'; - vm.$el.querySelector('.js-vue-issue-save').click(); - - expect(service.updateNote).toHaveBeenCalled(); - }) - // Wait for the requests to finish before destroying - .then(Vue.nextTick) + vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note'; + vm.$el.querySelector('.js-vue-issue-save').click(); + + expect(service.updateNote).toHaveBeenCalled(); + // Wait for the requests to finish before destroying + Vue.nextTick() .then(done) .catch(done.fail); }); diff --git a/spec/javascripts/notes/components/note_body_spec.js b/spec/javascripts/notes/components/note_body_spec.js index b42e7943b98..0ff804f0e55 100644 --- a/spec/javascripts/notes/components/note_body_spec.js +++ b/spec/javascripts/notes/components/note_body_spec.js @@ -30,17 +30,26 @@ describe('issue_note_body component', () => { expect(vm.$el.querySelector('.note-text').innerHTML).toEqual(note.note_html); }); - it('should be render form if user is editing', (done) => { - vm.isEditing = true; + it('should render awards list', () => { + expect(vm.$el.querySelector('.js-awards-block button [data-name="baseball"]')).not.toBeNull(); + expect(vm.$el.querySelector('.js-awards-block button [data-name="bath_tone3"]')).not.toBeNull(); + }); - Vue.nextTick(() => { - expect(vm.$el.querySelector('textarea.js-task-list-field')).toBeDefined(); - done(); + describe('isEditing', () => { + beforeEach((done) => { + vm.isEditing = true; + Vue.nextTick(done); }); - }); - it('should render awards list', () => { - expect(vm.$el.querySelector('.js-awards-block button [data-name="baseball"]')).toBeDefined(); - expect(vm.$el.querySelector('.js-awards-block button [data-name="bath_tone3"]')).toBeDefined(); + it('renders edit form', () => { + expect(vm.$el.querySelector('textarea.js-task-list-field')).not.toBeNull(); + }); + + it('adds autosave', () => { + const autosaveKey = `autosave/Note/${note.noteable_type}/${note.id}`; + + expect(vm.autosave).toExist(); + expect(vm.autosave.key).toEqual(autosaveKey); + }); }); }); diff --git a/spec/javascripts/notes/components/note_header_spec.js b/spec/javascripts/notes/components/note_header_spec.js index 16a76b11321..5636f8d1a9f 100644 --- a/spec/javascripts/notes/components/note_header_spec.js +++ b/spec/javascripts/notes/components/note_header_spec.js @@ -32,6 +32,7 @@ describe('note_header component', () => { createdAt: '2017-08-02T10:51:58.559Z', includeToggle: false, noteId: 1394, + expanded: true, }, }).$mount(); }); @@ -68,6 +69,7 @@ describe('note_header component', () => { createdAt: '2017-08-02T10:51:58.559Z', includeToggle: true, noteId: 1395, + expanded: true, }, }).$mount(); }); @@ -76,17 +78,35 @@ describe('note_header component', () => { expect(vm.$el.querySelector('.js-vue-toggle-button')).toBeDefined(); }); - it('should toggle the disucssion icon', (done) => { - expect( - vm.$el.querySelector('.js-vue-toggle-button i').classList.contains('fa-chevron-up'), - ).toEqual(true); + it('emits toggle event on click', (done) => { + spyOn(vm, '$emit'); vm.$el.querySelector('.js-vue-toggle-button').click(); Vue.nextTick(() => { + expect(vm.$emit).toHaveBeenCalledWith('toggleHandler'); + done(); + }); + }); + + it('renders up arrow when open', (done) => { + vm.expanded = true; + + Vue.nextTick(() => { + expect( + vm.$el.querySelector('.js-vue-toggle-button i').classList, + ).toContain('fa-chevron-up'); + done(); + }); + }); + + it('renders down arrow when closed', (done) => { + 
vm.expanded = false; + + Vue.nextTick(() => { expect( - vm.$el.querySelector('.js-vue-toggle-button i').classList.contains('fa-chevron-down'), - ).toEqual(true); + vm.$el.querySelector('.js-vue-toggle-button i').classList, + ).toContain('fa-chevron-down'); done(); }); }); diff --git a/spec/javascripts/notes/components/noteable_note_spec.js b/spec/javascripts/notes/components/noteable_note_spec.js index cb63b64724d..88a7ffb0b9c 100644 --- a/spec/javascripts/notes/components/noteable_note_spec.js +++ b/spec/javascripts/notes/components/noteable_note_spec.js @@ -56,4 +56,25 @@ describe('issue_note', () => { done(); }, 0); }); + + describe('cancel edit', () => { + it('restores content of updated note', (done) => { + const noteBody = 'updated note text'; + vm.updateNote = () => Promise.resolve(); + + vm.formUpdateHandler(noteBody, null, $.noop); + + setTimeout(() => { + expect(vm.note.note_html).toEqual(noteBody); + + vm.formCancelHandler(); + + setTimeout(() => { + expect(vm.note.note_html).toEqual(noteBody); + + done(); + }); + }); + }); + }); }); diff --git a/spec/javascripts/notes/helpers.js b/spec/javascripts/notes/helpers.js new file mode 100644 index 00000000000..a7663710a56 --- /dev/null +++ b/spec/javascripts/notes/helpers.js @@ -0,0 +1,12 @@ +// eslint-disable-next-line import/prefer-default-export +export const resetStore = (store) => { + store.replaceState({ + notes: [], + targetNoteHash: null, + lastFetchedAt: null, + + notesData: {}, + userData: {}, + noteableData: {}, + }); +}; diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js index f0c800c759d..bf60cb12f52 100644 --- a/spec/javascripts/notes/mock_data.js +++ b/spec/javascripts/notes/mock_data.js @@ -7,6 +7,9 @@ export const notesDataMock = { notesPath: '/gitlab-org/gitlab-ce/noteable/issue/98/notes', quickActionsDocsPath: '/help/user/project/quick_actions', registerPath: '/users/sign_in?redirect_to_referer=yes#register-pane', + totalNotes: 1, + closePath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=close', + reopenPath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=reopen', }; export const userDataMock = { diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js index e092320f9a3..ab80ed7bbfb 100644 --- a/spec/javascripts/notes/stores/actions_spec.js +++ b/spec/javascripts/notes/stores/actions_spec.js @@ -1,8 +1,16 @@ +import Vue from 'vue'; +import _ from 'underscore'; import * as actions from '~/notes/stores/actions'; +import store from '~/notes/stores'; import testAction from '../../helpers/vuex_action_helper'; +import { resetStore } from '../helpers'; import { discussionMock, notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data'; describe('Actions Notes Store', () => { + afterEach(() => { + resetStore(store); + }); + describe('setNotesData', () => { it('should set received notes data', (done) => { testAction(actions.setNotesData, null, { notesData: {} }, [ @@ -58,4 +66,67 @@ describe('Actions Notes Store', () => { ], done); }); }); + + describe('async methods', () => { + const interceptor = (request, next) => { + next(request.respondWith(JSON.stringify({}), { + status: 200, + })); + }; + + beforeEach(() => { + Vue.http.interceptors.push(interceptor); + }); + + afterEach(() => { + Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor); + }); + + describe('closeIssue', () => { + it('sets state as closed', (done) => { + store.dispatch('closeIssue', { notesData: { closeIssuePath: 
'' } }) + .then(() => { + expect(store.state.noteableData.state).toEqual('closed'); + done(); + }) + .catch(done.fail); + }); + }); + + describe('reopenIssue', () => { + it('sets state as reopened', (done) => { + store.dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } }) + .then(() => { + expect(store.state.noteableData.state).toEqual('reopened'); + done(); + }) + .catch(done.fail); + }); + }); + }); + + describe('emitStateChangedEvent', () => { + it('emits an event on the document', () => { + document.addEventListener('issuable_vue_app:change', (event) => { + expect(event.detail.data).toEqual({ id: '1', state: 'closed' }); + expect(event.detail.isClosed).toEqual(false); + }); + + store.dispatch('emitStateChangedEvent', { id: '1', state: 'closed' }); + }); + }); + + describe('toggleIssueLocalState', () => { + it('sets issue state as closed', (done) => { + testAction(actions.toggleIssueLocalState, 'closed', {}, [ + { type: 'CLOSE_ISSUE', payload: 'closed' }, + ], done); + }); + + it('sets issue state as reopened', (done) => { + testAction(actions.toggleIssueLocalState, 'reopened', {}, [ + { type: 'REOPEN_ISSUE', payload: 'reopened' }, + ], done); + }); + }); }); diff --git a/spec/javascripts/notes/stores/getters_spec.js b/spec/javascripts/notes/stores/getters_spec.js index c5a84b71788..8b2a8d2cd7a 100644 --- a/spec/javascripts/notes/stores/getters_spec.js +++ b/spec/javascripts/notes/stores/getters_spec.js @@ -55,4 +55,10 @@ describe('Getters Notes Store', () => { expect(getters.getCurrentUserLastNote(state)).toEqual(individualNote.notes[0]); }); }); + + describe('openState', () => { + it('should return the issue state', () => { + expect(getters.openState(state)).toEqual(noteableDataMock.state); + }); + }); }); diff --git a/spec/javascripts/notes/stores/mutation_spec.js b/spec/javascripts/notes/stores/mutation_spec.js index 22d99998a7d..e4baefc5bfc 100644 --- a/spec/javascripts/notes/stores/mutation_spec.js +++ b/spec/javascripts/notes/stores/mutation_spec.js @@ -1,7 +1,7 @@ import mutations from '~/notes/stores/mutations'; import { note, discussionMock, notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data'; -describe('Mutation Notes Store', () => { +describe('Notes Store mutations', () => { describe('ADD_NEW_NOTE', () => { let state; let noteData; @@ -103,7 +103,8 @@ describe('Mutation Notes Store', () => { }; mutations.SET_INITIAL_NOTES(state, [note]); - expect(state.notes).toEqual([note]); + expect(state.notes[0].id).toEqual(note.id); + expect(state.notes.length).toEqual(1); }); }); diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js index 274d7591c71..d4a148e6ab1 100644 --- a/spec/javascripts/notes_spec.js +++ b/spec/javascripts/notes_spec.js @@ -34,6 +34,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; describe('Notes', function() { const FLASH_TYPE_ALERT = 'alert'; + const NOTES_POST_PATH = /(.*)\/notes\?html=true$/; var commentsTemplate = 'merge_requests/merge_request_with_comment.html.raw'; preloadFixtures(commentsTemplate); @@ -154,7 +155,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; $form.find('textarea.js-note-text').val(sampleComment); mock = new MockAdapter(axios); - mock.onPost(/(.*)\/notes$/).reply(200, noteEntity); + mock.onPost(NOTES_POST_PATH).reply(200, noteEntity); }); afterEach(() => { @@ -506,11 +507,11 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; let mock; function mockNotesPost() { - mock.onPost(/(.*)\/notes$/).reply(200, note); 
+ mock.onPost(NOTES_POST_PATH).reply(200, note); } function mockNotesPostError() { - mock.onPost(/(.*)\/notes$/).networkError(); + mock.onPost(NOTES_POST_PATH).networkError(); } beforeEach(() => { @@ -631,7 +632,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; beforeEach(() => { mock = new MockAdapter(axios); - mock.onPost(/(.*)\/notes$/).reply(200, note); + mock.onPost(NOTES_POST_PATH).reply(200, note); this.notes = new Notes('', []); window.gon.current_username = 'root'; @@ -684,7 +685,7 @@ import timeoutPromise from './helpers/set_timeout_promise_helper'; beforeEach(() => { mock = new MockAdapter(axios); - mock.onPost(/(.*)\/notes$/).reply(200, note); + mock.onPost(NOTES_POST_PATH).reply(200, note); this.notes = new Notes('', []); window.gon.current_username = 'root'; diff --git a/spec/javascripts/pages/admin/abuse_reports/abuse_reports_spec.js b/spec/javascripts/pages/admin/abuse_reports/abuse_reports_spec.js index d2386077aa6..349549b9e1f 100644 --- a/spec/javascripts/pages/admin/abuse_reports/abuse_reports_spec.js +++ b/spec/javascripts/pages/admin/abuse_reports/abuse_reports_spec.js @@ -22,19 +22,19 @@ describe('Abuse Reports', () => { it('should truncate long messages', () => { const $longMessage = findMessage('LONG MESSAGE'); - expect($longMessage.data('original-message')).toEqual(jasmine.anything()); + expect($longMessage.data('originalMessage')).toEqual(jasmine.anything()); assertMaxLength($longMessage); }); it('should not truncate short messages', () => { const $shortMessage = findMessage('SHORT MESSAGE'); - expect($shortMessage.data('original-message')).not.toEqual(jasmine.anything()); + expect($shortMessage.data('originalMessage')).not.toEqual(jasmine.anything()); }); it('should allow clicking a truncated message to expand and collapse the full message', () => { const $longMessage = findMessage('LONG MESSAGE'); $longMessage.click(); - expect($longMessage.data('original-message').length).toEqual($longMessage.text().length); + expect($longMessage.data('originalMessage').length).toEqual($longMessage.text().length); $longMessage.click(); assertMaxLength($longMessage); }); diff --git a/spec/javascripts/pages/admin/jobs/index/components/stop_jobs_modal_spec.js b/spec/javascripts/pages/admin/jobs/index/components/stop_jobs_modal_spec.js index 440a6585d57..a6fe9fb65e9 100644 --- a/spec/javascripts/pages/admin/jobs/index/components/stop_jobs_modal_spec.js +++ b/spec/javascripts/pages/admin/jobs/index/components/stop_jobs_modal_spec.js @@ -4,7 +4,7 @@ import axios from '~/lib/utils/axios_utils'; import stopJobsModal from '~/pages/admin/jobs/index/components/stop_jobs_modal.vue'; import * as urlUtility from '~/lib/utils/url_utility'; -import mountComponent from '../../../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('stop_jobs_modal.vue', () => { const props = { diff --git a/spec/javascripts/pages/milestones/shared/components/delete_milestone_modal_spec.js b/spec/javascripts/pages/milestones/shared/components/delete_milestone_modal_spec.js index 3cd33a3e900..6074e06fcec 100644 --- a/spec/javascripts/pages/milestones/shared/components/delete_milestone_modal_spec.js +++ b/spec/javascripts/pages/milestones/shared/components/delete_milestone_modal_spec.js @@ -5,7 +5,7 @@ import deleteMilestoneModal from '~/pages/milestones/shared/components/delete_mi import eventHub from '~/pages/milestones/shared/event_hub'; import * as urlUtility from '~/lib/utils/url_utility'; -import mountComponent from 
'../../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('delete_milestone_modal.vue', () => { const Component = Vue.extend(deleteMilestoneModal); diff --git a/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js index 040d14efed2..4655e29eed0 100644 --- a/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js +++ b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import Translate from '~/vue_shared/translate'; -import IntervalPatternInput from '~/pipeline_schedules/components/interval_pattern_input.vue'; +import IntervalPatternInput from '~/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue'; Vue.use(Translate); diff --git a/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js index ed481cb60a1..f95a7cef18a 100644 --- a/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js +++ b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import Cookies from 'js-cookie'; -import PipelineSchedulesCallout from '~/pipeline_schedules/components/pipeline_schedules_callout.vue'; +import PipelineSchedulesCallout from '~/pages/projects/pipeline_schedules/shared/components/pipeline_schedules_callout.vue'; const PipelineSchedulesCalloutComponent = Vue.extend(PipelineSchedulesCallout); const cookieKey = 'pipeline_schedules_callout_dismissed'; diff --git a/spec/javascripts/pipelines/async_button_spec.js b/spec/javascripts/pipelines/async_button_spec.js index d010d897642..e0ea3649646 100644 --- a/spec/javascripts/pipelines/async_button_spec.js +++ b/spec/javascripts/pipelines/async_button_spec.js @@ -15,6 +15,8 @@ describe('Pipelines Async Button', () => { title: 'Foo', icon: 'repeat', cssClass: 'bar', + pipelineId: 123, + type: 'explode', }, }).$mount(); }); @@ -38,9 +40,9 @@ describe('Pipelines Async Button', () => { describe('With confirm dialog', () => { it('should call the service when confimation is positive', () => { - spyOn(window, 'confirm').and.returnValue(true); - eventHub.$on('postAction', (endpoint) => { - expect(endpoint).toEqual('/foo'); + eventHub.$on('openConfirmationModal', (data) => { + expect(data.pipelineId).toEqual(123); + expect(data.type).toEqual('explode'); }); component = new AsyncButtonComponent({ @@ -49,7 +51,8 @@ describe('Pipelines Async Button', () => { title: 'Foo', icon: 'fa fa-foo', cssClass: 'bar', - confirmActionMessage: 'bar', + pipelineId: 123, + type: 'explode', }, }).$mount(); diff --git a/spec/javascripts/pipelines/graph/job_component_spec.js b/spec/javascripts/pipelines/graph/job_component_spec.js index c3dc7b53d0f..ce181a1e515 100644 --- a/spec/javascripts/pipelines/graph/job_component_spec.js +++ b/spec/javascripts/pipelines/graph/job_component_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import jobComponent from '~/pipelines/components/graph/job_component.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('pipeline graph job component', () => { let JobComponent; diff --git 
a/spec/javascripts/pipelines/pipeline_details_mediator_spec.js b/spec/javascripts/pipelines/pipeline_details_mediator_spec.js index bc6413a159f..e58a8018ed5 100644 --- a/spec/javascripts/pipelines/pipeline_details_mediator_spec.js +++ b/spec/javascripts/pipelines/pipeline_details_mediator_spec.js @@ -1,6 +1,6 @@ import _ from 'underscore'; import Vue from 'vue'; -import PipelineMediator from '~/pipelines/pipeline_details_mediatior'; +import PipelineMediator from '~/pipelines/pipeline_details_mediator'; describe('PipelineMdediator', () => { let mediator; diff --git a/spec/javascripts/pipelines/pipeline_store_spec.js b/spec/javascripts/pipelines/pipeline_store_spec.js index 85d13445b01..ab2287cc344 100644 --- a/spec/javascripts/pipelines/pipeline_store_spec.js +++ b/spec/javascripts/pipelines/pipeline_store_spec.js @@ -8,7 +8,6 @@ describe('Pipeline Store', () => { }); it('should set defaults', () => { - expect(store.state).toEqual({ pipeline: {} }); expect(store.state.pipeline).toEqual({}); }); diff --git a/spec/javascripts/pipelines/pipelines_spec.js b/spec/javascripts/pipelines/pipelines_spec.js index a99ebc4e51a..54d5bfd51e6 100644 --- a/spec/javascripts/pipelines/pipelines_spec.js +++ b/spec/javascripts/pipelines/pipelines_spec.js @@ -2,7 +2,7 @@ import _ from 'underscore'; import Vue from 'vue'; import pipelinesComp from '~/pipelines/components/pipelines.vue'; import Store from '~/pipelines/stores/pipelines_store'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Pipelines', () => { const jsonFixtureName = 'pipelines/pipelines.json'; diff --git a/spec/javascripts/profile/account/components/delete_account_modal_spec.js b/spec/javascripts/profile/account/components/delete_account_modal_spec.js index 588b61196a5..a0939ff5c20 100644 --- a/spec/javascripts/profile/account/components/delete_account_modal_spec.js +++ b/spec/javascripts/profile/account/components/delete_account_modal_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import deleteAccountModal from '~/profile/account/components/delete_account_modal.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('DeleteAccountModal component', () => { const actionUrl = `${gl.TEST_HOST}/delete/user`; diff --git a/spec/javascripts/projects/project_import_gitlab_project_spec.js b/spec/javascripts/projects/project_import_gitlab_project_spec.js index 2f1aae109e3..126f73103e0 100644 --- a/spec/javascripts/projects/project_import_gitlab_project_spec.js +++ b/spec/javascripts/projects/project_import_gitlab_project_spec.js @@ -10,7 +10,7 @@ describe('Import Gitlab project', () => { <input class="js-path-name" /> `); - projectImportGitlab.bindEvents(); + projectImportGitlab(); }); afterEach(() => { diff --git a/spec/javascripts/projects/project_new_spec.js b/spec/javascripts/projects/project_new_spec.js index c314ca8ab72..8731ce35d81 100644 --- a/spec/javascripts/projects/project_new_spec.js +++ b/spec/javascripts/projects/project_new_spec.js @@ -27,7 +27,7 @@ describe('New Project', () => { }); it('does not change project path for disabled $projectImportUrl', () => { - $projectImportUrl.attr('disabled', true); + $projectImportUrl.prop('disabled', true); projectNew.deriveProjectPathFromUrl($projectImportUrl); @@ -36,7 +36,7 @@ describe('New Project', () => { describe('for enabled $projectImportUrl', () => { beforeEach(() => { - 
$projectImportUrl.attr('disabled', false); + $projectImportUrl.prop('disabled', false); }); it('does not change project path if it is set by user', () => { diff --git a/spec/javascripts/projects_dropdown/components/app_spec.js b/spec/javascripts/projects_dropdown/components/app_spec.js index 42f0f6fc1af..2054fef790b 100644 --- a/spec/javascripts/projects_dropdown/components/app_spec.js +++ b/spec/javascripts/projects_dropdown/components/app_spec.js @@ -6,7 +6,7 @@ import eventHub from '~/projects_dropdown/event_hub'; import ProjectsStore from '~/projects_dropdown/store/projects_store'; import ProjectsService from '~/projects_dropdown/service/projects_service'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { currentSession, mockProject, mockRawProject } from '../mock_data'; const createComponent = () => { diff --git a/spec/javascripts/projects_dropdown/components/projects_list_frequent_spec.js b/spec/javascripts/projects_dropdown/components/projects_list_frequent_spec.js index fcd0f6a3630..2bafb4e81ca 100644 --- a/spec/javascripts/projects_dropdown/components/projects_list_frequent_spec.js +++ b/spec/javascripts/projects_dropdown/components/projects_list_frequent_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import projectsListFrequentComponent from '~/projects_dropdown/components/projects_list_frequent.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { mockFrequents } from '../mock_data'; const createComponent = () => { diff --git a/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js b/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js index edef150dd1e..c193258474e 100644 --- a/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js +++ b/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import projectsListItemComponent from '~/projects_dropdown/components/projects_list_item.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { mockProject } from '../mock_data'; const createComponent = () => { diff --git a/spec/javascripts/projects_dropdown/components/projects_list_search_spec.js b/spec/javascripts/projects_dropdown/components/projects_list_search_spec.js index 67f8a8946c2..c4b86d77034 100644 --- a/spec/javascripts/projects_dropdown/components/projects_list_search_spec.js +++ b/spec/javascripts/projects_dropdown/components/projects_list_search_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import projectsListSearchComponent from '~/projects_dropdown/components/projects_list_search.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { mockProject } from '../mock_data'; const createComponent = () => { diff --git a/spec/javascripts/projects_dropdown/components/search_spec.js b/spec/javascripts/projects_dropdown/components/search_spec.js index 24d8a00b254..601264258c2 100644 --- a/spec/javascripts/projects_dropdown/components/search_spec.js +++ b/spec/javascripts/projects_dropdown/components/search_spec.js @@ -3,7 +3,7 @@ import Vue from 'vue'; import searchComponent from '~/projects_dropdown/components/search.vue'; import eventHub from 
'~/projects_dropdown/event_hub'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const createComponent = () => { const Component = Vue.extend(searchComponent); diff --git a/spec/javascripts/registry/components/app_spec.js b/spec/javascripts/registry/components/app_spec.js index 6a8a85e3dfb..cf1d0625397 100644 --- a/spec/javascripts/registry/components/app_spec.js +++ b/spec/javascripts/registry/components/app_spec.js @@ -1,7 +1,7 @@ import _ from 'underscore'; import Vue from 'vue'; import registry from '~/registry/components/app.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { reposServerResponse } from '../mock_data'; describe('Registry List', () => { diff --git a/spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js b/spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js index debde1bb357..b509cedbe80 100644 --- a/spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js +++ b/spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import store from '~/ide/stores'; import listCollapsed from '~/ide/components/commit_sidebar/list_collapsed.vue'; -import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { file } from '../../helpers'; describe('Multi-file editor commit sidebar list collapsed', () => { diff --git a/spec/javascripts/repo/components/commit_sidebar/list_item_spec.js b/spec/javascripts/repo/components/commit_sidebar/list_item_spec.js index 4b20fdf70d6..6f1a1d874d3 100644 --- a/spec/javascripts/repo/components/commit_sidebar/list_item_spec.js +++ b/spec/javascripts/repo/components/commit_sidebar/list_item_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import listItem from '~/ide/components/commit_sidebar/list_item.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { file } from '../../helpers'; describe('Multi-file editor commit sidebar list item', () => { diff --git a/spec/javascripts/repo/components/commit_sidebar/list_spec.js b/spec/javascripts/repo/components/commit_sidebar/list_spec.js index cb5240ad118..aeb9de9ace4 100644 --- a/spec/javascripts/repo/components/commit_sidebar/list_spec.js +++ b/spec/javascripts/repo/components/commit_sidebar/list_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import store from '~/ide/stores'; import commitSidebarList from '~/ide/components/commit_sidebar/list.vue'; -import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { file } from '../../helpers'; describe('Multi-file editor commit sidebar list', () => { diff --git a/spec/javascripts/repo/components/ide_context_bar_spec.js b/spec/javascripts/repo/components/ide_context_bar_spec.js index 3f8f37d2343..935da259a99 100644 --- a/spec/javascripts/repo/components/ide_context_bar_spec.js +++ b/spec/javascripts/repo/components/ide_context_bar_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import store from '~/ide/stores'; import ideContextBar from '~/ide/components/ide_context_bar.vue'; -import { createComponentWithStore } from 
'../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; describe('Multi-file editor right context bar', () => { let vm; diff --git a/spec/javascripts/repo/components/ide_side_bar_spec.js b/spec/javascripts/repo/components/ide_side_bar_spec.js index 30e45169205..79c3c8128e8 100644 --- a/spec/javascripts/repo/components/ide_side_bar_spec.js +++ b/spec/javascripts/repo/components/ide_side_bar_spec.js @@ -1,8 +1,8 @@ import Vue from 'vue'; import store from '~/ide/stores'; import ideSidebar from '~/ide/components/ide_side_bar.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { resetStore } from '../helpers'; -import { createComponentWithStore } from '../../helpers/vue_mount_component_helper'; describe('IdeSidebar', () => { let vm; diff --git a/spec/javascripts/repo/components/ide_spec.js b/spec/javascripts/repo/components/ide_spec.js index acfd63eb8de..18135177b5e 100644 --- a/spec/javascripts/repo/components/ide_spec.js +++ b/spec/javascripts/repo/components/ide_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import store from '~/ide/stores'; import ide from '~/ide/components/ide.vue'; -import { createComponentWithStore } from '../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { file, resetStore } from '../helpers'; describe('ide component', () => { diff --git a/spec/javascripts/repo/components/new_branch_form_spec.js b/spec/javascripts/repo/components/new_branch_form_spec.js index cd1d073ec18..82597fc75e8 100644 --- a/spec/javascripts/repo/components/new_branch_form_spec.js +++ b/spec/javascripts/repo/components/new_branch_form_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import store from '~/ide/stores'; import newBranchForm from '~/ide/components/new_branch_form.vue'; -import { createComponentWithStore } from '../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { resetStore } from '../helpers'; describe('Multi-file editor new branch form', () => { diff --git a/spec/javascripts/repo/components/new_dropdown/index_spec.js b/spec/javascripts/repo/components/new_dropdown/index_spec.js index 6efbbf6d75e..4a8e4445e2f 100644 --- a/spec/javascripts/repo/components/new_dropdown/index_spec.js +++ b/spec/javascripts/repo/components/new_dropdown/index_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import store from '~/ide/stores'; import newDropdown from '~/ide/components/new_dropdown/index.vue'; -import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { resetStore } from '../../helpers'; describe('new dropdown component', () => { diff --git a/spec/javascripts/repo/components/new_dropdown/modal_spec.js b/spec/javascripts/repo/components/new_dropdown/modal_spec.js index 8bbc3100357..d6a1fdd115c 100644 --- a/spec/javascripts/repo/components/new_dropdown/modal_spec.js +++ b/spec/javascripts/repo/components/new_dropdown/modal_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import store from '~/ide/stores'; import service from '~/ide/services'; import modal from '~/ide/components/new_dropdown/modal.vue'; -import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { file, resetStore } 
from '../../helpers'; describe('new file modal component', () => { diff --git a/spec/javascripts/repo/components/new_dropdown/upload_spec.js b/spec/javascripts/repo/components/new_dropdown/upload_spec.js index 667112ab21a..ee8aab3a252 100644 --- a/spec/javascripts/repo/components/new_dropdown/upload_spec.js +++ b/spec/javascripts/repo/components/new_dropdown/upload_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import upload from '~/ide/components/new_dropdown/upload.vue'; import store from '~/ide/stores'; import service from '~/ide/services'; -import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; import { resetStore } from '../../helpers'; describe('new dropdown upload', () => { diff --git a/spec/javascripts/repo/components/repo_commit_section_spec.js b/spec/javascripts/repo/components/repo_commit_section_spec.js index 93e94b4f24c..934ada9dec2 100644 --- a/spec/javascripts/repo/components/repo_commit_section_spec.js +++ b/spec/javascripts/repo/components/repo_commit_section_spec.js @@ -3,7 +3,7 @@ import * as urlUtils from '~/lib/utils/url_utility'; import store from '~/ide/stores'; import service from '~/ide/services'; import repoCommitSection from '~/ide/components/repo_commit_section.vue'; -import getSetTimeoutPromise from '../../helpers/set_timeout_promise_helper'; +import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper'; import { file, resetStore } from '../helpers'; describe('RepoCommitSection', () => { diff --git a/spec/javascripts/settings_panels_spec.js b/spec/javascripts/settings_panels_spec.js new file mode 100644 index 00000000000..d433f8c3e07 --- /dev/null +++ b/spec/javascripts/settings_panels_spec.js @@ -0,0 +1,29 @@ +import initSettingsPanels from '~/settings_panels'; + +describe('Settings Panels', () => { + preloadFixtures('projects/ci_cd_settings.html.raw'); + + beforeEach(() => { + loadFixtures('projects/ci_cd_settings.html.raw'); + }); + + describe('initSettingsPane', () => { + afterEach(() => { + location.hash = ''; + }); + + it('should expand linked hash fragment panel', () => { + location.hash = '#js-general-pipeline-settings'; + + const pipelineSettingsPanel = document.querySelector('#js-general-pipeline-settings'); + // Our test environment automatically expands everything so we need to clear that out first + pipelineSettingsPanel.classList.remove('expanded'); + + expect(pipelineSettingsPanel.classList.contains('expanded')).toBe(false); + + initSettingsPanels(); + + expect(pipelineSettingsPanel.classList.contains('expanded')).toBe(true); + }); + }); +}); diff --git a/spec/javascripts/sidebar/assignees_spec.js b/spec/javascripts/sidebar/assignees_spec.js index c9453a21189..4e4343812bd 100644 --- a/spec/javascripts/sidebar/assignees_spec.js +++ b/spec/javascripts/sidebar/assignees_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import Assignee from '~/sidebar/components/assignees/assignees'; +import Assignee from '~/sidebar/components/assignees/assignees.vue'; import UsersMock from './mock_data'; import UsersMockHelper from '../helpers/user_mock_data_helper'; diff --git a/spec/javascripts/sidebar/lock/edit_form_buttons_spec.js b/spec/javascripts/sidebar/lock/edit_form_buttons_spec.js index b0ea8ae0206..deeea669de8 100644 --- a/spec/javascripts/sidebar/lock/edit_form_buttons_spec.js +++ b/spec/javascripts/sidebar/lock/edit_form_buttons_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import editFormButtons from 
'~/sidebar/components/lock/edit_form_buttons.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('EditFormButtons', () => { let vm1; diff --git a/spec/javascripts/sidebar/participants_spec.js b/spec/javascripts/sidebar/participants_spec.js index 30cc549c7c0..2a3b60c399c 100644 --- a/spec/javascripts/sidebar/participants_spec.js +++ b/spec/javascripts/sidebar/participants_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import participants from '~/sidebar/components/participants/participants.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const PARTICIPANT = { id: 1, diff --git a/spec/javascripts/sidebar/sidebar_assignees_spec.js b/spec/javascripts/sidebar/sidebar_assignees_spec.js index 6bb6d639f24..2fbb7268e0b 100644 --- a/spec/javascripts/sidebar/sidebar_assignees_spec.js +++ b/spec/javascripts/sidebar/sidebar_assignees_spec.js @@ -4,8 +4,8 @@ import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees'; import SidebarMediator from '~/sidebar/sidebar_mediator'; import SidebarService from '~/sidebar/services/sidebar_service'; import SidebarStore from '~/sidebar/stores/sidebar_store'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import Mock from './mock_data'; -import mountComponent from '../helpers/vue_mount_component_helper'; describe('sidebar assignees', () => { let vm; diff --git a/spec/javascripts/sidebar/sidebar_move_issue_spec.js b/spec/javascripts/sidebar/sidebar_move_issue_spec.js index 97f762d07a7..0da5d91e376 100644 --- a/spec/javascripts/sidebar/sidebar_move_issue_spec.js +++ b/spec/javascripts/sidebar/sidebar_move_issue_spec.js @@ -78,7 +78,7 @@ describe('SidebarMoveIssue', () => { this.sidebarMoveIssue.onConfirmClicked(); expect(this.mediator.moveIssue).toHaveBeenCalled(); - expect(this.$confirmButton.attr('disabled')).toBe('disabled'); + expect(this.$confirmButton.prop('disabled')).toBeTruthy(); expect(this.$confirmButton.hasClass('is-loading')).toBe(true); }); @@ -93,7 +93,7 @@ describe('SidebarMoveIssue', () => { // Wait for the move issue request to fail setTimeout(() => { expect(window.Flash).toHaveBeenCalled(); - expect(this.$confirmButton.attr('disabled')).toBe(undefined); + expect(this.$confirmButton.prop('disabled')).toBeFalsy(); expect(this.$confirmButton.hasClass('is-loading')).toBe(false); done(); }); @@ -120,7 +120,7 @@ describe('SidebarMoveIssue', () => { this.$content.find('.js-move-issue-dropdown-item').eq(0).trigger('click'); expect(this.mediator.setMoveToProjectId).toHaveBeenCalledWith(0); - expect(this.$confirmButton.attr('disabled')).toBe('disabled'); + expect(this.$confirmButton.prop('disabled')).toBeTruthy(); done(); }, 0); }); diff --git a/spec/javascripts/sidebar/sidebar_subscriptions_spec.js b/spec/javascripts/sidebar/sidebar_subscriptions_spec.js index a6113cb0bae..56a2543660b 100644 --- a/spec/javascripts/sidebar/sidebar_subscriptions_spec.js +++ b/spec/javascripts/sidebar/sidebar_subscriptions_spec.js @@ -4,7 +4,7 @@ import SidebarMediator from '~/sidebar/sidebar_mediator'; import SidebarService from '~/sidebar/services/sidebar_service'; import SidebarStore from '~/sidebar/stores/sidebar_store'; import eventHub from '~/sidebar/event_hub'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import Mock from 
'./mock_data'; describe('Sidebar Subscriptions', function () { diff --git a/spec/javascripts/sidebar/subscriptions_spec.js b/spec/javascripts/sidebar/subscriptions_spec.js index 79db05f04ed..aee8f0acbb9 100644 --- a/spec/javascripts/sidebar/subscriptions_spec.js +++ b/spec/javascripts/sidebar/subscriptions_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import subscriptions from '~/sidebar/components/subscriptions/subscriptions.vue'; -import mountComponent from '../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Subscriptions', function () { let vm; diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js index 9b2a5379855..1bcfdfe72b6 100644 --- a/spec/javascripts/test_bundle.js +++ b/spec/javascripts/test_bundle.js @@ -1,6 +1,6 @@ /* eslint-disable jasmine/no-global-setup */ import $ from 'jquery'; -import 'jasmine-jquery'; +import 'vendor/jasmine-jquery'; import '~/commons'; import Vue from 'vue'; @@ -37,6 +37,7 @@ window.$ = window.jQuery = $; window.gl = window.gl || {}; window.gl.TEST_HOST = 'http://test.host'; window.gon = window.gon || {}; +window.gon.test_env = true; let hasUnhandledPromiseRejections = false; @@ -113,7 +114,9 @@ if (process.env.BABEL_ENV === 'coverage') { // exempt these files from the coverage report const troubleMakers = [ './blob_edit/blob_bundle.js', - './boards/boards_bundle.js', + './boards/components/modal/empty_state.js', + './boards/components/modal/footer.js', + './boards/components/modal/header.js', './cycle_analytics/cycle_analytics_bundle.js', './cycle_analytics/components/stage_plan_component.js', './cycle_analytics/components/stage_staging_component.js', @@ -124,7 +127,6 @@ if (process.env.BABEL_ENV === 'coverage') { './diff_notes/components/resolve_count.js', './dispatcher.js', './environments/environments_bundle.js', - './filtered_search/filtered_search_bundle.js', './graphs/graphs_bundle.js', './issuable/time_tracking/time_tracking_bundle.js', './main.js', @@ -144,6 +146,9 @@ if (process.env.BABEL_ENV === 'coverage') { describe('Uncovered files', function () { const sourceFiles = require.context('~', true, /\.js$/); + + $.holdReady(true); + sourceFiles.keys().forEach(function (path) { // ignore if there is a matching spec file if (testsContext.keys().indexOf(`${path.replace(/\.js$/, '')}_spec`) > -1) { diff --git a/spec/javascripts/u2f/authenticate_spec.js b/spec/javascripts/u2f/authenticate_spec.js index 29b15f3a782..4d15bcc4956 100644 --- a/spec/javascripts/u2f/authenticate_spec.js +++ b/spec/javascripts/u2f/authenticate_spec.js @@ -5,7 +5,7 @@ import MockU2FDevice from './mock_u2f_device'; describe('U2FAuthenticate', () => { preloadFixtures('u2f/authenticate.html.raw'); - beforeEach(() => { + beforeEach((done) => { loadFixtures('u2f/authenticate.html.raw'); this.u2fDevice = new MockU2FDevice(); this.container = $('#js-authenticate-u2f'); @@ -22,7 +22,7 @@ describe('U2FAuthenticate', () => { // bypass automatic form submission within renderAuthenticated spyOn(this.component, 'renderAuthenticated').and.returnValue(true); - return this.component.start(); + this.component.start().then(done).catch(done.fail); }); it('allows authenticating via a U2F device', () => { @@ -34,7 +34,7 @@ describe('U2FAuthenticate', () => { expect(this.component.renderAuthenticated).toHaveBeenCalledWith('{"deviceData":"this is data from the device"}'); }); - return describe('errors', () => { + describe('errors', () => { it('displays an error message', () => { const setupButton = 
this.container.find('#js-login-u2f-device'); setupButton.trigger('click'); diff --git a/spec/javascripts/u2f/register_spec.js b/spec/javascripts/u2f/register_spec.js index b0051f11362..dbe89c2923c 100644 --- a/spec/javascripts/u2f/register_spec.js +++ b/spec/javascripts/u2f/register_spec.js @@ -5,12 +5,12 @@ import MockU2FDevice from './mock_u2f_device'; describe('U2FRegister', () => { preloadFixtures('u2f/register.html.raw'); - beforeEach(() => { + beforeEach((done) => { loadFixtures('u2f/register.html.raw'); this.u2fDevice = new MockU2FDevice(); this.container = $('#js-register-u2f'); this.component = new U2FRegister(this.container, $('#js-register-u2f-templates'), {}, 'token'); - return this.component.start(); + this.component.start().then(done).catch(done.fail); }); it('allows registering a U2F device', () => { diff --git a/spec/javascripts/u2f/util_spec.js b/spec/javascripts/u2f/util_spec.js new file mode 100644 index 00000000000..4187183236f --- /dev/null +++ b/spec/javascripts/u2f/util_spec.js @@ -0,0 +1,45 @@ +import { canInjectU2fApi } from '~/u2f/util'; + +describe('U2F Utils', () => { + describe('canInjectU2fApi', () => { + it('returns false for Chrome < 41', () => { + const userAgent = 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.28 Safari/537.36'; + expect(canInjectU2fApi(userAgent)).toBe(false); + }); + + it('returns true for Chrome >= 41', () => { + const userAgent = 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'; + expect(canInjectU2fApi(userAgent)).toBe(true); + }); + + it('returns false for Opera < 40', () => { + const userAgent = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36 OPR/32.0.1948.25'; + expect(canInjectU2fApi(userAgent)).toBe(false); + }); + + it('returns true for Opera >= 40', () => { + const userAgent = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991'; + expect(canInjectU2fApi(userAgent)).toBe(true); + }); + + it('returns false for Safari', () => { + const userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/603.2.4 (KHTML, like Gecko) Version/10.1.1 Safari/603.2.4'; + expect(canInjectU2fApi(userAgent)).toBe(false); + }); + + it('returns false for Chrome on Android', () => { + const userAgent = 'Mozilla/5.0 (Linux; Android 7.0; VS988 Build/NRD90U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3145.0 Mobile Safari/537.36'; + expect(canInjectU2fApi(userAgent)).toBe(false); + }); + + it('returns false for Chrome on iOS', () => { + const userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1'; + expect(canInjectU2fApi(userAgent)).toBe(false); + }); + + it('returns false for Safari on iOS', () => { + const userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A356 Safari/604.1'; + expect(canInjectU2fApi(userAgent)).toBe(false); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js index f14d5f6f76c..db27aa144d6 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js @@ -1,6 +1,6 @@ import Vue from 
'vue'; import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetAuthor', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js index 8c55622b15e..6784b498c29 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import authorTimeComponent from '~/vue_merge_request_widget/components/mr_widget_author_time.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetAuthorTime', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js index 13e5595bbfc..235c33fac0d 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetHeader', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js index cc43639f576..367c499daaf 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import mergeHelpComponent from '~/vue_merge_request_widget/components/mr_widget_merge_help.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetMergeHelp', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js index d7af956c9c1..431cb7f3913 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import mockData from '../mock_data'; describe('MRWidgetPipeline', () => { diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_rebase_spec.js index 66ecaa316c8..b453d180a40 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_rebase_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_rebase_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; import component from '~/vue_merge_request_widget/components/states/mr_widget_rebase.vue'; -import mountComponent from 
'../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Merge request widget rebase component', () => { let Component; diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js index 637bf483deb..5de6ac4079d 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import relatedLinksComponent from '~/vue_merge_request_widget/components/mr_widget_related_links.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetRelatedLinks', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js index c39fcda0071..0b25500caf4 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import mrStatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MR widget status icon component', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js index f98ebdb38e6..e818f87b4c8 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import archivedComponent from '~/vue_merge_request_widget/components/states/mr_widget_archived.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetArchived', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js index 95c94e95e3a..d069dc3fcc6 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import autoMergeFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetAutoMergeFailed', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js index 658cadddb81..658612aad3c 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import checkingComponent from 
'~/vue_merge_request_widget/components/states/mr_widget_checking.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetChecking', () => { let Component; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js index 51a34739ee9..0e3c134d3ac 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetClosed', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js index a7d69fdcdb9..5323523abc0 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetConflicts', () => { let Component; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js index a57b9811e08..dd1d62cd4ed 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import failedToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetFailedToMerge', () => { let Component; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js index df56c4e2c5c..dd907ad9015 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import mwpsComponent from '~/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetMergeWhenPipelineSucceeds', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js index 43a989393ba..c2c92d8ac56 
100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import mergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetMerged', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merging_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merging_spec.js index 0b2ed2d4086..d2d219e4bdb 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merging_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merging_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import mergingComponent from '~/vue_merge_request_widget/components/states/mr_widget_merging.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetMerging', () => { let vm; diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js index 720effb5c1c..34f76b39b28 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js @@ -1,38 +1,22 @@ import Vue from 'vue'; -import missingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch'; - -const createComponent = () => { - const Component = Vue.extend(missingBranchComponent); - const mr = { - sourceBranchRemoved: true, - }; - - return new Component({ - el: document.createElement('div'), - propsData: { mr }, - }); -}; +import missingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetMissingBranch', () => { - describe('props', () => { - it('should have props', () => { - const mrProp = missingBranchComponent.props.mr; + let vm; - expect(mrProp.type instanceof Object).toBeTruthy(); - expect(mrProp.required).toBeTruthy(); - }); + beforeEach(() => { + const Component = Vue.extend(missingBranchComponent); + vm = mountComponent(Component, { mr: { sourceBranchRemoved: true } }); }); - describe('components', () => { - it('should have components added', () => { - expect(missingBranchComponent.components['mr-widget-merge-help']).toBeDefined(); - }); + afterEach(() => { + vm.$destroy(); }); describe('computed', () => { describe('missingBranchName', () => { it('should return proper branch name', () => { - const vm = createComponent(); expect(vm.missingBranchName).toEqual('source'); vm.mr.sourceBranchRemoved = false; @@ -43,7 +27,7 @@ describe('MRWidgetMissingBranch', () => { describe('template', () => { it('should have correct elements', () => { - const el = createComponent().$el; + const el = vm.$el; const content = el.textContent.replace(/\n(\s)+/g, ' ').trim(); expect(el.classList.contains('mr-widget-body')).toBeTruthy(); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js 
index 33f20ab132d..9f8b96c118b 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js @@ -1,17 +1,24 @@ import Vue from 'vue'; -import notAllowedComponent from '~/vue_merge_request_widget/components/states/mr_widget_not_allowed'; +import notAllowedComponent from '~/vue_merge_request_widget/components/states/mr_widget_not_allowed.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetNotAllowed', () => { - describe('template', () => { + let vm; + beforeEach(() => { const Component = Vue.extend(notAllowedComponent); - const vm = new Component({ - el: document.createElement('div'), - }); - it('should have correct elements', () => { - expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); - expect(vm.$el.innerText).toContain('Ready to be merged automatically.'); - expect(vm.$el.innerText).toContain('Ask someone with write access to this repository to merge this request'); - }); + vm = mountComponent(Component); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders success icon', () => { + expect(vm.$el.querySelector('.ci-status-icon-success')).not.toBe(null); + }); + + it('renders informative text', () => { + expect(vm.$el.innerText).toContain('Ready to be merged automatically.'); + expect(vm.$el.innerText).toContain('Ask someone with write access to this repository to merge this request'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js index d0702f9f503..baacbc03fb1 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js @@ -1,16 +1,23 @@ import Vue from 'vue'; -import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked'; +import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('MRWidgetPipelineBlocked', () => { - describe('template', () => { + let vm; + beforeEach(() => { const Component = Vue.extend(pipelineBlockedComponent); - const vm = new Component({ - el: document.createElement('div'), - }); - it('should have correct elements', () => { - expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); - expect(vm.$el.innerText).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed'); - }); + vm = mountComponent(Component); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders warning icon', () => { + expect(vm.$el.querySelector('.ci-status-icon-warning')).not.toBe(null); + }); + + it('renders information text', () => { + expect(vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ')).toContain('Pipeline blocked. 
The pipeline for this merge request requires a manual action to proceed'); }); }); diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js index cd00d0a39a3..18ba34b55a5 100644 --- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js +++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js @@ -3,8 +3,8 @@ import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options'; import eventHub from '~/vue_merge_request_widget/event_hub'; import notify from '~/lib/utils/notify'; import { stateKey } from '~/vue_merge_request_widget/stores/state_maps'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; import mockData from './mock_data'; -import mountComponent from '../helpers/vue_mount_component_helper'; const returnPromise = data => new Promise((resolve) => { resolve({ @@ -295,6 +295,15 @@ describe('mrWidgetOptions', () => { expect(notify.notifyMe).not.toHaveBeenCalled(); }); + + it('should not notify if no pipeline provided', () => { + vm.handleNotification({ + ...data, + pipeline: undefined, + }); + + expect(notify.notifyMe).not.toHaveBeenCalled(); + }); }); describe('resumePolling', () => { diff --git a/spec/javascripts/vue_shared/components/ci_badge_link_spec.js b/spec/javascripts/vue_shared/components/ci_badge_link_spec.js index 8762ce9903b..668742ebaee 100644 --- a/spec/javascripts/vue_shared/components/ci_badge_link_spec.js +++ b/spec/javascripts/vue_shared/components/ci_badge_link_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import ciBadge from '~/vue_shared/components/ci_badge_link.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('CI Badge Link Component', () => { let CIBadge; diff --git a/spec/javascripts/vue_shared/components/clipboard_button_spec.js b/spec/javascripts/vue_shared/components/clipboard_button_spec.js index 08e4e1f8337..d0fc10d69ea 100644 --- a/spec/javascripts/vue_shared/components/clipboard_button_spec.js +++ b/spec/javascripts/vue_shared/components/clipboard_button_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import clipboardButton from '~/vue_shared/components/clipboard_button.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('clipboard button', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/confirmation_input_spec.js b/spec/javascripts/vue_shared/components/confirmation_input_spec.js deleted file mode 100644 index a6a12614e77..00000000000 --- a/spec/javascripts/vue_shared/components/confirmation_input_spec.js +++ /dev/null @@ -1,63 +0,0 @@ -import Vue from 'vue'; -import confirmationInput from '~/vue_shared/components/confirmation_input.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; - -describe('Confirmation input component', () => { - const Component = Vue.extend(confirmationInput); - const props = { - inputId: 'dummy-id', - confirmationKey: 'confirmation-key', - confirmationValue: 'confirmation-value', - }; - let vm; - - afterEach(() => { - vm.$destroy(); - }); - - describe('props', () => { - beforeEach(() => { - vm = mountComponent(Component, props); - }); - - it('sets id of the input field to inputId', () => { - expect(vm.$refs.enteredValue.id).toBe(props.inputId); - }); - - it('sets name of the input field to confirmationKey', () => { - 
expect(vm.$refs.enteredValue.name).toBe(props.confirmationKey); - }); - }); - - describe('computed', () => { - describe('inputLabel', () => { - it('escapes confirmationValue by default', () => { - vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng' }); - expect(vm.inputLabel).toBe('Type <code>n<e></e>ds escap"ng</code> to confirm:'); - }); - - it('does not escape confirmationValue if escapeValue is false', () => { - vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng', shouldEscapeConfirmationValue: false }); - expect(vm.inputLabel).toBe('Type <code>n<e></e>ds escap"ng</code> to confirm:'); - }); - }); - }); - - describe('methods', () => { - describe('hasCorrectValue', () => { - beforeEach(() => { - vm = mountComponent(Component, props); - }); - - it('returns false if entered value is incorrect', () => { - vm.$refs.enteredValue.value = 'incorrect'; - expect(vm.hasCorrectValue()).toBe(false); - }); - - it('returns true if entered value is correct', () => { - vm.$refs.enteredValue.value = props.confirmationValue; - expect(vm.hasCorrectValue()).toBe(true); - }); - }); - }); -}); diff --git a/spec/javascripts/vue_shared/components/expand_button_spec.js b/spec/javascripts/vue_shared/components/expand_button_spec.js index a33ab689dd1..f19589d3b75 100644 --- a/spec/javascripts/vue_shared/components/expand_button_spec.js +++ b/spec/javascripts/vue_shared/components/expand_button_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import expandButton from '~/vue_shared/components/expand_button.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('expand button', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/file_icon_spec.js b/spec/javascripts/vue_shared/components/file_icon_spec.js index d99b17bdc79..f7581251bf0 100644 --- a/spec/javascripts/vue_shared/components/file_icon_spec.js +++ b/spec/javascripts/vue_shared/components/file_icon_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import fileIcon from '~/vue_shared/components/file_icon.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('File Icon component', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/gl_modal_spec.js b/spec/javascripts/vue_shared/components/gl_modal_spec.js new file mode 100644 index 00000000000..2805d9a7003 --- /dev/null +++ b/spec/javascripts/vue_shared/components/gl_modal_spec.js @@ -0,0 +1,192 @@ +import Vue from 'vue'; +import GlModal from '~/vue_shared/components/gl_modal.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; + +const modalComponent = Vue.extend(GlModal); + +describe('GlModal', () => { + let vm; + + afterEach(() => { + vm.$destroy(); + }); + + describe('props', () => { + describe('with id', () => { + const props = { + id: 'my-modal', + }; + + beforeEach(() => { + vm = mountComponent(modalComponent, props); + }); + + it('assigns the id to the modal', () => { + expect(vm.$el.id).toBe(props.id); + }); + }); + + describe('without id', () => { + beforeEach(() => { + vm = mountComponent(modalComponent, { }); + }); + + it('does not add an id attribute to the modal', () => { + expect(vm.$el.hasAttribute('id')).toBe(false); + }); + }); + + describe('with headerTitleText', () => { + const props = { + headerTitleText: 'my title text', + }; + + beforeEach(() => { + vm = 
mountComponent(modalComponent, props); + }); + + it('sets the modal title', () => { + const modalTitle = vm.$el.querySelector('.modal-title'); + expect(modalTitle.innerHTML.trim()).toBe(props.headerTitleText); + }); + }); + + describe('with footerPrimaryButtonVariant', () => { + const props = { + footerPrimaryButtonVariant: 'danger', + }; + + beforeEach(() => { + vm = mountComponent(modalComponent, props); + }); + + it('sets the primary button class', () => { + const primaryButton = vm.$el.querySelector('.modal-footer button:last-of-type'); + expect(primaryButton).toHaveClass(`btn-${props.footerPrimaryButtonVariant}`); + }); + }); + + describe('with footerPrimaryButtonText', () => { + const props = { + footerPrimaryButtonText: 'my button text', + }; + + beforeEach(() => { + vm = mountComponent(modalComponent, props); + }); + + it('sets the primary button text', () => { + const primaryButton = vm.$el.querySelector('.modal-footer button:last-of-type'); + expect(primaryButton.innerHTML.trim()).toBe(props.footerPrimaryButtonText); + }); + }); + }); + + it('works with data-toggle="modal"', (done) => { + setFixtures(` + <button id="modal-button" data-toggle="modal" data-target="#my-modal"></button> + <div id="modal-container"></div> + `); + + const modalContainer = document.getElementById('modal-container'); + const modalButton = document.getElementById('modal-button'); + vm = mountComponent(modalComponent, { + id: 'my-modal', + }, modalContainer); + $(vm.$el).on('shown.bs.modal', () => done()); + + modalButton.click(); + }); + + describe('methods', () => { + const dummyEvent = 'not really an event'; + + beforeEach(() => { + vm = mountComponent(modalComponent, { }); + spyOn(vm, '$emit'); + }); + + describe('emitCancel', () => { + it('emits a cancel event', () => { + vm.emitCancel(dummyEvent); + + expect(vm.$emit).toHaveBeenCalledWith('cancel', dummyEvent); + }); + }); + + describe('emitSubmit', () => { + it('emits a submit event', () => { + vm.emitSubmit(dummyEvent); + + expect(vm.$emit).toHaveBeenCalledWith('submit', dummyEvent); + }); + }); + }); + + describe('slots', () => { + const slotContent = 'this should go into the slot'; + const modalWithSlot = (slotName) => { + let template; + if (slotName) { + template = ` + <gl-modal> + <template slot="${slotName}">${slotContent}</template> + </gl-modal> + `; + } else { + template = `<gl-modal>${slotContent}</gl-modal>`; + } + + return Vue.extend({ + components: { + GlModal, + }, + template, + }); + }; + + describe('default slot', () => { + beforeEach(() => { + vm = mountComponent(modalWithSlot()); + }); + + it('sets the modal body', () => { + const modalBody = vm.$el.querySelector('.modal-body'); + expect(modalBody.innerHTML).toBe(slotContent); + }); + }); + + describe('header slot', () => { + beforeEach(() => { + vm = mountComponent(modalWithSlot('header')); + }); + + it('sets the modal header', () => { + const modalHeader = vm.$el.querySelector('.modal-header'); + expect(modalHeader.innerHTML).toBe(slotContent); + }); + }); + + describe('title slot', () => { + beforeEach(() => { + vm = mountComponent(modalWithSlot('title')); + }); + + it('sets the modal title', () => { + const modalTitle = vm.$el.querySelector('.modal-title'); + expect(modalTitle.innerHTML).toBe(slotContent); + }); + }); + + describe('footer slot', () => { + beforeEach(() => { + vm = mountComponent(modalWithSlot('footer')); + }); + + it('sets the modal footer', () => { + const modalFooter = vm.$el.querySelector('.modal-footer'); + expect(modalFooter.innerHTML).toBe(slotContent); 
+ }); + }); + }); +}); diff --git a/spec/javascripts/vue_shared/components/header_ci_component_spec.js b/spec/javascripts/vue_shared/components/header_ci_component_spec.js index b378a0bd896..65499a2d730 100644 --- a/spec/javascripts/vue_shared/components/header_ci_component_spec.js +++ b/spec/javascripts/vue_shared/components/header_ci_component_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import headerCi from '~/vue_shared/components/header_ci_component.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Header CI Component', () => { let HeaderCi; diff --git a/spec/javascripts/vue_shared/components/icon_spec.js b/spec/javascripts/vue_shared/components/icon_spec.js index a22b6bd3a67..68d57ebc8f0 100644 --- a/spec/javascripts/vue_shared/components/icon_spec.js +++ b/spec/javascripts/vue_shared/components/icon_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import Icon from '~/vue_shared/components/icon.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Sprite Icon Component', function () { describe('Initialization', function () { diff --git a/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js index 24484796bf1..e6ed77dbb52 100644 --- a/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js +++ b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import issueWarning from '~/vue_shared/components/issue/issue_warning.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const IssueWarning = Vue.extend(issueWarning); diff --git a/spec/javascripts/vue_shared/components/loading_button_spec.js b/spec/javascripts/vue_shared/components/loading_button_spec.js index 49bf8ee6f7c..51c19cd4080 100644 --- a/spec/javascripts/vue_shared/components/loading_button_spec.js +++ b/spec/javascripts/vue_shared/components/loading_button_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import loadingButton from '~/vue_shared/components/loading_button.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const LABEL = 'Hello'; diff --git a/spec/javascripts/vue_shared/components/modal_spec.js b/spec/javascripts/vue_shared/components/modal_spec.js index a5f9c75be4e..8412df74f98 100644 --- a/spec/javascripts/vue_shared/components/modal_spec.js +++ b/spec/javascripts/vue_shared/components/modal_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import modal from '~/vue_shared/components/modal.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const modalComponent = Vue.extend(modal); diff --git a/spec/javascripts/vue_shared/components/navigation_tabs_spec.js b/spec/javascripts/vue_shared/components/navigation_tabs_spec.js index 78e7d747b92..09fda95d7d3 100644 --- a/spec/javascripts/vue_shared/components/navigation_tabs_spec.js +++ b/spec/javascripts/vue_shared/components/navigation_tabs_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import navigationTabs from '~/vue_shared/components/navigation_tabs.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; 
+import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('navigation tabs component', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/notes/placeholder_system_note_spec.js b/spec/javascripts/vue_shared/components/notes/placeholder_system_note_spec.js index 7b8e6c330c2..262571efcb8 100644 --- a/spec/javascripts/vue_shared/components/notes/placeholder_system_note_spec.js +++ b/spec/javascripts/vue_shared/components/notes/placeholder_system_note_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import placeholderSystemNote from '~/vue_shared/components/notes/placeholder_system_note.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('placeholder system note component', () => { let PlaceholderSystemNote; diff --git a/spec/javascripts/vue_shared/components/panel_resizer_spec.js b/spec/javascripts/vue_shared/components/panel_resizer_spec.js index 70ce3dffaba..8efcb54659d 100644 --- a/spec/javascripts/vue_shared/components/panel_resizer_spec.js +++ b/spec/javascripts/vue_shared/components/panel_resizer_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import panelResizer from '~/vue_shared/components/panel_resizer.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Panel Resizer component', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/pikaday_spec.js b/spec/javascripts/vue_shared/components/pikaday_spec.js index 47af9534737..b349e2a2a81 100644 --- a/spec/javascripts/vue_shared/components/pikaday_spec.js +++ b/spec/javascripts/vue_shared/components/pikaday_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import datePicker from '~/vue_shared/components/pikaday.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('datePicker', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js b/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js index cce53193870..8c296af6652 100644 --- a/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import collapsedCalendarIcon from '~/vue_shared/components/sidebar/collapsed_calendar_icon.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('collapsedCalendarIcon', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js b/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js index 2de108da2ac..9d60f9c758f 100644 --- a/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import collapsedGroupedDatePicker from '~/vue_shared/components/sidebar/collapsed_grouped_date_picker.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('collapsedGroupedDatePicker', () => { let vm; diff --git 
a/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js b/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js index 926e11b4d30..8840a5a9dbf 100644 --- a/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import sidebarDatePicker from '~/vue_shared/components/sidebar/date_picker.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('sidebarDatePicker', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js b/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js index 752a9e89d50..c911a129173 100644 --- a/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js +++ b/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import toggleSidebar from '~/vue_shared/components/sidebar/toggle_sidebar.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('toggleSidebar', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js b/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js index a5db0b2c59e..bbd50863069 100644 --- a/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js +++ b/spec/javascripts/vue_shared/components/skeleton_loading_container_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Skeleton loading container', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js b/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js index 6940b04573e..de3bf667fb3 100644 --- a/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js +++ b/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js @@ -2,7 +2,7 @@ import Vue from 'vue'; import stackedProgressBarComponent from '~/vue_shared/components/stacked_progress_bar.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const createComponent = (config) => { const Component = Vue.extend(stackedProgressBarComponent); diff --git a/spec/javascripts/vue_shared/components/toggle_button_spec.js b/spec/javascripts/vue_shared/components/toggle_button_spec.js index 859995d33fa..71952cc39e0 100644 --- a/spec/javascripts/vue_shared/components/toggle_button_spec.js +++ b/spec/javascripts/vue_shared/components/toggle_button_spec.js @@ -1,6 +1,6 @@ import Vue from 'vue'; import toggleButton from '~/vue_shared/components/toggle_button.vue'; -import mountComponent from '../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; describe('Toggle Button', () => { let vm; diff --git a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js index aa93134f2dd..446f025c127 100644 --- 
a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js +++ b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import { placeholderImage } from '~/lazy_loader'; import userAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue'; -import mountComponent from '../../../helpers/vue_mount_component_helper'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; const DEFAULT_PROPS = { size: 99, diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb index 6ee3d531d6e..f7b1a61f4f8 100644 --- a/spec/lib/backup/repository_spec.rb +++ b/spec/lib/backup/repository_spec.rb @@ -33,10 +33,22 @@ describe Backup::Repository do allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1]) end - it 'shows the appropriate error' do - described_class.new.restore + context 'hashed storage' do + it 'shows the appropriate error' do + described_class.new.restore - expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error") + expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} (#{project.disk_path}) - error") + end + end + + context 'legacy storage' do + let!(:project) { create(:project, :legacy_storage) } + + it 'shows the appropriate error' do + described_class.new.restore + + expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error") + end end end end diff --git a/spec/lib/banzai/commit_renderer_spec.rb b/spec/lib/banzai/commit_renderer_spec.rb index 84adaebdcbe..e7ebb2a332f 100644 --- a/spec/lib/banzai/commit_renderer_spec.rb +++ b/spec/lib/banzai/commit_renderer_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Banzai::CommitRenderer do describe '.render' do it 'renders a commit description and title' do - user = double(:user) + user = build(:user) project = create(:project, :repository) expect(Banzai::ObjectRenderer).to receive(:new).with(project, user).and_call_original diff --git a/spec/lib/banzai/filter/html_entity_filter_spec.rb b/spec/lib/banzai/filter/html_entity_filter_spec.rb index 91e18d876d5..1d98fc0d5db 100644 --- a/spec/lib/banzai/filter/html_entity_filter_spec.rb +++ b/spec/lib/banzai/filter/html_entity_filter_spec.rb @@ -3,17 +3,12 @@ require 'spec_helper' describe Banzai::Filter::HtmlEntityFilter do include FilterSpecHelper - let(:unescaped) { 'foo <strike attr="foo">&&&</strike>' } - let(:escaped) { 'foo <strike attr="foo">&&&</strike>' } + let(:unescaped) { 'foo <strike attr="foo">&&&</strike>' } + let(:escaped) { 'foo <strike attr="foo">&&amp;&</strike>' } it 'converts common entities to their HTML-escaped equivalents' do output = filter(unescaped) expect(output).to eq(escaped) end - - it 'does not double-escape' do - escaped = ERB::Util.html_escape("Merge branch 'blabla' into 'master'") - expect(filter(escaped)).to eq(escaped) - end end diff --git a/spec/lib/banzai/filter/issuable_state_filter_spec.rb b/spec/lib/banzai/filter/issuable_state_filter_spec.rb index cacb33d3372..17347768a49 100644 --- a/spec/lib/banzai/filter/issuable_state_filter_spec.rb +++ b/spec/lib/banzai/filter/issuable_state_filter_spec.rb @@ -77,6 +77,14 @@ describe Banzai::Filter::IssuableStateFilter do expect(doc.css('a').last.text).to eq("#{closed_issue.to_reference(other_project)} (closed)") end + it 'skips cross project references if the user cannot read cross project' do + expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { 
false } + link = create_link(closed_issue.to_reference(other_project), issue: closed_issue.id, reference_type: 'issue') + doc = filter(link, context.merge(project: other_project)) + + expect(doc.css('a').last.text).to eq("#{closed_issue.to_reference(other_project)}") + end + it 'does not append state when filter is not enabled' do link = create_link('text', issue: closed_issue.id, reference_type: 'issue') context = { current_user: user } diff --git a/spec/lib/banzai/filter/redactor_filter_spec.rb b/spec/lib/banzai/filter/redactor_filter_spec.rb index 5a7858e77f3..9a2e521fdcf 100644 --- a/spec/lib/banzai/filter/redactor_filter_spec.rb +++ b/spec/lib/banzai/filter/redactor_filter_spec.rb @@ -6,7 +6,7 @@ describe Banzai::Filter::RedactorFilter do it 'ignores non-GFM links' do html = %(See <a href="https://google.com/">Google</a>) - doc = filter(html, current_user: double) + doc = filter(html, current_user: build(:user)) expect(doc.css('a').length).to eq 1 end diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb index 9f2efa05a01..ef52c572898 100644 --- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb +++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb @@ -3,35 +3,86 @@ require 'spec_helper' describe Banzai::Filter::SyntaxHighlightFilter do include FilterSpecHelper + shared_examples "XSS prevention" do |lang| + it "escapes HTML tags" do + # This is how a script tag inside a code block is presented to this filter + # after Markdown rendering. + result = filter(%{<pre lang="#{lang}"><code><script>alert(1)</script></code></pre>}) + + expect(result.to_html).not_to include("<script>alert(1)</script>") + expect(result.to_html).to include("alert(1)") + end + end + context "when no language is specified" do it "highlights as plaintext" do result = filter('<pre><code>def fun end</code></pre>') + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>') end + + include_examples "XSS prevention", "" end context "when a valid language is specified" do it "highlights as that language" do result = filter('<pre><code lang="ruby">def fun end</code></pre>') + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>') end + + include_examples "XSS prevention", "ruby" end context "when an invalid language is specified" do it "highlights as plaintext" do result = filter('<pre><code lang="gnuplot">This is a test</code></pre>') + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') end + + include_examples "XSS prevention", "gnuplot" end - context "when Rouge formatting fails" do + context "languages that should be passed through" do + %w(math mermaid plantuml).each do |lang| + context "when #{lang} is specified" do + it "highlights as plaintext but with the correct language attribute and class" do + result = filter(%{<pre><code lang="#{lang}">This is a test</code></pre>}) + + expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight #{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" 
lang="#{lang}">This is a test</span></code></pre>}) + end + + include_examples "XSS prevention", lang + end + end + end + + context "when Rouge lexing fails" do before do - allow_any_instance_of(Rouge::Formatter).to receive(:format).and_raise(StandardError) + allow_any_instance_of(Rouge::Lexers::Ruby).to receive(:stream_tokens).and_raise(StandardError) end it "highlights as plaintext" do result = filter('<pre><code lang="ruby">This is a test</code></pre>') - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code>This is a test</code></pre>') + + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre>') + end + + include_examples "XSS prevention", "ruby" + end + + context "when Rouge lexing fails after a retry" do + before do + allow_any_instance_of(Rouge::Lexers::PlainText).to receive(:stream_tokens).and_raise(StandardError) + end + + it "does not add highlighting classes" do + result = filter('<pre><code>This is a test</code></pre>') + + expect(result.to_html).to eq('<pre><code>This is a test</code></pre>') end + + include_examples "XSS prevention", "ruby" end end diff --git a/spec/lib/banzai/redactor_spec.rb b/spec/lib/banzai/redactor_spec.rb index 2424c3fdc66..441f3725985 100644 --- a/spec/lib/banzai/redactor_spec.rb +++ b/spec/lib/banzai/redactor_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Banzai::Redactor do - let(:user) { build(:user) } + let(:user) { create(:user) } let(:project) { build(:project) } let(:redactor) { described_class.new(project, user) } @@ -40,6 +40,16 @@ describe Banzai::Redactor do expect(doc.to_html).to eq(original_content) end end + + it 'returns <a> tag with original href if it is originally a link reference' do + href = 'http://localhost:3000' + doc = Nokogiri::HTML + .fragment("<a class='gfm' data-reference-type='issue' data-original=#{href} data-link-reference='true'>#{href}</a>") + + redactor.redact([doc]) + + expect(doc.to_html).to eq('<a href="http://localhost:3000">http://localhost:3000</a>') + end end context 'when project is in pending delete' do @@ -88,6 +98,55 @@ describe Banzai::Redactor do end end + context 'when the user cannot read cross project' do + include ActionView::Helpers::UrlHelper + let(:project) { create(:project) } + let(:other_project) { create(:project, :public) } + + def create_link(issuable) + type = issuable.class.name.underscore.downcase + link_to(issuable.to_reference, '', + class: 'gfm has-tooltip', + title: issuable.title, + data: { + reference_type: type, + "#{type}": issuable.id + }) + end + + before do + project.add_developer(user) + + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global) { false } + allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + end + + it 'skips links to issues within the same project' do + issue = create(:issue, project: project) + link = create_link(issue) + doc = Nokogiri::HTML.fragment(link) + + redactor.redact([doc]) + result = doc.css('a').last + + expect(result['class']).to include('has-tooltip') + expect(result['title']).to eq(issue.title) + end + + it 'removes info from a cross project reference' do + issue = create(:issue, project: other_project) + link = create_link(issue) + doc = Nokogiri::HTML.fragment(link) + + redactor.redact([doc]) + result = doc.css('a').last + + expect(result['class']).not_to 
include('has-tooltip') + expect(result['title']).to be_empty + end + end + describe '#redact_nodes' do it 'redacts an Array of nodes' do doc = Nokogiri::HTML.fragment('<a href="foo">foo</a>') diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb index 4cef3bdb24b..0a63567ee40 100644 --- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb @@ -19,19 +19,58 @@ describe Banzai::ReferenceParser::IssueParser do it 'returns the nodes when the user can read the issue' do expect(Ability).to receive(:issues_readable_by_user) - .with([issue], user) - .and_return([issue]) + .with([issue], user) + .and_return([issue]) expect(subject.nodes_visible_to_user(user, [link])).to eq([link]) end it 'returns an empty Array when the user can not read the issue' do expect(Ability).to receive(:issues_readable_by_user) - .with([issue], user) - .and_return([]) + .with([issue], user) + .and_return([]) expect(subject.nodes_visible_to_user(user, [link])).to eq([]) end + + context 'when the user cannot read cross project' do + let(:issue) { create(:issue) } + + before do + allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global) { false } + end + + it 'returns the nodes when the user can read the issue' do + expect(Ability).to receive(:allowed?) + .with(user, :read_issue_iid, issue) + .and_return(true) + + expect(subject.nodes_visible_to_user(user, [link])).to eq([link]) + end + + it 'returns an empty Array when the user can not read the issue' do + expect(Ability).to receive(:allowed?) + .with(user, :read_issue_iid, issue) + .and_return(false) + + expect(subject.nodes_visible_to_user(user, [link])).to eq([]) + end + + context 'when the issue is not cross project' do + let(:issue) { create(:issue, project: project) } + + it 'does not check `can_read_reference` if the issue is not cross project' do + expect(Ability).to receive(:issues_readable_by_user) + .with([issue], user) + .and_return([]) + + expect(subject).not_to receive(:can_read_reference?).with(user, issue) + + expect(subject.nodes_visible_to_user(user, [link])).to eq([]) + end + end + end end context 'when the link does not have a data-issue attribute' do diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb index f668f78c2b8..2a0e19ae796 100644 --- a/spec/lib/gitlab/asciidoc_spec.rb +++ b/spec/lib/gitlab/asciidoc_spec.rb @@ -95,6 +95,14 @@ module Gitlab expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>') end end + + context 'outfilesuffix' do + it 'defaults to adoc' do + output = render("Inter-document reference <<README.adoc#>>", context) + + expect(output).to include("a href=\"README.adoc\"") + end + end end def render(*args) diff --git a/spec/lib/gitlab/ldap/access_spec.rb b/spec/lib/gitlab/auth/ldap/access_spec.rb index 6a47350be81..9b3916bf9e3 100644 --- a/spec/lib/gitlab/ldap/access_spec.rb +++ b/spec/lib/gitlab/auth/ldap/access_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::LDAP::Access do +describe Gitlab::Auth::LDAP::Access do let(:access) { described_class.new user } let(:user) { create(:omniauth_user) } @@ -19,7 +19,7 @@ describe Gitlab::LDAP::Access do context 'when the user cannot be found' do before do - allow(Gitlab::LDAP::Person).to receive(:find_by_dn).and_return(nil) + 
allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(nil) end it { is_expected.to be_falsey } @@ -33,12 +33,12 @@ describe Gitlab::LDAP::Access do context 'when the user is found' do before do - allow(Gitlab::LDAP::Person).to receive(:find_by_dn).and_return(:ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(:ldap_user) end context 'and the user is disabled via active directory' do before do - allow(Gitlab::LDAP::Person).to receive(:disabled_via_active_directory?).and_return(true) + allow(Gitlab::Auth::LDAP::Person).to receive(:disabled_via_active_directory?).and_return(true) end it { is_expected.to be_falsey } @@ -52,7 +52,7 @@ describe Gitlab::LDAP::Access do context 'and has no disabled flag in active diretory' do before do - allow(Gitlab::LDAP::Person).to receive(:disabled_via_active_directory?).and_return(false) + allow(Gitlab::Auth::LDAP::Person).to receive(:disabled_via_active_directory?).and_return(false) end it { is_expected.to be_truthy } @@ -87,15 +87,15 @@ describe Gitlab::LDAP::Access do context 'without ActiveDirectory enabled' do before do - allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true) - allow_any_instance_of(Gitlab::LDAP::Config).to receive(:active_directory).and_return(false) + allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) + allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:active_directory).and_return(false) end it { is_expected.to be_truthy } context 'when user cannot be found' do before do - allow(Gitlab::LDAP::Person).to receive(:find_by_dn).and_return(nil) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(nil) end it { is_expected.to be_falsey } diff --git a/spec/lib/gitlab/ldap/adapter_spec.rb b/spec/lib/gitlab/auth/ldap/adapter_spec.rb index 6132abd9b35..10c60d792bd 100644 --- a/spec/lib/gitlab/ldap/adapter_spec.rb +++ b/spec/lib/gitlab/auth/ldap/adapter_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::LDAP::Adapter do +describe Gitlab::Auth::LDAP::Adapter do include LdapHelpers let(:ldap) { double(:ldap) } @@ -139,6 +139,6 @@ describe Gitlab::LDAP::Adapter do end def ldap_attributes - Gitlab::LDAP::Person.ldap_attributes(Gitlab::LDAP::Config.new('ldapmain')) + Gitlab::Auth::LDAP::Person.ldap_attributes(Gitlab::Auth::LDAP::Config.new('ldapmain')) end end diff --git a/spec/lib/gitlab/ldap/auth_hash_spec.rb b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb index 9c30ddd7fe2..05541972f87 100644 --- a/spec/lib/gitlab/ldap/auth_hash_spec.rb +++ b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::LDAP::AuthHash do +describe Gitlab::Auth::LDAP::AuthHash do include LdapHelpers let(:auth_hash) do @@ -56,7 +56,7 @@ describe Gitlab::LDAP::AuthHash do end before do - allow_any_instance_of(Gitlab::LDAP::Config).to receive(:attributes).and_return(attributes) + allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:attributes).and_return(attributes) end it "has the correct username" do diff --git a/spec/lib/gitlab/ldap/authentication_spec.rb b/spec/lib/gitlab/auth/ldap/authentication_spec.rb index 9d57a46c12b..111572d043b 100644 --- a/spec/lib/gitlab/ldap/authentication_spec.rb +++ b/spec/lib/gitlab/auth/ldap/authentication_spec.rb @@ -1,14 +1,14 @@ require 'spec_helper' -describe Gitlab::LDAP::Authentication do +describe Gitlab::Auth::LDAP::Authentication do let(:dn) { 'uid=John Smith, ou=People, dc=example, dc=com' } - let(:user) { create(:omniauth_user, extern_uid: 
Gitlab::LDAP::Person.normalize_dn(dn)) } + let(:user) { create(:omniauth_user, extern_uid: Gitlab::Auth::LDAP::Person.normalize_dn(dn)) } let(:login) { 'john' } let(:password) { 'password' } describe 'login' do before do - allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true) + allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) end it "finds the user if authentication is successful" do @@ -43,7 +43,7 @@ describe Gitlab::LDAP::Authentication do end it "fails if ldap is disabled" do - allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(false) + allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(false) expect(described_class.login(login, password)).to be_falsey end diff --git a/spec/lib/gitlab/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb index ca2213cd112..82587e2ba55 100644 --- a/spec/lib/gitlab/ldap/config_spec.rb +++ b/spec/lib/gitlab/auth/ldap/config_spec.rb @@ -1,10 +1,18 @@ require 'spec_helper' -describe Gitlab::LDAP::Config do +describe Gitlab::Auth::LDAP::Config do include LdapHelpers let(:config) { described_class.new('ldapmain') } + describe '.servers' do + it 'returns empty array if no server information is available' do + allow(Gitlab.config).to receive(:ldap).and_return('enabled' => false) + + expect(described_class.servers).to eq [] + end + end + describe '#initialize' do it 'requires a provider' do expect { described_class.new }.to raise_error ArgumentError diff --git a/spec/lib/gitlab/ldap/dn_spec.rb b/spec/lib/gitlab/auth/ldap/dn_spec.rb index 8e21ecdf9ab..f2983a02602 100644 --- a/spec/lib/gitlab/ldap/dn_spec.rb +++ b/spec/lib/gitlab/auth/ldap/dn_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::LDAP::DN do +describe Gitlab::Auth::LDAP::DN do using RSpec::Parameterized::TableSyntax describe '#normalize_value' do @@ -13,7 +13,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'John Smith,' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly') end end @@ -21,7 +21,7 @@ describe Gitlab::LDAP::DN do let(:given) { '#aa aa' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"") end end @@ -29,7 +29,7 @@ describe Gitlab::LDAP::DN do let(:given) { '#aaXaaa' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"") end end @@ -37,7 +37,7 @@ describe Gitlab::LDAP::DN do let(:given) { '#aaaYaa' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"") end end @@ -45,7 +45,7 @@ describe Gitlab::LDAP::DN do let(:given) { '"Sebasti\\cX\\a1n"' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double 
quoted value, but got \"X\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"") end end @@ -53,7 +53,7 @@ describe Gitlab::LDAP::DN do let(:given) { '"James' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly') end end @@ -61,7 +61,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'J\ames' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"') end end @@ -69,7 +69,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'foo\\' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly') end end end @@ -86,7 +86,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'uid=john smith+telephonenumber=+1 555-555-5555,ou=people,dc=example,dc=com' } it 'raises UnsupportedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::UnsupportedError) + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::UnsupportedError) end end @@ -95,7 +95,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'uid = John Smith + telephoneNumber = + 1 555-555-5555 , ou = People,dc=example,dc=com' } it 'raises UnsupportedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::UnsupportedError) + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::UnsupportedError) end end @@ -103,7 +103,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'uid = John Smith + telephoneNumber = +1 555-555-5555 , ou = People,dc=example,dc=com' } it 'raises UnsupportedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::UnsupportedError) + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::UnsupportedError) end end end @@ -115,7 +115,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'uid=John Smith,' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly') end end @@ -123,7 +123,7 @@ describe Gitlab::LDAP::DN do let(:given) { '0.9.2342.19200300.100.1.25=#aa aa' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"") end end @@ -131,7 +131,7 @@ describe Gitlab::LDAP::DN do let(:given) { '0.9.2342.19200300.100.1.25=#aaXaaa' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"") end end @@ -139,7 +139,7 @@ describe Gitlab::LDAP::DN do let(:given) { '0.9.2342.19200300.100.1.25=#aaaYaa' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second 
character of a hex pair, but got \"Y\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"") end end @@ -147,7 +147,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'uid="Sebasti\\cX\\a1n"' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"") + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"") end end @@ -155,7 +155,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'John' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly') end end @@ -163,7 +163,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'cn="James' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly') end end @@ -171,7 +171,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'cn=J\ames' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"') end end @@ -179,7 +179,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'cn=\\' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'DN string ended unexpectedly') end end @@ -187,7 +187,7 @@ describe Gitlab::LDAP::DN do let(:given) { '1.2.d=Value' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized RDN OID attribute type name character "d"') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized RDN OID attribute type name character "d"') end end @@ -195,7 +195,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'd1.2=Value' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "."') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "."') end end @@ -203,7 +203,7 @@ describe Gitlab::LDAP::DN do let(:given) { ' -uid=John Smith' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized first character of an RDN attribute type name "-"') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized first character of an RDN attribute type name "-"') end end @@ -211,7 +211,7 @@ describe Gitlab::LDAP::DN do let(:given) { 'uid\\=john' } it 'raises MalformedError' do - expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "\\"') + expect { subject }.to raise_error(Gitlab::Auth::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "\\"') end end end diff --git a/spec/lib/gitlab/ldap/person_spec.rb b/spec/lib/gitlab/auth/ldap/person_spec.rb 
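Every rename in the hunks above and below follows the same mechanical pattern: constants under Gitlab::LDAP and Gitlab::OAuth move beneath the Gitlab::Auth namespace, and the specs change only the constant they describe or stub. A minimal, illustrative sketch of the stubbing style these updated specs rely on (the example group label is invented; only the Gitlab::Auth::LDAP::Config call comes from the diff, and the snippet assumes GitLab's spec_helper is loaded):

require 'spec_helper'

# Illustrative only: shows the relocated constant being stubbed the way the
# updated specs do it. This example group is not part of the changeset.
describe 'stubbing the relocated LDAP config' do
  it 'targets Gitlab::Auth::LDAP::Config rather than the removed Gitlab::LDAP::Config' do
    allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)

    expect(Gitlab::Auth::LDAP::Config.enabled?).to be(true)
  end
end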
index b54d4000b53..1527fe60fb9 100644 --- a/spec/lib/gitlab/ldap/person_spec.rb +++ b/spec/lib/gitlab/auth/ldap/person_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::LDAP::Person do +describe Gitlab::Auth::LDAP::Person do include LdapHelpers let(:entry) { ldap_user_entry('john.doe') } @@ -59,22 +59,13 @@ describe Gitlab::LDAP::Person do } } ) - config = Gitlab::LDAP::Config.new('ldapmain') + config = Gitlab::Auth::LDAP::Config.new('ldapmain') ldap_attributes = described_class.ldap_attributes(config) expect(ldap_attributes).to match_array(%w(dn uid cn mail memberof)) end end - describe '.validate_entry' do - it 'raises InvalidEntryError' do - entry['foo'] = 'bar' - - expect { described_class.new(entry, 'ldapmain') } - .to raise_error(Gitlab::LDAP::Person::InvalidEntryError) - end - end - describe '#name' do it 'uses the configured name attribute and handles values as an array' do name = 'John Doe' diff --git a/spec/lib/gitlab/ldap/user_spec.rb b/spec/lib/gitlab/auth/ldap/user_spec.rb index 048caa38fcf..cab2169593a 100644 --- a/spec/lib/gitlab/ldap/user_spec.rb +++ b/spec/lib/gitlab/auth/ldap/user_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::LDAP::User do +describe Gitlab::Auth::LDAP::User do let(:ldap_user) { described_class.new(auth_hash) } let(:gl_user) { ldap_user.gl_user } let(:info) do @@ -177,7 +177,7 @@ describe Gitlab::LDAP::User do describe 'blocking' do def configure_block(value) - allow_any_instance_of(Gitlab::LDAP::Config) + allow_any_instance_of(Gitlab::Auth::LDAP::Config) .to receive(:block_auto_created_users).and_return(value) end diff --git a/spec/lib/gitlab/o_auth/auth_hash_spec.rb b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb index dbcc200b90b..40001cea22e 100644 --- a/spec/lib/gitlab/o_auth/auth_hash_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::OAuth::AuthHash do +describe Gitlab::Auth::OAuth::AuthHash do let(:provider) { 'ldap'.freeze } let(:auth_hash) do described_class.new( diff --git a/spec/lib/gitlab/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb index 30faf107e3f..fc35d430917 100644 --- a/spec/lib/gitlab/o_auth/provider_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::OAuth::Provider do +describe Gitlab::Auth::OAuth::Provider do describe '#config_for' do context 'for an LDAP provider' do context 'when the provider exists' do diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb index 03e0a9e2a03..0c71f1d8ca6 100644 --- a/spec/lib/gitlab/o_auth/user_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::OAuth::User do +describe Gitlab::Auth::OAuth::User do let(:oauth_user) { described_class.new(auth_hash) } let(:gl_user) { oauth_user.gl_user } let(:uid) { 'my-uid' } @@ -18,7 +18,7 @@ describe Gitlab::OAuth::User do } } end - let(:ldap_user) { Gitlab::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') } + let(:ldap_user) { Gitlab::Auth::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') } describe '#persisted?' 
do let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') } @@ -39,7 +39,7 @@ describe Gitlab::OAuth::User do describe '#save' do def stub_ldap_config(messages) - allow(Gitlab::LDAP::Config).to receive_messages(messages) + allow(Gitlab::Auth::LDAP::Config).to receive_messages(messages) end let(:provider) { 'twitter' } @@ -215,7 +215,7 @@ describe Gitlab::OAuth::User do context "and no account for the LDAP user" do before do - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) oauth_user.save end @@ -250,7 +250,7 @@ describe Gitlab::OAuth::User do context "and LDAP user has an account already" do let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') } it "adds the omniauth identity to the LDAP account" do - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) oauth_user.save @@ -270,8 +270,8 @@ describe Gitlab::OAuth::User do context 'when an LDAP person is not found by uid' do it 'tries to find an LDAP person by DN and adds the omniauth identity to the user' do - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(nil) - allow(Gitlab::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user) oauth_user.save @@ -297,7 +297,7 @@ describe Gitlab::OAuth::User do context 'and no account for the LDAP user' do it 'creates a user favoring the LDAP username and strips email domain' do - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) oauth_user.save @@ -309,7 +309,7 @@ describe Gitlab::OAuth::User do context "and no corresponding LDAP person" do before do - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(nil) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(nil) end include_examples "to verify compliance with allow_single_sign_on" @@ -358,13 +358,13 @@ describe Gitlab::OAuth::User do allow(ldap_user).to receive(:username) { uid } allow(ldap_user).to receive(:email) { ['johndoe@example.com', 'john2@example.com'] } allow(ldap_user).to receive(:dn) { dn } - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user) end context "and no account for the LDAP user" do context 'dont block on create (LDAP)' do before do - allow_any_instance_of(Gitlab::LDAP::Config).to receive_messages(block_auto_created_users: false) + allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) end it do @@ -376,7 +376,7 @@ describe Gitlab::OAuth::User do context 'block on create (LDAP)' do before do - allow_any_instance_of(Gitlab::LDAP::Config).to receive_messages(block_auto_created_users: true) + allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) end it do @@ -392,7 +392,7 @@ describe Gitlab::OAuth::User do context 'dont block on create (LDAP)' do before do - allow_any_instance_of(Gitlab::LDAP::Config).to receive_messages(block_auto_created_users: false) + 
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) end it do @@ -404,7 +404,7 @@ describe Gitlab::OAuth::User do context 'block on create (LDAP)' do before do - allow_any_instance_of(Gitlab::LDAP::Config).to receive_messages(block_auto_created_users: true) + allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) end it do @@ -448,7 +448,7 @@ describe Gitlab::OAuth::User do context 'dont block on create (LDAP)' do before do - allow_any_instance_of(Gitlab::LDAP::Config).to receive_messages(block_auto_created_users: false) + allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) end it do @@ -460,7 +460,7 @@ describe Gitlab::OAuth::User do context 'block on create (LDAP)' do before do - allow_any_instance_of(Gitlab::LDAP::Config).to receive_messages(block_auto_created_users: true) + allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) end it do @@ -724,6 +724,10 @@ describe Gitlab::OAuth::User do it "does not update the user location" do expect(gl_user.location).not_to eq(info_hash[:address][:country]) end + + it 'does not create associated user synced attributes metadata' do + expect(gl_user.user_synced_attributes_metadata).to be_nil + end end end diff --git a/spec/lib/gitlab/saml/auth_hash_spec.rb b/spec/lib/gitlab/auth/saml/auth_hash_spec.rb index a555935aea3..bb950e6bbf8 100644 --- a/spec/lib/gitlab/saml/auth_hash_spec.rb +++ b/spec/lib/gitlab/auth/saml/auth_hash_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::Saml::AuthHash do +describe Gitlab::Auth::Saml::AuthHash do include LoginHelpers let(:raw_info_attr) { { 'groups' => %w(Developers Freelancers) } } diff --git a/spec/lib/gitlab/saml/user_spec.rb b/spec/lib/gitlab/auth/saml/user_spec.rb index 1765980e977..62514ca0688 100644 --- a/spec/lib/gitlab/saml/user_spec.rb +++ b/spec/lib/gitlab/auth/saml/user_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::Saml::User do +describe Gitlab::Auth::Saml::User do include LdapHelpers include LoginHelpers @@ -17,7 +17,7 @@ describe Gitlab::Saml::User do email: 'john@mail.com' } end - let(:ldap_user) { Gitlab::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') } + let(:ldap_user) { Gitlab::Auth::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') } describe '#save' do before do @@ -159,10 +159,10 @@ describe Gitlab::Saml::User do allow(ldap_user).to receive(:username) { uid } allow(ldap_user).to receive(:email) { %w(john@mail.com john2@example.com) } allow(ldap_user).to receive(:dn) { dn } - allow(Gitlab::LDAP::Adapter).to receive(:new).and_return(adapter) - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).with(uid, adapter).and_return(ldap_user) - allow(Gitlab::LDAP::Person).to receive(:find_by_dn).with(dn, adapter).and_return(ldap_user) - allow(Gitlab::LDAP::Person).to receive(:find_by_email).with('john@mail.com', adapter).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Adapter).to receive(:new).and_return(adapter) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).with(uid, adapter).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).with(dn, adapter).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).with('john@mail.com', adapter).and_return(ldap_user) end context 'and no account for the LDAP user' do @@ -210,10 +210,10 @@ describe Gitlab::Saml::User do nil_types = uid_types - [uid_type] 
nil_types.each do |type| - allow(Gitlab::LDAP::Person).to receive(:"find_by_#{type}").and_return(nil) + allow(Gitlab::Auth::LDAP::Person).to receive(:"find_by_#{type}").and_return(nil) end - allow(Gitlab::LDAP::Person).to receive(:"find_by_#{uid_type}").and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:"find_by_#{uid_type}").and_return(ldap_user) end it 'adds the omniauth identity to the LDAP account' do @@ -280,7 +280,7 @@ describe Gitlab::Saml::User do it 'adds the LDAP identity to the existing SAML user' do create(:omniauth_user, email: 'john@mail.com', extern_uid: dn, provider: 'saml', username: 'john') - allow(Gitlab::LDAP::Person).to receive(:find_by_uid).with(dn, adapter).and_return(ldap_user) + allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).with(dn, adapter).and_return(ldap_user) local_hash = OmniAuth::AuthHash.new(uid: dn, provider: provider, info: info_hash) local_saml_user = described_class.new(local_hash) diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index cc202ce8bca..f969f9e8e38 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -309,17 +309,17 @@ describe Gitlab::Auth do context "with ldap enabled" do before do - allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true) + allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) end it "tries to autheticate with db before ldap" do - expect(Gitlab::LDAP::Authentication).not_to receive(:login) + expect(Gitlab::Auth::LDAP::Authentication).not_to receive(:login) gl_auth.find_with_user_password(username, password) end it "uses ldap as fallback to for authentication" do - expect(Gitlab::LDAP::Authentication).to receive(:login) + expect(Gitlab::Auth::LDAP::Authentication).to receive(:login) gl_auth.find_with_user_password('ldap_user', 'password') end @@ -336,7 +336,7 @@ describe Gitlab::Auth do context "with ldap enabled" do before do - allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true) + allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) end it "does not find non-ldap user by valid login/password" do diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb index c8df6dd2118..007e93c1db6 100644 --- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb +++ b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb @@ -15,10 +15,6 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :m .to receive(:commits_count=).and_return(nil) end - after do - [Project, MergeRequest, MergeRequestDiff].each(&:reset_column_information) - end - def diffs_to_hashes(diffs) diffs.as_json(only: Gitlab::Git::Diff::SERIALIZE_KEYS).map(&:with_indifferent_access) end diff --git a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb new file mode 100644 index 00000000000..e112e9e9e3d --- /dev/null +++ b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb @@ -0,0 +1,54 @@ +require 'spec_helper' + +describe Gitlab::BackgroundMigration::MigrateBuildStage, :migration, schema: 20180212101928 do + let(:projects) { table(:projects) } + let(:pipelines) { table(:ci_pipelines) } + let(:stages) { table(:ci_stages) } + let(:jobs) { table(:ci_builds) } + + STATUSES = { created: 0, pending: 1, running: 2, 
success: 3, + failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze + + before do + projects.create!(id: 123, name: 'gitlab', path: 'gitlab-ce') + pipelines.create!(id: 1, project_id: 123, ref: 'master', sha: 'adf43c3a') + + jobs.create!(id: 1, commit_id: 1, project_id: 123, + stage_idx: 2, stage: 'build', status: :success) + jobs.create!(id: 2, commit_id: 1, project_id: 123, + stage_idx: 2, stage: 'build', status: :success) + jobs.create!(id: 3, commit_id: 1, project_id: 123, + stage_idx: 1, stage: 'test', status: :failed) + jobs.create!(id: 4, commit_id: 1, project_id: 123, + stage_idx: 1, stage: 'test', status: :success) + jobs.create!(id: 5, commit_id: 1, project_id: 123, + stage_idx: 3, stage: 'deploy', status: :pending) + jobs.create!(id: 6, commit_id: 1, project_id: 123, + stage_idx: 3, stage: nil, status: :pending) + end + + it 'correctly migrates builds stages' do + expect(stages.count).to be_zero + + described_class.new.perform(1, 6) + + expect(stages.count).to eq 3 + expect(stages.all.pluck(:name)).to match_array %w[test build deploy] + expect(jobs.where(stage_id: nil)).to be_one + expect(jobs.find_by(stage_id: nil).id).to eq 6 + expect(stages.all.pluck(:status)).to match_array [STATUSES[:success], + STATUSES[:failed], + STATUSES[:pending]] + end + + it 'recovers from unique constraint violation only twice' do + allow(described_class::Migratable::Stage) + .to receive(:find_by).and_return(nil) + + expect(described_class::Migratable::Stage) + .to receive(:find_by).exactly(3).times + + expect { described_class.new.perform(1, 6) } + .to raise_error ActiveRecord::RecordNotUnique + end +end diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb index 2b69e718e08..e99257e3481 100644 --- a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb @@ -7,10 +7,6 @@ describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData, .to receive(:commits_count=).and_return(nil) end - after do - [Project, MergeRequest, MergeRequestDiff].each(&:reset_column_information) - end - describe '#perform' do let(:mr_with_event) { create(:merge_request) } let!(:merged_event) { create(:event, :merged, target: mr_with_event) } diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb new file mode 100644 index 00000000000..c76adcbe2f5 --- /dev/null +++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb @@ -0,0 +1,262 @@ +require 'spec_helper' + +# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work. 
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile, :migration, schema: 20180208183958 do + include MigrationsHelpers::TrackUntrackedUploadsHelpers + + let!(:appearances) { table(:appearances) } + let!(:namespaces) { table(:namespaces) } + let!(:projects) { table(:projects) } + let!(:routes) { table(:routes) } + let!(:uploads) { table(:uploads) } + + before(:all) do + ensure_temporary_tracking_table_exists + end + + describe '#upload_path' do + def assert_upload_path(file_path, expected_upload_path) + untracked_file = create_untracked_file(file_path) + + expect(untracked_file.upload_path).to eq(expected_upload_path) + end + + context 'for an appearance logo file path' do + it 'returns the file path relative to the CarrierWave root' do + assert_upload_path('/-/system/appearance/logo/1/some_logo.jpg', 'uploads/-/system/appearance/logo/1/some_logo.jpg') + end + end + + context 'for an appearance header_logo file path' do + it 'returns the file path relative to the CarrierWave root' do + assert_upload_path('/-/system/appearance/header_logo/1/some_logo.jpg', 'uploads/-/system/appearance/header_logo/1/some_logo.jpg') + end + end + + context 'for a pre-Markdown Note attachment file path' do + it 'returns the file path relative to the CarrierWave root' do + assert_upload_path('/-/system/note/attachment/1234/some_attachment.pdf', 'uploads/-/system/note/attachment/1234/some_attachment.pdf') + end + end + + context 'for a user avatar file path' do + it 'returns the file path relative to the CarrierWave root' do + assert_upload_path('/-/system/user/avatar/1234/avatar.jpg', 'uploads/-/system/user/avatar/1234/avatar.jpg') + end + end + + context 'for a group avatar file path' do + it 'returns the file path relative to the CarrierWave root' do + assert_upload_path('/-/system/group/avatar/1234/avatar.jpg', 'uploads/-/system/group/avatar/1234/avatar.jpg') + end + end + + context 'for a project avatar file path' do + it 'returns the file path relative to the CarrierWave root' do + assert_upload_path('/-/system/project/avatar/1234/avatar.jpg', 'uploads/-/system/project/avatar/1234/avatar.jpg') + end + end + + context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do + it 'returns the file path relative to the project directory in uploads' do + project = create_project + random_hex = SecureRandom.hex + + assert_upload_path("/#{get_full_path(project)}/#{random_hex}/Some file.jpg", "#{random_hex}/Some file.jpg") + end + end + end + + describe '#uploader' do + def assert_uploader(file_path, expected_uploader) + untracked_file = create_untracked_file(file_path) + + expect(untracked_file.uploader).to eq(expected_uploader) + end + + context 'for an appearance logo file path' do + it 'returns AttachmentUploader as a string' do + assert_uploader('/-/system/appearance/logo/1/some_logo.jpg', 'AttachmentUploader') + end + end + + context 'for an appearance header_logo file path' do + it 'returns AttachmentUploader as a string' do + assert_uploader('/-/system/appearance/header_logo/1/some_logo.jpg', 'AttachmentUploader') + end + end + + context 'for a pre-Markdown Note attachment file path' do + it 'returns AttachmentUploader as a string' do + assert_uploader('/-/system/note/attachment/1234/some_attachment.pdf', 'AttachmentUploader') + end + end + + context 'for a user avatar file path' do + it 'returns AvatarUploader as a string' do + assert_uploader('/-/system/user/avatar/1234/avatar.jpg', 'AvatarUploader') + end + end + + context 'for a group 
avatar file path' do + it 'returns AvatarUploader as a string' do + assert_uploader('/-/system/group/avatar/1234/avatar.jpg', 'AvatarUploader') + end + end + + context 'for a project avatar file path' do + it 'returns AvatarUploader as a string' do + assert_uploader('/-/system/project/avatar/1234/avatar.jpg', 'AvatarUploader') + end + end + + context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do + it 'returns FileUploader as a string' do + project = create_project + + assert_uploader("/#{get_full_path(project)}/#{SecureRandom.hex}/Some file.jpg", 'FileUploader') + end + end + end + + describe '#model_type' do + def assert_model_type(file_path, expected_model_type) + untracked_file = create_untracked_file(file_path) + + expect(untracked_file.model_type).to eq(expected_model_type) + end + + context 'for an appearance logo file path' do + it 'returns Appearance as a string' do + assert_model_type('/-/system/appearance/logo/1/some_logo.jpg', 'Appearance') + end + end + + context 'for an appearance header_logo file path' do + it 'returns Appearance as a string' do + assert_model_type('/-/system/appearance/header_logo/1/some_logo.jpg', 'Appearance') + end + end + + context 'for a pre-Markdown Note attachment file path' do + it 'returns Note as a string' do + assert_model_type('/-/system/note/attachment/1234/some_attachment.pdf', 'Note') + end + end + + context 'for a user avatar file path' do + it 'returns User as a string' do + assert_model_type('/-/system/user/avatar/1234/avatar.jpg', 'User') + end + end + + context 'for a group avatar file path' do + it 'returns Namespace as a string' do + assert_model_type('/-/system/group/avatar/1234/avatar.jpg', 'Namespace') + end + end + + context 'for a project avatar file path' do + it 'returns Project as a string' do + assert_model_type('/-/system/project/avatar/1234/avatar.jpg', 'Project') + end + end + + context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do + it 'returns Project as a string' do + project = create_project + + assert_model_type("/#{get_full_path(project)}/#{SecureRandom.hex}/Some file.jpg", 'Project') + end + end + end + + describe '#model_id' do + def assert_model_id(file_path, expected_model_id) + untracked_file = create_untracked_file(file_path) + + expect(untracked_file.model_id).to eq(expected_model_id) + end + + context 'for an appearance logo file path' do + it 'returns the ID as a string' do + assert_model_id('/-/system/appearance/logo/1/some_logo.jpg', 1) + end + end + + context 'for an appearance header_logo file path' do + it 'returns the ID as a string' do + assert_model_id('/-/system/appearance/header_logo/1/some_logo.jpg', 1) + end + end + + context 'for a pre-Markdown Note attachment file path' do + it 'returns the ID as a string' do + assert_model_id('/-/system/note/attachment/1234/some_attachment.pdf', 1234) + end + end + + context 'for a user avatar file path' do + it 'returns the ID as a string' do + assert_model_id('/-/system/user/avatar/1234/avatar.jpg', 1234) + end + end + + context 'for a group avatar file path' do + it 'returns the ID as a string' do + assert_model_id('/-/system/group/avatar/1234/avatar.jpg', 1234) + end + end + + context 'for a project avatar file path' do + it 'returns the ID as a string' do + assert_model_id('/-/system/project/avatar/1234/avatar.jpg', 1234) + end + end + + context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do + it 'returns the ID as a string' do + project = 
create_project + + assert_model_id("/#{get_full_path(project)}/#{SecureRandom.hex}/Some file.jpg", project.id) + end + end + end + + describe '#file_size' do + context 'for an appearance logo file path' do + let(:appearance) { create_or_update_appearance(logo: true) } + let(:untracked_file) { described_class.create!(path: get_uploads(appearance, 'Appearance').first.path) } + + it 'returns the file size' do + expect(untracked_file.file_size).to eq(1062) + end + end + + context 'for a project avatar file path' do + let(:project) { create_project(avatar: true) } + let(:untracked_file) { described_class.create!(path: get_uploads(project, 'Project').first.path) } + + it 'returns the file size' do + expect(untracked_file.file_size).to eq(1062) + end + end + + context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do + let(:project) { create_project } + let(:untracked_file) { create_untracked_file("/#{get_full_path(project)}/#{get_uploads(project, 'Project').first.path}") } + + before do + add_markdown_attachment(project) + end + + it 'returns the file size' do + expect(untracked_file.file_size).to eq(1062) + end + end + end + + def create_untracked_file(path_relative_to_upload_dir) + described_class.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}#{path_relative_to_upload_dir}") + end +end diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb index be45c00dfe6..0d2074eed22 100644 --- a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb @@ -1,59 +1,60 @@ require 'spec_helper' -describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do - include TrackUntrackedUploadsHelpers +# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work. 
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq, :migration, schema: 20180208183958 do + include MigrationsHelpers::TrackUntrackedUploadsHelpers subject { described_class.new } - let!(:untracked_files_for_uploads) { described_class::UntrackedFile } - let!(:uploads) { described_class::Upload } + let!(:appearances) { table(:appearances) } + let!(:namespaces) { table(:namespaces) } + let!(:notes) { table(:notes) } + let!(:projects) { table(:projects) } + let!(:routes) { table(:routes) } + let!(:untracked_files_for_uploads) { table(:untracked_files_for_uploads) } + let!(:uploads) { table(:uploads) } + let!(:users) { table(:users) } before do - DatabaseCleaner.clean - drop_temp_table_if_exists ensure_temporary_tracking_table_exists uploads.delete_all end - after(:all) do - drop_temp_table_if_exists - end - context 'with untracked files and tracked files in untracked_files_for_uploads' do - let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) } - let!(:user1) { create(:user, :with_avatar) } - let!(:user2) { create(:user, :with_avatar) } - let!(:project1) { create(:project, :with_avatar) } - let!(:project2) { create(:project, :with_avatar) } + let!(:appearance) { create_or_update_appearance(logo: true, header_logo: true) } + let!(:user1) { create_user(avatar: true) } + let!(:user2) { create_user(avatar: true) } + let!(:project1) { create_project(avatar: true) } + let!(:project2) { create_project(avatar: true) } before do - UploadService.new(project1, uploaded_file, FileUploader).execute # Markdown upload - UploadService.new(project2, uploaded_file, FileUploader).execute # Markdown upload + add_markdown_attachment(project1) + add_markdown_attachment(project2) # File records created by PrepareUntrackedUploads - untracked_files_for_uploads.create!(path: appearance.uploads.first.path) - untracked_files_for_uploads.create!(path: appearance.uploads.last.path) - untracked_files_for_uploads.create!(path: user1.uploads.first.path) - untracked_files_for_uploads.create!(path: user2.uploads.first.path) - untracked_files_for_uploads.create!(path: project1.uploads.first.path) - untracked_files_for_uploads.create!(path: project2.uploads.first.path) - untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project1.full_path}/#{project1.uploads.last.path}") - untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/#{project2.uploads.last.path}") + untracked_files_for_uploads.create!(path: get_uploads(appearance, 'Appearance').first.path) + untracked_files_for_uploads.create!(path: get_uploads(appearance, 'Appearance').last.path) + untracked_files_for_uploads.create!(path: get_uploads(user1, 'User').first.path) + untracked_files_for_uploads.create!(path: get_uploads(user2, 'User').first.path) + untracked_files_for_uploads.create!(path: get_uploads(project1, 'Project').first.path) + untracked_files_for_uploads.create!(path: get_uploads(project2, 'Project').first.path) + untracked_files_for_uploads.create!(path: "#{legacy_project_uploads_dir(project1).sub("#{MigrationsHelpers::TrackUntrackedUploadsHelpers::PUBLIC_DIR}/", '')}/#{get_uploads(project1, 'Project').last.path}") + untracked_files_for_uploads.create!(path: "#{legacy_project_uploads_dir(project2).sub("#{MigrationsHelpers::TrackUntrackedUploadsHelpers::PUBLIC_DIR}/", '')}/#{get_uploads(project2, 'Project').last.path}") # Untrack 4 files - 
user2.uploads.delete_all - project2.uploads.delete_all # 2 files: avatar and a Markdown upload - appearance.uploads.where("path like '%header_logo%'").delete_all + get_uploads(user2, 'User').delete_all + get_uploads(project2, 'Project').delete_all # 2 files: avatar and a Markdown upload + get_uploads(appearance, 'Appearance').where("path like '%header_logo%'").delete_all end it 'adds untracked files to the uploads table' do expect do - subject.perform(1, untracked_files_for_uploads.last.id) + subject.perform(1, untracked_files_for_uploads.reorder(:id).last.id) end.to change { uploads.count }.from(4).to(8) - expect(user2.uploads.count).to eq(1) - expect(project2.uploads.count).to eq(2) - expect(appearance.uploads.count).to eq(2) + expect(get_uploads(user2, 'User').count).to eq(1) + expect(get_uploads(project2, 'Project').count).to eq(2) + expect(get_uploads(appearance, 'Appearance').count).to eq(2) end it 'deletes rows after processing them' do @@ -67,9 +68,9 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do it 'does not create duplicate uploads of already tracked files' do subject.perform(1, untracked_files_for_uploads.last.id) - expect(user1.uploads.count).to eq(1) - expect(project1.uploads.count).to eq(2) - expect(appearance.uploads.count).to eq(2) + expect(get_uploads(user1, 'User').count).to eq(1) + expect(get_uploads(project1, 'Project').count).to eq(2) + expect(get_uploads(appearance, 'Appearance').count).to eq(2) end it 'uses the start and end batch ids [only 1st half]' do @@ -81,11 +82,11 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do subject.perform(start_id, end_id) end.to change { uploads.count }.from(4).to(6) - expect(user1.uploads.count).to eq(1) - expect(user2.uploads.count).to eq(1) - expect(appearance.uploads.count).to eq(2) - expect(project1.uploads.count).to eq(2) - expect(project2.uploads.count).to eq(0) + expect(get_uploads(user1, 'User').count).to eq(1) + expect(get_uploads(user2, 'User').count).to eq(1) + expect(get_uploads(appearance, 'Appearance').count).to eq(2) + expect(get_uploads(project1, 'Project').count).to eq(2) + expect(get_uploads(project2, 'Project').count).to eq(0) # Only 4 have been either confirmed or added to uploads expect(untracked_files_for_uploads.count).to eq(4) @@ -100,11 +101,11 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do subject.perform(start_id, end_id) end.to change { uploads.count }.from(4).to(6) - expect(user1.uploads.count).to eq(1) - expect(user2.uploads.count).to eq(0) - expect(appearance.uploads.count).to eq(1) - expect(project1.uploads.count).to eq(2) - expect(project2.uploads.count).to eq(2) + expect(get_uploads(user1, 'User').count).to eq(1) + expect(get_uploads(user2, 'User').count).to eq(0) + expect(get_uploads(appearance, 'Appearance').count).to eq(1) + expect(get_uploads(project1, 'Project').count).to eq(2) + expect(get_uploads(project2, 'Project').count).to eq(2) # Only 4 have been either confirmed or added to uploads expect(untracked_files_for_uploads.count).to eq(4) @@ -117,13 +118,13 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do end it 'drops the temporary tracking table after processing the batch, if there are no untracked rows left' do - subject.perform(1, untracked_files_for_uploads.last.id) + expect(subject).to receive(:drop_temp_table_if_finished) - expect(ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)).to be_falsey + subject.perform(1, 
untracked_files_for_uploads.last.id) end it 'does not block a whole batch because of one bad path' do - untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/._7d37bf4c747916390e596744117d5d1a") + untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{get_full_path(project2)}/._7d37bf4c747916390e596744117d5d1a") expect(untracked_files_for_uploads.count).to eq(9) expect(uploads.count).to eq(4) @@ -134,7 +135,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do end it 'an unparseable path is shown in error output' do - bad_path = "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/._7d37bf4c747916390e596744117d5d1a" + bad_path = "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{get_full_path(project2)}/._7d37bf4c747916390e596744117d5d1a" untracked_files_for_uploads.create!(path: bad_path) expect(Rails.logger).to receive(:error).with(/Error parsing path "#{bad_path}":/) @@ -153,367 +154,100 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do describe 'upload outcomes for each path pattern' do shared_examples_for 'non_markdown_file' do - let!(:expected_upload_attrs) { model.uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum') } + let!(:expected_upload_attrs) { model_uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum') } let!(:untracked_file) { untracked_files_for_uploads.create!(path: expected_upload_attrs['path']) } before do - model.uploads.delete_all + model_uploads.delete_all end it 'creates an Upload record' do expect do subject.perform(1, untracked_files_for_uploads.last.id) - end.to change { model.reload.uploads.count }.from(0).to(1) + end.to change { model_uploads.count }.from(0).to(1) - expect(model.uploads.first.attributes).to include(expected_upload_attrs) + expect(model_uploads.first.attributes).to include(expected_upload_attrs) end end context 'for an appearance logo file path' do - let(:model) { create_or_update_appearance(logo: uploaded_file) } + let(:model) { create_or_update_appearance(logo: true) } + let(:model_uploads) { get_uploads(model, 'Appearance') } it_behaves_like 'non_markdown_file' end context 'for an appearance header_logo file path' do - let(:model) { create_or_update_appearance(header_logo: uploaded_file) } + let(:model) { create_or_update_appearance(header_logo: true) } + let(:model_uploads) { get_uploads(model, 'Appearance') } it_behaves_like 'non_markdown_file' end context 'for a pre-Markdown Note attachment file path' do - let(:model) { create(:note, :with_attachment) } - let!(:expected_upload_attrs) { Upload.where(model_type: 'Note', model_id: model.id).first.attributes.slice('path', 'uploader', 'size', 'checksum') } + let(:model) { create_note(attachment: true) } + let!(:expected_upload_attrs) { get_uploads(model, 'Note').first.attributes.slice('path', 'uploader', 'size', 'checksum') } let!(:untracked_file) { untracked_files_for_uploads.create!(path: expected_upload_attrs['path']) } before do - Upload.where(model_type: 'Note', model_id: model.id).delete_all + get_uploads(model, 'Note').delete_all end # Can't use the shared example because Note doesn't have an `uploads` association it 'creates an Upload record' do expect do subject.perform(1, untracked_files_for_uploads.last.id) - end.to change { Upload.where(model_type: 'Note', model_id: 
model.id).count }.from(0).to(1) + end.to change { get_uploads(model, 'Note').count }.from(0).to(1) - expect(Upload.where(model_type: 'Note', model_id: model.id).first.attributes).to include(expected_upload_attrs) + expect(get_uploads(model, 'Note').first.attributes).to include(expected_upload_attrs) end end context 'for a user avatar file path' do - let(:model) { create(:user, :with_avatar) } + let(:model) { create_user(avatar: true) } + let(:model_uploads) { get_uploads(model, 'User') } it_behaves_like 'non_markdown_file' end context 'for a group avatar file path' do - let(:model) { create(:group, :with_avatar) } + let(:model) { create_group(avatar: true) } + let(:model_uploads) { get_uploads(model, 'Namespace') } it_behaves_like 'non_markdown_file' end context 'for a project avatar file path' do - let(:model) { create(:project, :with_avatar) } + let(:model) { create_project(avatar: true) } + let(:model_uploads) { get_uploads(model, 'Project') } it_behaves_like 'non_markdown_file' end context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do - let(:model) { create(:project) } + let(:model) { create_project } before do # Upload the file - UploadService.new(model, uploaded_file, FileUploader).execute + add_markdown_attachment(model) # Create the untracked_files_for_uploads record - untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{model.full_path}/#{model.uploads.first.path}") + untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{get_full_path(model)}/#{get_uploads(model, 'Project').first.path}") # Save the expected upload attributes - @expected_upload_attrs = model.reload.uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum') + @expected_upload_attrs = get_uploads(model, 'Project').first.attributes.slice('path', 'uploader', 'size', 'checksum') # Untrack the file - model.reload.uploads.delete_all + get_uploads(model, 'Project').delete_all end it 'creates an Upload record' do expect do subject.perform(1, untracked_files_for_uploads.last.id) - end.to change { model.reload.uploads.count }.from(0).to(1) - - expect(model.uploads.first.attributes).to include(@expected_upload_attrs) - end - end - end -end - -describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do - include TrackUntrackedUploadsHelpers - - let(:upload_class) { Gitlab::BackgroundMigration::PopulateUntrackedUploads::Upload } - - before(:all) do - ensure_temporary_tracking_table_exists - end - - after(:all) do - drop_temp_table_if_exists - end - - describe '#upload_path' do - def assert_upload_path(file_path, expected_upload_path) - untracked_file = create_untracked_file(file_path) - - expect(untracked_file.upload_path).to eq(expected_upload_path) - end - - context 'for an appearance logo file path' do - it 'returns the file path relative to the CarrierWave root' do - assert_upload_path('/-/system/appearance/logo/1/some_logo.jpg', 'uploads/-/system/appearance/logo/1/some_logo.jpg') - end - end - - context 'for an appearance header_logo file path' do - it 'returns the file path relative to the CarrierWave root' do - assert_upload_path('/-/system/appearance/header_logo/1/some_logo.jpg', 'uploads/-/system/appearance/header_logo/1/some_logo.jpg') - end - end - - context 'for a pre-Markdown Note attachment file path' do - it 'returns the file path relative to the CarrierWave root' do - 
assert_upload_path('/-/system/note/attachment/1234/some_attachment.pdf', 'uploads/-/system/note/attachment/1234/some_attachment.pdf') - end - end - - context 'for a user avatar file path' do - it 'returns the file path relative to the CarrierWave root' do - assert_upload_path('/-/system/user/avatar/1234/avatar.jpg', 'uploads/-/system/user/avatar/1234/avatar.jpg') - end - end - - context 'for a group avatar file path' do - it 'returns the file path relative to the CarrierWave root' do - assert_upload_path('/-/system/group/avatar/1234/avatar.jpg', 'uploads/-/system/group/avatar/1234/avatar.jpg') - end - end - - context 'for a project avatar file path' do - it 'returns the file path relative to the CarrierWave root' do - assert_upload_path('/-/system/project/avatar/1234/avatar.jpg', 'uploads/-/system/project/avatar/1234/avatar.jpg') - end - end - - context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do - it 'returns the file path relative to the project directory in uploads' do - project = create(:project) - random_hex = SecureRandom.hex - - assert_upload_path("/#{project.full_path}/#{random_hex}/Some file.jpg", "#{random_hex}/Some file.jpg") - end - end - end - - describe '#uploader' do - def assert_uploader(file_path, expected_uploader) - untracked_file = create_untracked_file(file_path) - - expect(untracked_file.uploader).to eq(expected_uploader) - end - - context 'for an appearance logo file path' do - it 'returns AttachmentUploader as a string' do - assert_uploader('/-/system/appearance/logo/1/some_logo.jpg', 'AttachmentUploader') - end - end - - context 'for an appearance header_logo file path' do - it 'returns AttachmentUploader as a string' do - assert_uploader('/-/system/appearance/header_logo/1/some_logo.jpg', 'AttachmentUploader') - end - end - - context 'for a pre-Markdown Note attachment file path' do - it 'returns AttachmentUploader as a string' do - assert_uploader('/-/system/note/attachment/1234/some_attachment.pdf', 'AttachmentUploader') - end - end - - context 'for a user avatar file path' do - it 'returns AvatarUploader as a string' do - assert_uploader('/-/system/user/avatar/1234/avatar.jpg', 'AvatarUploader') - end - end - - context 'for a group avatar file path' do - it 'returns AvatarUploader as a string' do - assert_uploader('/-/system/group/avatar/1234/avatar.jpg', 'AvatarUploader') - end - end - - context 'for a project avatar file path' do - it 'returns AvatarUploader as a string' do - assert_uploader('/-/system/project/avatar/1234/avatar.jpg', 'AvatarUploader') - end - end - - context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do - it 'returns FileUploader as a string' do - project = create(:project) - - assert_uploader("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'FileUploader') - end - end - end - - describe '#model_type' do - def assert_model_type(file_path, expected_model_type) - untracked_file = create_untracked_file(file_path) - - expect(untracked_file.model_type).to eq(expected_model_type) - end + end.to change { get_uploads(model, 'Project').count }.from(0).to(1) - context 'for an appearance logo file path' do - it 'returns Appearance as a string' do - assert_model_type('/-/system/appearance/logo/1/some_logo.jpg', 'Appearance') - end - end - - context 'for an appearance header_logo file path' do - it 'returns Appearance as a string' do - assert_model_type('/-/system/appearance/header_logo/1/some_logo.jpg', 'Appearance') - end - end - - context 'for a pre-Markdown Note 
attachment file path' do - it 'returns Note as a string' do - assert_model_type('/-/system/note/attachment/1234/some_attachment.pdf', 'Note') - end - end - - context 'for a user avatar file path' do - it 'returns User as a string' do - assert_model_type('/-/system/user/avatar/1234/avatar.jpg', 'User') - end - end - - context 'for a group avatar file path' do - it 'returns Namespace as a string' do - assert_model_type('/-/system/group/avatar/1234/avatar.jpg', 'Namespace') - end - end - - context 'for a project avatar file path' do - it 'returns Project as a string' do - assert_model_type('/-/system/project/avatar/1234/avatar.jpg', 'Project') - end - end - - context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do - it 'returns Project as a string' do - project = create(:project) - - assert_model_type("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'Project') - end - end - end - - describe '#model_id' do - def assert_model_id(file_path, expected_model_id) - untracked_file = create_untracked_file(file_path) - - expect(untracked_file.model_id).to eq(expected_model_id) - end - - context 'for an appearance logo file path' do - it 'returns the ID as a string' do - assert_model_id('/-/system/appearance/logo/1/some_logo.jpg', 1) - end - end - - context 'for an appearance header_logo file path' do - it 'returns the ID as a string' do - assert_model_id('/-/system/appearance/header_logo/1/some_logo.jpg', 1) - end - end - - context 'for a pre-Markdown Note attachment file path' do - it 'returns the ID as a string' do - assert_model_id('/-/system/note/attachment/1234/some_attachment.pdf', 1234) - end - end - - context 'for a user avatar file path' do - it 'returns the ID as a string' do - assert_model_id('/-/system/user/avatar/1234/avatar.jpg', 1234) - end - end - - context 'for a group avatar file path' do - it 'returns the ID as a string' do - assert_model_id('/-/system/group/avatar/1234/avatar.jpg', 1234) - end - end - - context 'for a project avatar file path' do - it 'returns the ID as a string' do - assert_model_id('/-/system/project/avatar/1234/avatar.jpg', 1234) + expect(get_uploads(model, 'Project').first.attributes).to include(@expected_upload_attrs) end end - - context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do - it 'returns the ID as a string' do - project = create(:project) - - assert_model_id("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", project.id) - end - end - end - - describe '#file_size' do - context 'for an appearance logo file path' do - let(:appearance) { create_or_update_appearance(logo: uploaded_file) } - let(:untracked_file) { described_class.create!(path: appearance.uploads.first.path) } - - it 'returns the file size' do - expect(untracked_file.file_size).to eq(35255) - end - - it 'returns the same thing that CarrierWave would return' do - expect(untracked_file.file_size).to eq(appearance.logo.size) - end - end - - context 'for a project avatar file path' do - let(:project) { create(:project, avatar: uploaded_file) } - let(:untracked_file) { described_class.create!(path: project.uploads.first.path) } - - it 'returns the file size' do - expect(untracked_file.file_size).to eq(35255) - end - - it 'returns the same thing that CarrierWave would return' do - expect(untracked_file.file_size).to eq(project.avatar.size) - end - end - - context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do - let(:project) { create(:project) } - let(:untracked_file) { 
create_untracked_file("/#{project.full_path}/#{project.uploads.first.path}") } - - before do - UploadService.new(project, uploaded_file, FileUploader).execute - end - - it 'returns the file size' do - expect(untracked_file.file_size).to eq(35255) - end - - it 'returns the same thing that CarrierWave would return' do - expect(untracked_file.file_size).to eq(project.uploads.first.size) - end - end - end - - def create_untracked_file(path_relative_to_upload_dir) - described_class.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}#{path_relative_to_upload_dir}") end end diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb index 370c2490b97..35750d89c35 100644 --- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb +++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb @@ -1,20 +1,16 @@ require 'spec_helper' -describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do - include TrackUntrackedUploadsHelpers - include MigrationsHelpers - - let!(:untracked_files_for_uploads) { described_class::UntrackedFile } - - before do - DatabaseCleaner.clean - - drop_temp_table_if_exists - end - - after do - drop_temp_table_if_exists - end +# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work. +describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq, :migration, schema: 20180208183958 do + include MigrationsHelpers::TrackUntrackedUploadsHelpers + + let!(:untracked_files_for_uploads) { table(:untracked_files_for_uploads) } + let!(:appearances) { table(:appearances) } + let!(:namespaces) { table(:namespaces) } + let!(:projects) { table(:projects) } + let!(:routes) { table(:routes) } + let!(:uploads) { table(:uploads) } + let!(:users) { table(:users) } around do |example| # Especially important so the follow-up migration does not get run @@ -23,71 +19,34 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end - # E.g. 
The installation is in use at the time of migration, and someone has - # just uploaded a file - shared_examples 'does not add files in /uploads/tmp' do - let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } - - before do - FileUtils.mkdir(File.dirname(tmp_file)) - FileUtils.touch(tmp_file) - end - - after do - FileUtils.rm(tmp_file) - end - - it 'does not add files from /uploads/tmp' do - described_class.new.perform - - expect(untracked_files_for_uploads.count).to eq(5) - end - end - - it 'ensures the untracked_files_for_uploads table exists' do - expect do - described_class.new.perform - end.to change { ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads) }.from(false).to(true) - end - - it 'has a path field long enough for really long paths' do - described_class.new.perform - - component = 'a' * 255 - - long_path = [ - 'uploads', - component, # project.full_path - component # filename - ].flatten.join('/') + shared_examples 'prepares the untracked_files_for_uploads table' do + context 'when files were uploaded before and after hashed storage was enabled' do + let!(:appearance) { create_or_update_appearance(logo: true, header_logo: true) } + let!(:user) { create_user(avatar: true) } + let!(:project1) { create_project(avatar: true) } + let(:project2) { create_project } # instantiate after enabling hashed_storage - record = untracked_files_for_uploads.create!(path: long_path) - expect(record.reload.path.size).to eq(519) - end + before do + # Markdown upload before enabling hashed_storage + add_markdown_attachment(project1) - context "test bulk insert with ON CONFLICT DO NOTHING or IGNORE" do - around do |example| - # If this is CI, we use Postgres 9.2 so this whole context should be - # skipped since we're unable to use ON CONFLICT DO NOTHING or IGNORE. - if described_class.new.send(:can_bulk_insert_and_ignore_duplicates?) 
- example.run + # Markdown upload after enabling hashed_storage + add_markdown_attachment(project2, hashed_storage: true) end - end - context 'when files were uploaded before and after hashed storage was enabled' do - let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) } - let!(:user) { create(:user, :with_avatar) } - let!(:project1) { create(:project, :with_avatar) } - let(:project2) { create(:project) } # instantiate after enabling hashed_storage + it 'has a path field long enough for really long paths' do + described_class.new.perform - before do - # Markdown upload before enabling hashed_storage - UploadService.new(project1, uploaded_file, FileUploader).execute + component = 'a' * 255 - stub_application_setting(hashed_storage_enabled: true) + long_path = [ + 'uploads', + component, # project.full_path + component # filename + ].flatten.join('/') - # Markdown upload after enabling hashed_storage - UploadService.new(project2, uploaded_file, FileUploader).execute + record = untracked_files_for_uploads.create!(path: long_path) + expect(record.reload.path.size).to eq(519) end it 'adds unhashed files to the untracked_files_for_uploads table' do @@ -106,14 +65,15 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do it 'does not add hashed files to the untracked_files_for_uploads table' do described_class.new.perform - hashed_file_path = project2.uploads.where(uploader: 'FileUploader').first.path + hashed_file_path = get_uploads(project2, 'Project').where(uploader: 'FileUploader').first.path expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey end it 'correctly schedules the follow-up background migration jobs' do described_class.new.perform - expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5) + ids = described_class::UntrackedFile.all.order(:id).pluck(:id) + expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(ids.first, ids.last) expect(BackgroundMigrationWorker.jobs.size).to eq(1) end @@ -130,91 +90,68 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end + # E.g. The installation is in use at the time of migration, and someone has + # just uploaded a file context 'when there are files in /uploads/tmp' do - it_behaves_like 'does not add files in /uploads/tmp' - end - end - end - - context 'test bulk insert without ON CONFLICT DO NOTHING or IGNORE' do - before do - # If this is CI, we use Postgres 9.2 so this stub has no effect. - # - # If this is being run on Postgres 9.5+ or MySQL, then this stub allows us - # to test the bulk insert functionality without ON CONFLICT DO NOTHING or - # IGNORE. 
- allow_any_instance_of(described_class).to receive(:postgresql_pre_9_5?).and_return(true) - end - - context 'when files were uploaded before and after hashed storage was enabled' do - let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) } - let!(:user) { create(:user, :with_avatar) } - let!(:project1) { create(:project, :with_avatar) } - let(:project2) { create(:project) } # instantiate after enabling hashed_storage - - before do - # Markdown upload before enabling hashed_storage - UploadService.new(project1, uploaded_file, FileUploader).execute - - stub_application_setting(hashed_storage_enabled: true) + let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } - # Markdown upload after enabling hashed_storage - UploadService.new(project2, uploaded_file, FileUploader).execute - end - - it 'adds unhashed files to the untracked_files_for_uploads table' do - described_class.new.perform - - expect(untracked_files_for_uploads.count).to eq(5) - end - - it 'adds files with paths relative to CarrierWave.root' do - described_class.new.perform - untracked_files_for_uploads.all.each do |file| - expect(file.path.start_with?('uploads/')).to be_truthy + before do + FileUtils.mkdir(File.dirname(tmp_file)) + FileUtils.touch(tmp_file) end - end - - it 'does not add hashed files to the untracked_files_for_uploads table' do - described_class.new.perform - hashed_file_path = project2.uploads.where(uploader: 'FileUploader').first.path - expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey - end + after do + FileUtils.rm(tmp_file) + end - it 'correctly schedules the follow-up background migration jobs' do - described_class.new.perform + it 'does not add files from /uploads/tmp' do + described_class.new.perform - expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5) - expect(BackgroundMigrationWorker.jobs.size).to eq(1) + expect(untracked_files_for_uploads.count).to eq(5) + end end - # E.g. from a previous failed run of this background migration - context 'when there is existing data in untracked_files_for_uploads' do - before do - described_class.new.perform - end + context 'when the last batch size exactly matches the max batch size' do + it 'does not raise error' do + stub_const("#{described_class}::FIND_BATCH_SIZE", 5) - it 'does not error or produce duplicates of existing data' do expect do described_class.new.perform - end.not_to change { untracked_files_for_uploads.count }.from(5) + end.not_to raise_error + + expect(untracked_files_for_uploads.count).to eq(5) end end + end + end - context 'when there are files in /uploads/tmp' do - it_behaves_like 'does not add files in /uploads/tmp' - end + # If running on Postgres 9.2 (like on CI), this whole context is skipped + # since we're unable to use ON CONFLICT DO NOTHING or IGNORE. + context "test bulk insert with ON CONFLICT DO NOTHING or IGNORE", if: described_class.new.send(:can_bulk_insert_and_ignore_duplicates?) do + it_behaves_like 'prepares the untracked_files_for_uploads table' + end + + # If running on Postgres 9.2 (like on CI), the stubbed method has no effect. + # + # If running on Postgres 9.5+ or MySQL, then this context effectively tests + # the bulk insert functionality without ON CONFLICT DO NOTHING or IGNORE. 
+ context 'test bulk insert without ON CONFLICT DO NOTHING or IGNORE' do + before do + allow_any_instance_of(described_class).to receive(:postgresql_pre_9_5?).and_return(true) end + + it_behaves_like 'prepares the untracked_files_for_uploads table' end # Very new or lightly-used installations that are running this migration # may not have an upload directory because they have no uploads. context 'when no files were ever uploaded' do - it 'does not add to the untracked_files_for_uploads table (and does not raise error)' do - described_class.new.perform + it 'deletes the `untracked_files_for_uploads` table (and does not raise error)' do + background_migration = described_class.new + + expect(background_migration).to receive(:drop_temp_table) - expect(untracked_files_for_uploads.count).to eq(0) + background_migration.perform end end end diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb index f302e412a6e..eb4b9d8b12f 100644 --- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb +++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb @@ -8,11 +8,15 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do subject(:importer) { described_class.new(admin, bare_repository) } before do + @rainbow = Rainbow.enabled + Rainbow.enabled = false + allow(described_class).to receive(:log) end after do FileUtils.rm_rf(base_dir) + Rainbow.enabled = @rainbow end shared_examples 'importing a repository' do @@ -148,7 +152,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do # This is a quick way to get a valid repository instead of copying an # existing one. Since it's not persisted, the importer will try to # create the project. - project = build(:project, :repository) + project = build(:project, :legacy_storage, :repository) original_commit_count = project.repository.commit_count bare_repo = Gitlab::BareRepositoryImport::Repository.new(project.repository_storage_path, project.repository.path) diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb index c2bca816aae..b49ddbfc780 100644 --- a/spec/lib/gitlab/checks/change_access_spec.rb +++ b/spec/lib/gitlab/checks/change_access_spec.rb @@ -177,5 +177,44 @@ describe Gitlab::Checks::ChangeAccess do expect { subject.exec }.not_to raise_error end end + + context 'LFS file lock check' do + let(:owner) { create(:user) } + let!(:lock) { create(:lfs_file_lock, user: owner, project: project, path: 'README') } + + before do + allow(project.repository).to receive(:new_commits).and_return( + project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51') + ) + end + + context 'with LFS not enabled' do + it 'skips the validation' do + expect_any_instance_of(Gitlab::Checks::CommitCheck).not_to receive(:validate) + + subject.exec + end + end + + context 'with LFS enabled' do + before do + allow(project).to receive(:lfs_enabled?).and_return(true) + end + + context 'when change is sent by a different user' do + it 'raises an error if the user is not allowed to update the file' do + expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked in Git LFS by #{lock.user.name}") + end + end + + context 'when change is sent by the author of the lock' do + let(:user) { owner } + + it "doesn't raise any error" do + expect { subject.exec }.not_to raise_error + end + end + end + end end end diff --git 
a/spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb new file mode 100644 index 00000000000..019a2ed184d --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb @@ -0,0 +1,39 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Lexeme::Equals do + let(:left) { double('left') } + let(:right) { double('right') } + + describe '.build' do + it 'creates a new instance of the token' do + expect(described_class.build('==', left, right)) + .to be_a(described_class) + end + end + + describe '.type' do + it 'is an operator' do + expect(described_class.type).to eq :operator + end + end + + describe '#evaluate' do + it 'returns false when left and right are not equal' do + allow(left).to receive(:evaluate).and_return(1) + allow(right).to receive(:evaluate).and_return(2) + + operator = described_class.new(left, right) + + expect(operator.evaluate(VARIABLE: 3)).to eq false + end + + it 'returns true when left and right are equal' do + allow(left).to receive(:evaluate).and_return(1) + allow(right).to receive(:evaluate).and_return(1) + + operator = described_class.new(left, right) + + expect(operator.evaluate(VARIABLE: 3)).to eq true + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/null_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/null_spec.rb new file mode 100644 index 00000000000..b5a59929e11 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/null_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Lexeme::Null do + describe '.build' do + it 'creates a new instance of the token' do + expect(described_class.build('null')) + .to be_a(described_class) + end + end + + describe '.type' do + it 'is a value lexeme' do + expect(described_class.type).to eq :value + end + end + + describe '#evaluate' do + it 'always evaluates to `nil`' do + expect(described_class.new('null').evaluate).to be_nil + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/string_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/string_spec.rb new file mode 100644 index 00000000000..86234dfb9e5 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/string_spec.rb @@ -0,0 +1,92 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Lexeme::String do + describe '.build' do + it 'creates a new instance of the token' do + expect(described_class.build('"my string"')) + .to be_a(described_class) + end + end + + describe '.type' do + it 'is a value lexeme' do + expect(described_class.type).to eq :value + end + end + + describe '.scan' do + context 'when using double quotes' do + it 'correctly identifies string token' do + scanner = StringScanner.new('"some string"') + + token = described_class.scan(scanner) + + expect(token).not_to be_nil + expect(token.build.evaluate).to eq 'some string' + end + end + + context 'when using single quotes' do + it 'correctly identifies string token' do + scanner = StringScanner.new("'some string 2'") + + token = described_class.scan(scanner) + + expect(token).not_to be_nil + expect(token.build.evaluate).to eq 'some string 2' + end + end + + context 'when there are mixed quotes in the string' do + it 'is a greedy scanner for double quotes' do + scanner = StringScanner.new('"some string" "and another one"') + + token = described_class.scan(scanner) + + expect(token).not_to be_nil + expect(token.build.evaluate).to eq 'some string' + end + + it 'is a 
greedy scanner for single quotes' do + scanner = StringScanner.new("'some string' 'and another one'") + + token = described_class.scan(scanner) + + expect(token).not_to be_nil + expect(token.build.evaluate).to eq 'some string' + end + + it 'allows to use single quotes inside double quotes' do + scanner = StringScanner.new(%("some ' string")) + + token = described_class.scan(scanner) + + expect(token).not_to be_nil + expect(token.build.evaluate).to eq "some ' string" + end + + it 'allows to use double quotes inside single quotes' do + scanner = StringScanner.new(%('some " string')) + + token = described_class.scan(scanner) + + expect(token).not_to be_nil + expect(token.build.evaluate).to eq 'some " string' + end + end + end + + describe '#evaluate' do + it 'returns string value if it is present' do + string = described_class.new('my string') + + expect(string.evaluate).to eq 'my string' + end + + it 'returns an empty string if it is empty' do + string = described_class.new('') + + expect(string.evaluate).to eq '' + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb new file mode 100644 index 00000000000..599a5411881 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Lexeme::Variable do + describe '.build' do + it 'creates a new instance of the token' do + expect(described_class.build('$VARIABLE')) + .to be_a(described_class) + end + end + + describe '.type' do + it 'is a value lexeme' do + expect(described_class.type).to eq :value + end + end + + describe '#evaluate' do + it 'returns variable value if it is defined' do + variable = described_class.new('VARIABLE') + + expect(variable.evaluate(VARIABLE: 'my variable')) + .to eq 'my variable' + end + + it 'allows to use a string as a variable key too' do + variable = described_class.new('VARIABLE') + + expect(variable.evaluate('VARIABLE' => 'my variable')) + .to eq 'my variable' + end + + it 'returns nil if it is not defined' do + variable = described_class.new('VARIABLE') + + expect(variable.evaluate(OTHER: 'variable')).to be_nil + end + + it 'returns an empty string if it is empty' do + variable = described_class.new('VARIABLE') + + expect(variable.evaluate(VARIABLE: '')).to eq '' + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb new file mode 100644 index 00000000000..230ceeb07f8 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb @@ -0,0 +1,70 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Lexer do + let(:token_class) do + Gitlab::Ci::Pipeline::Expression::Token + end + + describe '#tokens' do + it 'tokenizes single value' do + tokens = described_class.new('$VARIABLE').tokens + + expect(tokens).to be_one + expect(tokens).to all(be_an_instance_of(token_class)) + end + + it 'does ignore whitespace characters' do + tokens = described_class.new("\t$VARIABLE ").tokens + + expect(tokens).to be_one + expect(tokens).to all(be_an_instance_of(token_class)) + end + + it 'tokenizes multiple values of the same token' do + tokens = described_class.new("$VARIABLE1 $VARIABLE2").tokens + + expect(tokens.size).to eq 2 + expect(tokens).to all(be_an_instance_of(token_class)) + end + + it 'tokenizes multiple values with different tokens' do + tokens = described_class.new('$VARIABLE "text" "value"').tokens + + 
expect(tokens.size).to eq 3 + expect(tokens.first.value).to eq '$VARIABLE' + expect(tokens.second.value).to eq '"text"' + expect(tokens.third.value).to eq '"value"' + end + + it 'tokenizes tokens and operators' do + tokens = described_class.new('$VARIABLE == "text"').tokens + + expect(tokens.size).to eq 3 + expect(tokens.first.value).to eq '$VARIABLE' + expect(tokens.second.value).to eq '==' + expect(tokens.third.value).to eq '"text"' + end + + it 'limits statement to specified amount of tokens' do + lexer = described_class.new("$V1 $V2 $V3 $V4", max_tokens: 3) + + expect { lexer.tokens } + .to raise_error described_class::SyntaxError + end + + it 'raises syntax error in case of finding unknown tokens' do + lexer = described_class.new('$V1 123 $V2') + + expect { lexer.tokens } + .to raise_error described_class::SyntaxError + end + end + + describe '#lexemes' do + it 'returns an array of syntax lexemes' do + lexer = described_class.new('$VAR "text"') + + expect(lexer.lexemes).to eq %w[variable string] + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb new file mode 100644 index 00000000000..e8e6f585310 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb @@ -0,0 +1,26 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Parser do + describe '#tree' do + context 'when using operators' do + it 'returns a reverse descent parse tree' do + expect(described_class.seed('$VAR1 == "123" == $VAR2').tree) + .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Equals + end + end + + context 'when using a single token' do + it 'returns a single token instance' do + expect(described_class.seed('$VAR').tree) + .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Variable + end + end + + context 'when expression is empty' do + it 'returns a null token' do + expect(described_class.seed('').tree) + .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Null + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb new file mode 100644 index 00000000000..472a58599d8 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb @@ -0,0 +1,85 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Statement do + let(:pipeline) { build(:ci_pipeline) } + + subject do + described_class.new(text, pipeline) + end + + before do + pipeline.variables.build([key: 'VARIABLE', value: 'my variable']) + end + + describe '#parse_tree' do + context 'when expression is empty' do + let(:text) { '' } + + it 'raises an error' do + expect { subject.parse_tree } + .to raise_error described_class::StatementError + end + end + + context 'when expression grammar is incorrect' do + table = [ + '$VAR "text"', # missing operator + '== "123"', # invalid right side + "'single quotes'", # single quotes string + '$VAR ==', # invalid right side + '12345', # unknown syntax + '' # empty statement + ] + + table.each do |syntax| + it "raises an error when syntax is `#{syntax}`" do + expect { described_class.new(syntax, pipeline).parse_tree } + .to raise_error described_class::StatementError + end + end + end + + context 'when expression grammar is correct' do + context 'when using an operator' do + let(:text) { '$VAR == "value"' } + + it 'returns a reverse descent parse tree' do + expect(subject.parse_tree) + .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Equals + end + end + + context 'when using a 
single token' do + let(:text) { '$VARIABLE' } + + it 'returns a single token instance' do + expect(subject.parse_tree) + .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Variable + end + end + end + end + + describe '#evaluate' do + statements = [ + ['$VARIABLE == "my variable"', true], + ["$VARIABLE == 'my variable'", true], + ['"my variable" == $VARIABLE', true], + ['$VARIABLE == null', false], + ['$VAR == null', true], + ['null == $VAR', true], + ['$VARIABLE', 'my variable'], + ['$VAR', nil] + ] + + statements.each do |expression, value| + context "when using expression `#{expression}`" do + let(:text) { expression } + + it "evaluates to `#{value.inspect}`" do + expect(subject.evaluate).to eq value + end + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/expression/token_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/token_spec.rb new file mode 100644 index 00000000000..6d7453f0de5 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/expression/token_spec.rb @@ -0,0 +1,45 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Expression::Token do + let(:value) { '$VARIABLE' } + let(:lexeme) { Gitlab::Ci::Pipeline::Expression::Lexeme::Variable } + + subject { described_class.new(value, lexeme) } + + describe '#value' do + it 'returns raw token value' do + expect(subject.value).to eq value + end + end + + describe '#lexeme' do + it 'returns raw token lexeme' do + expect(subject.lexeme).to eq lexeme + end + end + + describe '#build' do + it 'delegates to lexeme after adding a value' do + expect(lexeme).to receive(:build) + .with(value, 'some', 'args') + + subject.build('some', 'args') + end + + it 'allows passing only required arguments' do + expect(subject.build).to be_an_instance_of(lexeme) + end + end + + describe '#type' do + it 'delegates type query to the lexeme' do + expect(subject.type).to eq :value + end + end + + describe '#to_lexeme' do + it 'returns raw lexeme syntax component name' do + expect(subject.to_lexeme).to eq 'variable' + end + end +end diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb index 28c679af12a..8d4862932b2 100644 --- a/spec/lib/gitlab/closing_issue_extractor_spec.rb +++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb @@ -365,6 +365,20 @@ describe Gitlab::ClosingIssueExtractor do .to match_array([issue, other_issue, third_issue]) end + it 'allows oxford commas (comma before and) when referencing multiple issues' do + message = "Closes #{reference}, #{reference2}, and #{reference3}" + + expect(subject.closed_by_message(message)) + .to match_array([issue, other_issue, third_issue]) + end + + it 'allows spaces before commas when referencing multiple issues' do + message = "Closes #{reference} , #{reference2} , and #{reference3}" + + expect(subject.closed_by_message(message)) + .to match_array([issue, other_issue, third_issue]) + end + it 'fetches issues in multi-line message' do message = "Awesome commit (closes #{reference})\nAlso fixes #{reference2}" diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb index f1655854486..49a179ba875 100644 --- a/spec/lib/gitlab/contributions_calendar_spec.rb +++ b/spec/lib/gitlab/contributions_calendar_spec.rb @@ -118,6 +118,19 @@ describe Gitlab::ContributionsCalendar do expect(calendar.events_by_date(today)).to contain_exactly(e1) expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3) end + + context 'when the user cannot read read cross project' do + before do + 
allow(Ability).to receive(:allowed?).and_call_original + expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + end + + it 'does not return any events' do + create_event(public_project, today) + + expect(calendar(user).events_by_date(today)).to be_empty + end + end end describe '#starting_year' do diff --git a/spec/lib/gitlab/cross_project_access/check_collection_spec.rb b/spec/lib/gitlab/cross_project_access/check_collection_spec.rb new file mode 100644 index 00000000000..a9e7575240e --- /dev/null +++ b/spec/lib/gitlab/cross_project_access/check_collection_spec.rb @@ -0,0 +1,55 @@ +require 'spec_helper' + +describe Gitlab::CrossProjectAccess::CheckCollection do + subject(:collection) { described_class.new } + + describe '#add_collection' do + it 'merges the checks of 2 collections' do + initial_check = double('check') + collection.add_check(initial_check) + + other_collection = described_class.new + other_check = double('other_check') + other_collection.add_check(other_check) + + shared_check = double('shared check') + other_collection.add_check(shared_check) + collection.add_check(shared_check) + + collection.add_collection(other_collection) + + expect(collection.checks).to contain_exactly(initial_check, shared_check, other_check) + end + end + + describe '#should_run?' do + def fake_check(run, skip) + check = double("Check: run=#{run} - skip=#{skip}") + allow(check).to receive(:should_run?).and_return(run) + allow(check).to receive(:should_skip?).and_return(skip) + allow(check).to receive(:skip).and_return(skip) + + check + end + + it 'returns true if one of the checks says it should run' do + check = fake_check(true, false) + other_check = fake_check(false, false) + + collection.add_check(check) + collection.add_check(other_check) + + expect(collection.should_run?(double)).to be_truthy + end + + it 'returns false if one of the checks says it should be skipped' do + check = fake_check(true, false) + other_check = fake_check(false, true) + + collection.add_check(check) + collection.add_check(other_check) + + expect(collection.should_run?(double)).to be_falsey + end + end +end diff --git a/spec/lib/gitlab/cross_project_access/check_info_spec.rb b/spec/lib/gitlab/cross_project_access/check_info_spec.rb new file mode 100644 index 00000000000..bc9dbf2bece --- /dev/null +++ b/spec/lib/gitlab/cross_project_access/check_info_spec.rb @@ -0,0 +1,111 @@ +require 'spec_helper' + +describe Gitlab::CrossProjectAccess::CheckInfo do + let(:dummy_controller) { double } + + before do + allow(dummy_controller).to receive(:action_name).and_return('index') + end + + describe '#should_run?' 
do + it 'runs when an action is defined' do + info = described_class.new({ index: true }, nil, nil, false) + + expect(info.should_run?(dummy_controller)).to be_truthy + end + + it 'runs when the action is missing' do + info = described_class.new({}, nil, nil, false) + + expect(info.should_run?(dummy_controller)).to be_truthy + end + + it 'does not run when the action is excluded' do + info = described_class.new({ index: false }, nil, nil, false) + + expect(info.should_run?(dummy_controller)).to be_falsy + end + + it 'runs when the `if` conditional is true' do + info = described_class.new({}, -> { true }, nil, false) + + expect(info.should_run?(dummy_controller)).to be_truthy + end + + it 'does not run when the `if` conditional is false' do + info = described_class.new({}, -> { false }, nil, false) + + expect(info.should_run?(dummy_controller)).to be_falsy + end + + it 'does not run when the `unless` check is true' do + info = described_class.new({}, nil, -> { true }, false) + + expect(info.should_run?(dummy_controller)).to be_falsy + end + + it 'runs when the `unless` check is false' do + info = described_class.new({}, nil, -> { false }, false) + + expect(info.should_run?(dummy_controller)).to be_truthy + end + + it 'returns the opposite of #should_skip? when the check is a skip' do + info = described_class.new({}, nil, nil, true) + + expect(info).to receive(:should_skip?).with(dummy_controller).and_return(false) + expect(info.should_run?(dummy_controller)).to be_truthy + end + end + + describe '#should_skip?' do + it 'skips when an action is defined' do + info = described_class.new({ index: true }, nil, nil, true) + + expect(info.should_skip?(dummy_controller)).to be_truthy + end + + it 'does not skip when the action is not defined' do + info = described_class.new({}, nil, nil, true) + + expect(info.should_skip?(dummy_controller)).to be_falsy + end + + it 'does not skip when the action is excluded' do + info = described_class.new({ index: false }, nil, nil, true) + + expect(info.should_skip?(dummy_controller)).to be_falsy + end + + it 'skips when the `if` conditional is true' do + info = described_class.new({ index: true }, -> { true }, nil, true) + + expect(info.should_skip?(dummy_controller)).to be_truthy + end + + it 'does not skip when the `if` conditional is false' do + info = described_class.new({ index: true }, -> { false }, nil, true) + + expect(info.should_skip?(dummy_controller)).to be_falsy + end + + it 'does not skip when the `unless` check is true' do + info = described_class.new({ index: true }, nil, -> { true }, true) + + expect(info.should_skip?(dummy_controller)).to be_falsy + end + + it 'skips when the `unless` check is false' do + info = described_class.new({ index: true }, nil, -> { false }, true) + + expect(info.should_skip?(dummy_controller)).to be_truthy + end + + it 'returns the opposite of #should_run? 
when the check is not a skip' do + info = described_class.new({}, nil, nil, false) + + expect(info).to receive(:should_run?).with(dummy_controller).and_return(false) + expect(info.should_skip?(dummy_controller)).to be_truthy + end + end +end diff --git a/spec/lib/gitlab/cross_project_access/class_methods_spec.rb b/spec/lib/gitlab/cross_project_access/class_methods_spec.rb new file mode 100644 index 00000000000..5349685e633 --- /dev/null +++ b/spec/lib/gitlab/cross_project_access/class_methods_spec.rb @@ -0,0 +1,46 @@ +require 'spec_helper' + +describe Gitlab::CrossProjectAccess::ClassMethods do + let(:dummy_class) do + Class.new do + extend Gitlab::CrossProjectAccess::ClassMethods + end + end + let(:dummy_proc) { lambda { false } } + + describe '#requires_cross_project_access' do + it 'creates a correct check when a hash is passed' do + expect(Gitlab::CrossProjectAccess) + .to receive(:add_check).with(dummy_class, + actions: { hello: true, world: false }, + positive_condition: dummy_proc, + negative_condition: dummy_proc) + + dummy_class.requires_cross_project_access( + hello: true, world: false, if: dummy_proc, unless: dummy_proc + ) + end + + it 'creates a correct check when an array is passed' do + expect(Gitlab::CrossProjectAccess) + .to receive(:add_check).with(dummy_class, + actions: { hello: true, world: true }, + positive_condition: nil, + negative_condition: nil) + + dummy_class.requires_cross_project_access(:hello, :world) + end + + it 'creates a correct check when an array and a hash is passed' do + expect(Gitlab::CrossProjectAccess) + .to receive(:add_check).with(dummy_class, + actions: { hello: true, world: true }, + positive_condition: dummy_proc, + negative_condition: dummy_proc) + + dummy_class.requires_cross_project_access( + :hello, :world, if: dummy_proc, unless: dummy_proc + ) + end + end +end diff --git a/spec/lib/gitlab/cross_project_access_spec.rb b/spec/lib/gitlab/cross_project_access_spec.rb new file mode 100644 index 00000000000..614b0473c7e --- /dev/null +++ b/spec/lib/gitlab/cross_project_access_spec.rb @@ -0,0 +1,84 @@ +require 'spec_helper' + +describe Gitlab::CrossProjectAccess do + let(:super_class) { Class.new } + let(:descendant_class) { Class.new(super_class) } + let(:current_instance) { described_class.new } + + before do + allow(described_class).to receive(:instance).and_return(current_instance) + end + + describe '#add_check' do + it 'keeps track of the properties to check' do + expect do + described_class.add_check(super_class, + actions: { index: true }, + positive_condition: -> { true }, + negative_condition: -> { false }) + end.to change { described_class.checks.size }.by(1) + end + + it 'builds the check correctly' do + check_collection = described_class.add_check(super_class, + actions: { index: true }, + positive_condition: -> { 'positive' }, + negative_condition: -> { 'negative' }) + + check = check_collection.checks.first + + expect(check.actions).to eq(index: true) + expect(check.positive_condition.call).to eq('positive') + expect(check.negative_condition.call).to eq('negative') + end + + it 'merges the checks of a parent class into existing checks of a subclass' do + subclass_collection = described_class.add_check(descendant_class) + + expect(subclass_collection).to receive(:add_collection).and_call_original + + described_class.add_check(super_class) + end + + it 'merges the existing checks of a superclass into the checks of a subclass' do + super_collection = described_class.add_check(super_class) + descendant_collection = 
described_class.add_check(descendant_class) + + expect(descendant_collection.checks).to include(*super_collection.checks) + end + end + + describe '#find_check' do + it 'returns a check when it was defined for a superclass' do + expected_check = described_class.add_check(super_class, + actions: { index: true }, + positive_condition: -> { 'positive' }, + negative_condition: -> { 'negative' }) + + expect(described_class.find_check(descendant_class.new)) + .to eq(expected_check) + end + + it 'caches the result for a subclass' do + described_class.add_check(super_class, + actions: { index: true }, + positive_condition: -> { 'positive' }, + negative_condition: -> { 'negative' }) + + expect(described_class.instance).to receive(:closest_parent).once.and_call_original + + 2.times { described_class.find_check(descendant_class.new) } + end + + it 'returns the checks for the closest class if there are more checks available' do + described_class.add_check(super_class, + actions: { index: true }) + expected_check = described_class.add_check(descendant_class, + actions: { index: true, show: false }) + + check = described_class.find_check(descendant_class.new) + + expect(check).to eq(expected_check) + end + end +end diff --git a/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb index 3fe0493ed9b..8b07da11c5d 100644 --- a/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb +++ b/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb @@ -41,7 +41,7 @@ describe Gitlab::CycleAnalytics::BaseEventFetcher do milestone = create(:milestone, project: project) issue.update(milestone: milestone) - create_merge_request_closing_issue(issue) + create_merge_request_closing_issue(user, project, issue) end end end diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb index 38a47a159e1..397dd4e5d2c 100644 --- a/spec/lib/gitlab/cycle_analytics/events_spec.rb +++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb @@ -236,8 +236,8 @@ describe 'cycle analytics events' do pipeline.run! pipeline.succeed! 
- merge_merge_requests_closing_issue(context) - deploy_master + merge_merge_requests_closing_issue(user, project, context) + deploy_master(user, project) end it 'has the name' do @@ -294,8 +294,8 @@ describe 'cycle analytics events' do let!(:context) { create(:issue, project: project, created_at: 2.days.ago) } before do - merge_merge_requests_closing_issue(context) - deploy_master + merge_merge_requests_closing_issue(user, project, context) + deploy_master(user, project) end it 'has the total time' do @@ -334,7 +334,7 @@ describe 'cycle analytics events' do def setup(context) milestone = create(:milestone, project: project) context.update(milestone: milestone) - mr = create_merge_request_closing_issue(context, commit_message: "References #{context.to_reference}") + mr = create_merge_request_closing_issue(user, project, context, commit_message: "References #{context.to_reference}") ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash) end diff --git a/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb b/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb new file mode 100644 index 00000000000..56a316318cb --- /dev/null +++ b/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb @@ -0,0 +1,140 @@ +require 'spec_helper' + +describe Gitlab::CycleAnalytics::UsageData do + describe '#to_json' do + before do + Timecop.freeze do + user = create(:user, :admin) + projects = create_list(:project, 2, :repository) + + projects.each_with_index do |project, time| + issue = create(:issue, project: project, created_at: (time + 1).hour.ago) + + allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue]) + + milestone = create(:milestone, project: project) + mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") + pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) + + create_cycle(user, project, issue, mr, milestone, pipeline) + deploy_master(user, project, environment: 'staging') + deploy_master(user, project) + end + end + end + + shared_examples 'a valid usage data result' do + it 'returns the aggregated usage data of every selected project' do + result = subject.to_json + + expect(result).to have_key(:avg_cycle_analytics) + + CycleAnalytics::STAGES.each do |stage| + expect(result[:avg_cycle_analytics]).to have_key(stage) + + stage_values = result[:avg_cycle_analytics][stage] + expected_values = expect_values_per_stage[stage] + + expected_values.each_pair do |op, value| + expect(stage_values).to have_key(op) + + if op == :missing + expect(stage_values[op]).to eq(value) + else + # delta is used because of git timings that Timecop does not stub + expect(stage_values[op].to_i).to be_within(5).of(value.to_i) + end + end + end + end + end + + context 'when using postgresql', :postgresql do + let(:expect_values_per_stage) do + { + issue: { + average: 5400, + sd: 2545, + missing: 0 + }, + plan: { + average: 2, + sd: 2, + missing: 0 + }, + code: { + average: nil, + sd: 0, + missing: 2 + }, + test: { + average: nil, + sd: 0, + missing: 2 + }, + review: { + average: 0, + sd: 0, + missing: 0 + }, + staging: { + average: 0, + sd: 0, + missing: 0 + }, + production: { + average: 5400, + sd: 2545, + missing: 0 + } + } + end + + it_behaves_like 'a valid usage data result' + end + + context 'when using mysql', :mysql do + let(:expect_values_per_stage) do + { + issue: { + average: nil, + sd: 0, + missing: 2 + }, + plan: { 
+ average: nil, + sd: 0, + missing: 2 + }, + code: { + average: nil, + sd: 0, + missing: 2 + }, + test: { + average: nil, + sd: 0, + missing: 2 + }, + review: { + average: nil, + sd: 0, + missing: 2 + }, + staging: { + average: nil, + sd: 0, + missing: 2 + }, + production: { + average: nil, + sd: 0, + missing: 2 + } + } + end + + it_behaves_like 'a valid usage data result' + end + end +end diff --git a/spec/lib/gitlab/database/median_spec.rb b/spec/lib/gitlab/database/median_spec.rb new file mode 100644 index 00000000000..1b5e30089ce --- /dev/null +++ b/spec/lib/gitlab/database/median_spec.rb @@ -0,0 +1,17 @@ +require 'spec_helper' + +describe Gitlab::Database::Median do + let(:dummy_class) do + Class.new do + include Gitlab::Database::Median + end + end + + subject(:median) { dummy_class.new } + + describe '#median_datetimes' do + it 'raises NotSupportedError', :mysql do + expect { median.median_datetimes(nil, nil, nil, :project_id) }.to raise_error(dummy_class::NotSupportedError, "partition_column is not supported for MySQL") + end + end +end diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb index f31475dbd71..b411aaa19da 100644 --- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb @@ -94,7 +94,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, : describe '#move_repositories' do let(:namespace) { create(:group, name: 'hello-group') } it 'moves a project for a namespace' do - create(:project, :repository, namespace: namespace, path: 'hello-project') + create(:project, :repository, :legacy_storage, namespace: namespace, path: 'hello-project') expected_path = File.join(TestEnv.repos_path, 'bye-group', 'hello-project.git') subject.move_repositories(namespace, 'hello-group', 'bye-group') @@ -104,7 +104,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, : it 'moves a namespace in a subdirectory correctly' do child_namespace = create(:group, name: 'sub-group', parent: namespace) - create(:project, :repository, namespace: child_namespace, path: 'hello-project') + create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project') expected_path = File.join(TestEnv.repos_path, 'hello-group', 'renamed-sub-group', 'hello-project.git') @@ -115,7 +115,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, : it 'moves a parent namespace with subdirectories' do child_namespace = create(:group, name: 'sub-group', parent: namespace) - create(:project, :repository, namespace: child_namespace, path: 'hello-project') + create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project') expected_path = File.join(TestEnv.repos_path, 'renamed-group', 'sub-group', 'hello-project.git') subject.move_repositories(child_namespace, 'hello-group', 'renamed-group') @@ -166,7 +166,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, : describe '#rename_namespace_dependencies' do it "moves the the repository for a project in the namespace" do - create(:project, :repository, namespace: namespace, path: "the-path-project") + create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project") expected_repo = File.join(TestEnv.repos_path, "the-path0", 
"the-path-project.git") subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0') @@ -187,7 +187,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, : end it 'invalidates the markdown cache of related projects' do - project = create(:project, namespace: namespace, path: "the-path-project") + project = create(:project, :legacy_storage, namespace: namespace, path: "the-path-project") expect(subject).to receive(:remove_cached_html_for_projects).with([project.id]) @@ -243,7 +243,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, : describe '#revert_renames', :redis do it 'renames the routes back to the previous values' do - project = create(:project, :repository, path: 'a-project', namespace: namespace) + project = create(:project, :legacy_storage, :repository, path: 'a-project', namespace: namespace) subject.rename_namespace(namespace) expect(subject).to receive(:perform_rename) @@ -261,7 +261,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, : end it 'moves the repositories back to their original place' do - project = create(:project, :repository, path: 'a-project', namespace: namespace) + project = create(:project, :repository, :legacy_storage, path: 'a-project', namespace: namespace) project.create_repository subject.rename_namespace(namespace) diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb index 0958144643b..b4896d69077 100644 --- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb @@ -5,6 +5,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de let(:subject) { described_class.new(['the-path'], migration) } let(:project) do create(:project, + :legacy_storage, path: 'the-path', namespace: create(:namespace, path: 'known-parent' )) end @@ -17,7 +18,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de describe '#projects_for_paths' do it 'searches using nested paths' do namespace = create(:namespace, path: 'hello') - project = create(:project, path: 'THE-path', namespace: namespace) + project = create(:project, :legacy_storage, path: 'THE-path', namespace: namespace) result_ids = described_class.new(['Hello/the-path'], migration) .projects_for_paths.map(&:id) @@ -26,8 +27,8 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de end it 'includes the correct projects' do - project = create(:project, path: 'THE-path') - _other_project = create(:project) + project = create(:project, :legacy_storage, path: 'THE-path') + _other_project = create(:project, :legacy_storage) result_ids = subject.projects_for_paths.map(&:id) @@ -36,7 +37,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de end describe '#rename_projects' do - let!(:projects) { create_list(:project, 2, path: 'the-path') } + let!(:projects) { create_list(:project, 2, :legacy_storage, path: 'the-path') } it 'renames each project' do expect(subject).to receive(:rename_project).twice @@ -120,7 +121,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de describe '#move_repository' do let(:known_parent) { create(:namespace, path: 'known-parent') } - let(:project) { create(:project, :repository, path: 'the-path', 
namespace: known_parent) } + let(:project) { create(:project, :repository, :legacy_storage, path: 'the-path', namespace: known_parent) } it 'moves the repository for a project' do expected_path = File.join(TestEnv.repos_path, 'known-parent', 'new-repo.git') diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb index cd602ccab8e..73d60c021c8 100644 --- a/spec/lib/gitlab/diff/highlight_spec.rb +++ b/spec/lib/gitlab/diff/highlight_spec.rb @@ -72,6 +72,28 @@ describe Gitlab::Diff::Highlight do expect(subject[5].text).to eq(code) expect(subject[5].text).to be_html_safe end + + context 'when the inline diff marker has an invalid range' do + before do + allow_any_instance_of(Gitlab::Diff::InlineDiffMarker).to receive(:mark).and_raise(RangeError) + end + + it 'keeps the original rich line' do + code = %q{+ raise RuntimeError, "System commands must be given as an array of strings"} + + expect(subject[5].text).to eq(code) + expect(subject[5].text).not_to be_html_safe + end + + it 'reports to Sentry if configured' do + allow(Gitlab::Sentry).to receive(:enabled?).and_return(true) + + expect(Gitlab::Sentry).to receive(:context) + expect(Raven).to receive(:capture_exception) + + subject + end + end end end end diff --git a/spec/lib/gitlab/email/attachment_uploader_spec.rb b/spec/lib/gitlab/email/attachment_uploader_spec.rb index f61dbc67ad1..45c690842bc 100644 --- a/spec/lib/gitlab/email/attachment_uploader_spec.rb +++ b/spec/lib/gitlab/email/attachment_uploader_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" describe Gitlab::Email::AttachmentUploader do describe "#execute" do - let(:project) { build(:project) } + let(:project) { create(:project) } let(:message_raw) { fixture_file("emails/attachment.eml") } let(:message) { Mail::Message.new(message_raw) } diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb index 4e9367323cb..83d431a7458 100644 --- a/spec/lib/gitlab/encoding_helper_spec.rb +++ b/spec/lib/gitlab/encoding_helper_spec.rb @@ -24,6 +24,11 @@ describe Gitlab::EncodingHelper do 'removes invalid bytes from ASCII-8bit encoded multibyte string. This can occur when a git diff match line truncates in the middle of a multibyte character. This occurs after the second word in this example. The test string is as short as we can get while still triggering the error condition when not looking at `detect[:confidence]`.', "mu ns\xC3\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. 
Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi ".force_encoding('ASCII-8BIT'), "mu ns\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi " + ], + [ + 'string with detected encoding that is not supported in Ruby', + "\xFFe,i\xFF,\xB8oi,'\xB8,\xFF,-", + "--broken encoding: IBM420_ltr" ] ].each do |description, test_string, xpect| it description do diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb index 326ed2f2ecf..13df8531b63 100644 --- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb +++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb @@ -39,8 +39,8 @@ describe Gitlab::Gfm::UploadsRewriter do it 'copies files' do expect(new_files).to all(exist) expect(old_paths).not_to match_array new_paths - expect(old_paths).to all(include(old_project.full_path)) - expect(new_paths).to all(include(new_project.full_path)) + expect(old_paths).to all(include(old_project.disk_path)) + expect(new_paths).to all(include(new_project.disk_path)) end it 'does not remove old files' do diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb index 8ac960133c5..a6341cd509b 100644 --- a/spec/lib/gitlab/git/blob_spec.rb +++ b/spec/lib/gitlab/git/blob_spec.rb @@ -178,67 +178,77 @@ describe Gitlab::Git::Blob, seed_helper: true do end describe '.batch' do - let(:blob_references) do - [ - [SeedRepo::Commit::ID, "files/ruby/popen.rb"], - [SeedRepo::Commit::ID, 'six'] - ] - end + shared_examples 'loading blobs in batch' do + let(:blob_references) do + [ + [SeedRepo::Commit::ID, "files/ruby/popen.rb"], + [SeedRepo::Commit::ID, 'six'] + ] + end - subject { described_class.batch(repository, blob_references) } + subject { described_class.batch(repository, blob_references) } - it { expect(subject.size).to eq(blob_references.size) } + it { expect(subject.size).to eq(blob_references.size) } - context 'first blob' do - let(:blob) { subject[0] } + context 'first blob' do + let(:blob) { subject[0] } - it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) } - it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) } - it { expect(blob.path).to eq("files/ruby/popen.rb") } - it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) } - it { expect(blob.data[0..10]).to 
eq(SeedRepo::RubyBlob::CONTENT[0..10]) } - it { expect(blob.size).to eq(669) } - it { expect(blob.mode).to eq("100644") } - end + it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) } + it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) } + it { expect(blob.path).to eq("files/ruby/popen.rb") } + it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) } + it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) } + it { expect(blob.size).to eq(669) } + it { expect(blob.mode).to eq("100644") } + end - context 'second blob' do - let(:blob) { subject[1] } + context 'second blob' do + let(:blob) { subject[1] } - it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') } - it { expect(blob.data).to eq('') } - it 'does not mark the blob as binary' do - expect(blob).not_to be_binary + it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') } + it { expect(blob.data).to eq('') } + it 'does not mark the blob as binary' do + expect(blob).not_to be_binary + end end - end - context 'limiting' do - subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) } + context 'limiting' do + subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) } - context 'positive' do - let(:blob_size_limit) { 10 } + context 'positive' do + let(:blob_size_limit) { 10 } - it { expect(subject.first.data.size).to eq(10) } - end + it { expect(subject.first.data.size).to eq(10) } + end - context 'zero' do - let(:blob_size_limit) { 0 } + context 'zero' do + let(:blob_size_limit) { 0 } - it 'only loads the metadata' do - expect(subject.first.size).not_to be(0) - expect(subject.first.data).to eq('') + it 'only loads the metadata' do + expect(subject.first.size).not_to be(0) + expect(subject.first.data).to eq('') + end end - end - context 'negative' do - let(:blob_size_limit) { -1 } + context 'negative' do + let(:blob_size_limit) { -1 } - it 'ignores MAX_DATA_DISPLAY_SIZE' do - stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100) + it 'ignores MAX_DATA_DISPLAY_SIZE' do + stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100) - expect(subject.first.data.size).to eq(669) + expect(subject.first.data.size).to eq(669) + end end end end + + context 'when Gitaly list_blobs_by_sha_path feature is enabled' do + it_behaves_like 'loading blobs in batch' + end + + context 'when Gitaly list_blobs_by_sha_path feature is disabled', :disable_gitaly do + it_behaves_like 'loading blobs in batch' + end end describe '.batch_lfs_pointers' do @@ -266,6 +276,7 @@ describe Gitlab::Git::Blob, seed_helper: true do expect(blobs.count).to eq(1) expect(blobs).to all( be_a(Gitlab::Git::Blob) ) + expect(blobs).to be_an(Array) end it 'accepts blob IDs as a lazy enumerator' do diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb index 85e6efd7ca2..a05feaac1ca 100644 --- a/spec/lib/gitlab/git/commit_spec.rb +++ b/spec/lib/gitlab/git/commit_spec.rb @@ -102,6 +102,10 @@ describe Gitlab::Git::Commit, seed_helper: true do expect(described_class.find(repository, SeedRepo::Commit::ID)).to be_valid_commit end + it "returns an array of parent ids" do + expect(described_class.find(repository, SeedRepo::Commit::ID).parent_ids).to be_an(Array) + end + it "should return valid commit for tag" do expect(described_class.find(repository, 'v1.0.0').id).to eq('6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') end @@ -389,81 +393,111 @@ describe Gitlab::Git::Commit, seed_helper: true do end end - describe '.extract_signature' do 
- subject { described_class.extract_signature(repository, commit_id) } + shared_examples 'extracting commit signature' do + context 'when the commit is signed' do + let(:commit_id) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } - shared_examples '.extract_signature' do - context 'when the commit is signed' do - let(:commit_id) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } + it 'returns signature and signed text' do + signature, signed_text = subject - it 'returns signature and signed text' do - signature, signed_text = subject + expected_signature = <<~SIGNATURE + -----BEGIN PGP SIGNATURE----- + Version: GnuPG/MacGPG2 v2.0.22 (Darwin) + Comment: GPGTools - https://gpgtools.org - expected_signature = <<~SIGNATURE - -----BEGIN PGP SIGNATURE----- - Version: GnuPG/MacGPG2 v2.0.22 (Darwin) - Comment: GPGTools - https://gpgtools.org + iQEcBAABCgAGBQJTDvaZAAoJEGJ8X1ifRn8XfvYIAMuB0yrbTGo1BnOSoDfyrjb0 + Kw2EyUzvXYL72B63HMdJ+/0tlSDC6zONF3fc+bBD8z+WjQMTbwFNMRbSSy2rKEh+ + mdRybOP3xBIMGgEph0/kmWln39nmFQBsPRbZBWoU10VfI/ieJdEOgOphszgryRar + TyS73dLBGE9y9NIININVaNISet9D9QeXFqc761CGjh4YIghvPpi+YihMWapGka6v + hgKhX+hc5rj+7IEE0CXmlbYR8OYvAbAArc5vJD7UTxAY4Z7/l9d6Ydt9GQ25khfy + ANFgltYzlR6evLFmDjssiP/mx/ZMN91AL0ueJ9nNGv411Mu2CUW+tDCaQf35mdc= + =j51i + -----END PGP SIGNATURE----- + SIGNATURE - iQEcBAABCgAGBQJTDvaZAAoJEGJ8X1ifRn8XfvYIAMuB0yrbTGo1BnOSoDfyrjb0 - Kw2EyUzvXYL72B63HMdJ+/0tlSDC6zONF3fc+bBD8z+WjQMTbwFNMRbSSy2rKEh+ - mdRybOP3xBIMGgEph0/kmWln39nmFQBsPRbZBWoU10VfI/ieJdEOgOphszgryRar - TyS73dLBGE9y9NIININVaNISet9D9QeXFqc761CGjh4YIghvPpi+YihMWapGka6v - hgKhX+hc5rj+7IEE0CXmlbYR8OYvAbAArc5vJD7UTxAY4Z7/l9d6Ydt9GQ25khfy - ANFgltYzlR6evLFmDjssiP/mx/ZMN91AL0ueJ9nNGv411Mu2CUW+tDCaQf35mdc= - =j51i - -----END PGP SIGNATURE----- - SIGNATURE + expect(signature).to eq(expected_signature.chomp) + expect(signature).to be_a_binary_string - expect(signature).to eq(expected_signature.chomp) - expect(signature).to be_a_binary_string + expected_signed_text = <<~SIGNED_TEXT + tree 22bfa2fbd217df24731f43ff43a4a0f8db759dae + parent ae73cb07c9eeaf35924a10f713b364d32b2dd34f + author Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> 1393489561 +0200 + committer Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> 1393489561 +0200 - expected_signed_text = <<~SIGNED_TEXT - tree 22bfa2fbd217df24731f43ff43a4a0f8db759dae - parent ae73cb07c9eeaf35924a10f713b364d32b2dd34f - author Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> 1393489561 +0200 - committer Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> 1393489561 +0200 + Feature added - Feature added + Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> + SIGNED_TEXT - Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> - SIGNED_TEXT - - expect(signed_text).to eq(expected_signed_text) - expect(signed_text).to be_a_binary_string - end + expect(signed_text).to eq(expected_signed_text) + expect(signed_text).to be_a_binary_string end + end - context 'when the commit has no signature' do - let(:commit_id) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' } + context 'when the commit has no signature' do + let(:commit_id) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' } - it 'returns nil' do - expect(subject).to be_nil - end + it 'returns nil' do + expect(subject).to be_nil end + end - context 'when the commit cannot be found' do - let(:commit_id) { Gitlab::Git::BLANK_SHA } + context 'when the commit cannot be found' do + let(:commit_id) { Gitlab::Git::BLANK_SHA } - it 'returns nil' do - expect(subject).to be_nil - end + it 'returns nil' do + expect(subject).to 
be_nil end + end + + context 'when the commit ID is invalid' do + let(:commit_id) { '4b4918a572fa86f9771e5ba40fbd48e' } - context 'when the commit ID is invalid' do - let(:commit_id) { '4b4918a572fa86f9771e5ba40fbd48e' } + it 'raises ArgumentError' do + expect { subject }.to raise_error(ArgumentError) + end + end + end - it 'raises ArgumentError' do - expect { subject }.to raise_error(ArgumentError) + describe '.extract_signature_lazily' do + shared_examples 'loading signatures in batch once' do + it 'fetches signatures in batch once' do + commit_ids = %w[0b4bc9a49b562e85de7cc9e834518ea6828729b9 4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6] + signatures = commit_ids.map do |commit_id| + described_class.extract_signature_lazily(repository, commit_id) end + + expect(described_class).to receive(:batch_signature_extraction) + .with(repository, commit_ids) + .once + .and_return({}) + + 2.times { signatures.each(&:itself) } end end + subject { described_class.extract_signature_lazily(repository, commit_id).itself } + + context 'with Gitaly extract_commit_signature_in_batch feature enabled' do + it_behaves_like 'extracting commit signature' + it_behaves_like 'loading signatures in batch once' + end + + context 'with Gitaly extract_commit_signature_in_batch feature disabled', :disable_gitaly do + it_behaves_like 'extracting commit signature' + it_behaves_like 'loading signatures in batch once' + end + end + + describe '.extract_signature' do + subject { described_class.extract_signature(repository, commit_id) } + context 'with gitaly' do - it_behaves_like '.extract_signature' + it_behaves_like 'extracting commit signature' end - context 'without gitaly', :skip_gitaly_mock do - it_behaves_like '.extract_signature' + context 'without gitaly', :disable_gitaly do + it_behaves_like 'extracting commit signature' end end end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index ec1c7a96f92..25defb98b7c 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -1,3 +1,4 @@ +# coding: utf-8 require "spec_helper" describe Gitlab::Git::Repository, seed_helper: true do @@ -599,13 +600,44 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#branch_names_contains_sha' do + shared_examples 'returning the right branches' do + let(:head_id) { repository.rugged.head.target.oid } + let(:new_branch) { head_id } + + before do + repository.create_branch(new_branch, 'master') + end + + after do + repository.delete_branch(new_branch) + end + + it 'displays that branch' do + expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch) + end + end + + context 'when Gitaly is enabled' do + it_behaves_like 'returning the right branches' + end + + context 'when Gitaly is disabled', :disable_gitaly do + it_behaves_like 'returning the right branches' + end + end + describe "#refs_hash" do - let(:refs) { repository.refs_hash } + subject { repository.refs_hash } it "should have as many entries as branches and tags" do expected_refs = SeedRepo::Repo::BRANCHES + SeedRepo::Repo::TAGS # We flatten in case a commit is pointed at by more than one branch and/or tag - expect(refs.values.flatten.size).to eq(expected_refs.size) + expect(subject.values.flatten.size).to eq(expected_refs.size) + end + + it 'has valid commit ids as keys' do + expect(subject.keys).to all( match(Commit::COMMIT_SHA_PATTERN) ) end end @@ -863,7 +895,7 @@ describe Gitlab::Git::Repository, seed_helper: true do 
repository.log(options.merge(path: "encoding")) end - it "should not follow renames" do + it "does not follow renames" do expect(log_commits).to include(commit_with_new_name) expect(log_commits).to include(rename_commit) expect(log_commits).not_to include(commit_with_old_name) @@ -875,7 +907,7 @@ describe Gitlab::Git::Repository, seed_helper: true do repository.log(options.merge(path: "encoding/CHANGELOG")) end - it "should not follow renames" do + it "does not follow renames" do expect(log_commits).to include(commit_with_new_name) expect(log_commits).to include(rename_commit) expect(log_commits).not_to include(commit_with_old_name) @@ -887,7 +919,7 @@ describe Gitlab::Git::Repository, seed_helper: true do repository.log(options.merge(path: "CHANGELOG")) end - it "should not follow renames" do + it "does not follow renames" do expect(log_commits).to include(commit_with_old_name) expect(log_commits).to include(rename_commit) expect(log_commits).not_to include(commit_with_new_name) @@ -899,7 +931,7 @@ describe Gitlab::Git::Repository, seed_helper: true do repository.log(options.merge(ref: "refs/heads/fix-blob-path", path: "files/testdir/file.txt")) end - it "should return a list of commits" do + it "returns a list of commits" do expect(log_commits.size).to eq(1) end end @@ -959,6 +991,16 @@ describe Gitlab::Git::Repository, seed_helper: true do it { expect { repository.log(limit: limit) }.to raise_error(ArgumentError) } end end + + context 'with all' do + let(:options) { { all: true, limit: 50 } } + + it 'returns a list of commits' do + commits = repository.log(options) + + expect(commits.size).to eq(37) + end + end end describe "#rugged_commits_between" do @@ -1102,6 +1144,20 @@ describe Gitlab::Git::Repository, seed_helper: true do context 'when Gitaly count_commits feature is disabled', :skip_gitaly_mock do it_behaves_like 'extended commit counting' + + context "with all" do + it "returns the number of commits in the whole repository" do + options = { all: true } + + expect(repository.count_commits(options)).to eq(34) + end + end + + context 'without all or ref being specified' do + it "raises an ArgumentError" do + expect { repository.count_commits({}) }.to raise_error(ArgumentError, "Please specify a valid ref or set the 'all' attribute to true") + end + end end end @@ -1374,79 +1430,95 @@ describe Gitlab::Git::Repository, seed_helper: true do end describe "#copy_gitattributes" do - let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') } - - it "raises an error with invalid ref" do - expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef) - end + shared_examples 'applying git attributes' do + let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') } - context "with no .gitattrbutes" do - before do - repository.copy_gitattributes("master") + after do + FileUtils.rm_rf(attributes_path) if Dir.exist?(attributes_path) end - it "does not have an info/attributes" do - expect(File.exist?(attributes_path)).to be_falsey + it "raises an error with invalid ref" do + expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef) end - after do - FileUtils.rm_rf(attributes_path) - end - end + context 'when forcing encoding issues' do + let(:branch_name) { "ʕ•ᴥ•ʔ" } - context "with .gitattrbutes" do - before do - repository.copy_gitattributes("gitattributes") - end + before do + repository.create_branch(branch_name, "master") + end - it "has an 
info/attributes" do - expect(File.exist?(attributes_path)).to be_truthy - end + after do + repository.rm_branch(branch_name, user: build(:admin)) + end - it "has the same content in info/attributes as .gitattributes" do - contents = File.open(attributes_path, "rb") { |f| f.read } - expect(contents).to eq("*.md binary\n") - end + it "doesn't raise with a valid unicode ref" do + expect { repository.copy_gitattributes(branch_name) }.not_to raise_error - after do - FileUtils.rm_rf(attributes_path) + repository + end end - end - context "with updated .gitattrbutes" do - before do - repository.copy_gitattributes("gitattributes") - repository.copy_gitattributes("gitattributes-updated") - end + context "with no .gitattrbutes" do + before do + repository.copy_gitattributes("master") + end - it "has an info/attributes" do - expect(File.exist?(attributes_path)).to be_truthy + it "does not have an info/attributes" do + expect(File.exist?(attributes_path)).to be_falsey + end end - it "has the updated content in info/attributes" do - contents = File.read(attributes_path) - expect(contents).to eq("*.txt binary\n") - end + context "with .gitattrbutes" do + before do + repository.copy_gitattributes("gitattributes") + end - after do - FileUtils.rm_rf(attributes_path) - end - end + it "has an info/attributes" do + expect(File.exist?(attributes_path)).to be_truthy + end - context "with no .gitattrbutes in HEAD but with previous info/attributes" do - before do - repository.copy_gitattributes("gitattributes") - repository.copy_gitattributes("master") + it "has the same content in info/attributes as .gitattributes" do + contents = File.open(attributes_path, "rb") { |f| f.read } + expect(contents).to eq("*.md binary\n") + end end - it "does not have an info/attributes" do - expect(File.exist?(attributes_path)).to be_falsey + context "with updated .gitattrbutes" do + before do + repository.copy_gitattributes("gitattributes") + repository.copy_gitattributes("gitattributes-updated") + end + + it "has an info/attributes" do + expect(File.exist?(attributes_path)).to be_truthy + end + + it "has the updated content in info/attributes" do + contents = File.read(attributes_path) + expect(contents).to eq("*.txt binary\n") + end end - after do - FileUtils.rm_rf(attributes_path) + context "with no .gitattrbutes in HEAD but with previous info/attributes" do + before do + repository.copy_gitattributes("gitattributes") + repository.copy_gitattributes("master") + end + + it "does not have an info/attributes" do + expect(File.exist?(attributes_path)).to be_falsey + end end end + + context 'when gitaly is enabled' do + it_behaves_like 'applying git attributes' + end + + context 'when gitaly is disabled', :disable_gitaly do + it_behaves_like 'applying git attributes' + end end describe '#ref_exists?' 
do @@ -1585,7 +1657,7 @@ describe Gitlab::Git::Repository, seed_helper: true do expected_languages = [ { value: 66.63, label: "Ruby", color: "#701516", highlight: "#701516" }, { value: 22.96, label: "JavaScript", color: "#f1e05a", highlight: "#f1e05a" }, - { value: 7.9, label: "HTML", color: "#e44b23", highlight: "#e44b23" }, + { value: 7.9, label: "HTML", color: "#e34c26", highlight: "#e34c26" }, { value: 2.51, label: "CoffeeScript", color: "#244776", highlight: "#244776" } ] @@ -1617,6 +1689,35 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#license_short_name' do + shared_examples 'acquiring the Licensee license key' do + subject { repository.license_short_name } + + context 'when no license file can be found' do + let(:project) { create(:project, :repository) } + let(:repository) { project.repository.raw_repository } + + before do + project.repository.delete_file(project.owner, 'LICENSE', message: 'remove license', branch_name: 'master') + end + + it { is_expected.to be_nil } + end + + context 'when an mit license is found' do + it { is_expected.to eq('mit') } + end + end + + context 'when gitaly is enabled' do + it_behaves_like 'acquiring the Licensee license key' + end + + context 'when gitaly is disabled', :disable_gitaly do + it_behaves_like 'acquiring the Licensee license key' + end + end + describe '#with_repo_branch_commit' do context 'when comparing with the same repository' do let(:start_repository) { repository } @@ -2199,7 +2300,7 @@ describe Gitlab::Git::Repository, seed_helper: true do context 'sparse checkout', :skip_gitaly_mock do let(:expected_files) { %w(files files/js files/js/application.js) } - before do + it 'checks out only the files in the diff' do allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args| m.call(*args) do worktree_path = args[0] @@ -2211,11 +2312,59 @@ describe Gitlab::Git::Repository, seed_helper: true do expect(Dir[files_pattern]).to eq(expected) end end - end - it 'checkouts only the files in the diff' do subject end + + context 'when the diff contains a rename' do + let(:repo) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '').rugged } + let(:end_sha) { new_commit_move_file(repo).oid } + + after do + # Erase our commits so other tests get the original repo + repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '').rugged + repo.references.update('refs/heads/master', SeedRepo::LastCommit::ID) + end + + it 'does not include the renamed file in the sparse checkout' do + allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args| + m.call(*args) do + worktree_path = args[0] + files_pattern = File.join(worktree_path, '**', '*') + + expect(Dir[files_pattern]).not_to include('CHANGELOG') + expect(Dir[files_pattern]).not_to include('encoding/CHANGELOG') + end + end + + subject + end + end + end + + context 'with an ASCII-8BIT diff', :skip_gitaly_mock do + let(:diff) { "diff --git a/README.md b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+✓ testme\n ======\n \n Sample repo for testing gitlab features\n" } + + it 'applies a ASCII-8BIT diff' do + allow(repository).to receive(:run_git!).and_call_original + allow(repository).to receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT')) + + expect(subject).to match(/\h{40}/) + end + end + + context 'with trailing whitespace in an invalid patch', :skip_gitaly_mock do + let(:diff) { "diff --git a/README.md 
b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+ \n ====== \n \n Sample repo for testing gitlab features\n" } + + it 'does not include whitespace warnings in the error' do + allow(repository).to receive(:run_git!).and_call_original + allow(repository).to receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT')) + + expect { subject }.to raise_error do |error| + expect(error).to be_a(described_class::GitError) + expect(error.message).not_to include('trailing whitespace') + end + end end end end diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb index bd8dbf07fa7..761f7732036 100644 --- a/spec/lib/gitlab/git/wiki_spec.rb +++ b/spec/lib/gitlab/git/wiki_spec.rb @@ -3,34 +3,38 @@ require 'spec_helper' describe Gitlab::Git::Wiki do let(:project) { create(:project) } let(:user) { project.owner } - let(:wiki) { ProjectWiki.new(project, user) } - let(:gollum_wiki) { wiki.wiki } + let(:project_wiki) { ProjectWiki.new(project, user) } + subject { project_wiki.wiki } # Remove skip_gitaly_mock flag when gitaly_find_page when - # https://gitlab.com/gitlab-org/gitaly/merge_requests/539 gets merged + # https://gitlab.com/gitlab-org/gitlab-ce/issues/42039 is solved describe '#page', :skip_gitaly_mock do - it 'returns the right page' do + before do create_page('page1', 'content') - create_page('foo/page1', 'content') - - expect(gollum_wiki.page(title: 'page1', dir: '').url_path).to eq 'page1' - expect(gollum_wiki.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1' + create_page('foo/page1', 'content foo/page1') + end + after do destroy_page('page1') destroy_page('page1', 'foo') end + + it 'returns the right page' do + expect(subject.page(title: 'page1', dir: '').url_path).to eq 'page1' + expect(subject.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1' + end end def create_page(name, content) - gollum_wiki.write_page(name, :markdown, content, commit_details) + subject.write_page(name, :markdown, content, commit_details(name)) end - def commit_details - Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit") + def commit_details(name) + Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "created page #{name}") end def destroy_page(title, dir = '') - page = gollum_wiki.page(title: title, dir: dir) - wiki.delete_page(page, "test commit") + page = subject.page(title: title, dir: dir) + project_wiki.delete_page(page, "test commit") end end diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 3c3697e7aa9..6f07e423c1b 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -18,8 +18,9 @@ describe Gitlab::GitAccess do redirected_path: redirected_path) end - let(:push_access_check) { access.check('git-receive-pack', '_any') } - let(:pull_access_check) { access.check('git-upload-pack', '_any') } + let(:changes) { '_any' } + let(:push_access_check) { access.check('git-receive-pack', changes) } + let(:pull_access_check) { access.check('git-upload-pack', changes) } describe '#check with single protocols allowed' do def disable_protocol(protocol) @@ -533,6 +534,19 @@ describe Gitlab::GitAccess do expect { pull_access_check }.to raise_unauthorized('Your account has been blocked.') end + context 'when the project repository does not exist' do + it 'returns not found' do + project.add_guest(user) + repo = project.repository + FileUtils.rm_rf(repo.path) + + # 
Sanity check for rm_rf + expect(repo.exists?).to eq(false) + + expect { pull_access_check }.to raise_error(Gitlab::GitAccess::NotFoundError, 'A repository for this project does not exist yet.') + end + end + describe 'without access to project' do context 'pull code' do it { expect { pull_access_check }.to raise_not_found } @@ -646,6 +660,20 @@ describe Gitlab::GitAccess do end end + describe 'check LFS integrity' do + let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature'] } + + before do + project.add_developer(user) + end + + it 'checks LFS integrity only for first change' do + expect_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).exactly(1).times + + push_access_check + end + end + describe '#check_push_access!' do before do merge_into_protected_branch diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb index 186b2d9279d..730ede99fc9 100644 --- a/spec/lib/gitlab/git_access_wiki_spec.rb +++ b/spec/lib/gitlab/git_access_wiki_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Gitlab::GitAccessWiki do let(:access) { described_class.new(user, project, 'web', authentication_abilities: authentication_abilities, redirected_path: redirected_path) } - let(:project) { create(:project, :repository) } + let(:project) { create(:project, :wiki_repo) } let(:user) { create(:user) } let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] } let(:redirected_path) { nil } @@ -48,6 +48,18 @@ describe Gitlab::GitAccessWiki do it 'give access to download wiki code' do expect { subject }.not_to raise_error end + + context 'when the wiki repository does not exist' do + it 'returns not found' do + wiki_repo = project.wiki.repository + FileUtils.rm_rf(wiki_repo.path) + + # Sanity check for rm_rf + expect(wiki_repo.exists?).to eq(false) + + expect { subject }.to raise_error(Gitlab::GitAccess::NotFoundError, 'A repository for this project does not exist yet.') + end + end end context 'when wiki feature is disabled' do diff --git a/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb new file mode 100644 index 00000000000..9db710e759e --- /dev/null +++ b/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe Gitlab::GitalyClient::BlobsStitcher do + describe 'enumeration' do + it 'combines segregated blob messages together' do + messages = [ + OpenStruct.new(oid: 'abcdef1', path: 'path/to/file', size: 1642, revision: 'f00ba7', mode: 0100644, data: "first-line\n"), + OpenStruct.new(oid: '', data: 'second-line'), + OpenStruct.new(oid: '', data: '', revision: 'f00ba7', path: 'path/to/non-existent/file'), + OpenStruct.new(oid: 'abcdef2', path: 'path/to/another-file', size: 2461, revision: 'f00ba8', mode: 0100644, data: "GIF87a\x90\x01".b) + ] + + blobs = described_class.new(messages).to_a + + expect(blobs.size).to be(2) + + expect(blobs[0].id).to eq('abcdef1') + expect(blobs[0].mode).to eq('100644') + expect(blobs[0].name).to eq('file') + expect(blobs[0].path).to eq('path/to/file') + expect(blobs[0].size).to eq(1642) + expect(blobs[0].commit_id).to eq('f00ba7') + expect(blobs[0].data).to eq("first-line\nsecond-line") + expect(blobs[0].binary?).to be false + + expect(blobs[1].id).to eq('abcdef2') + expect(blobs[1].mode).to eq('100644') + expect(blobs[1].name).to eq('another-file') + expect(blobs[1].path).to eq('path/to/another-file') + expect(blobs[1].size).to eq(2461) + 
expect(blobs[1].commit_id).to eq('f00ba8') + expect(blobs[1].data).to eq("GIF87a\x90\x01".b) + expect(blobs[1].binary?).to be true + end + end +end diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb index 3722a91c050..9be3fa633a7 100644 --- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb @@ -113,7 +113,7 @@ describe Gitlab::GitalyClient::CommitService do .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) .and_return([]) - client.tree_entries(repository, revision, path) + client.tree_entries(repository, revision, path, false) end context 'with UTF-8 params strings' do @@ -126,7 +126,7 @@ describe Gitlab::GitalyClient::CommitService do .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) .and_return([]) - client.tree_entries(repository, revision, path) + client.tree_entries(repository, revision, path, false) end end end @@ -166,6 +166,32 @@ describe Gitlab::GitalyClient::CommitService do described_class.new(repository).find_commit(revision) end + + describe 'caching', :request_store do + let(:commit_dbl) { double(id: 'f01b' * 10) } + + context 'when passed revision is a branch name' do + it 'calls Gitaly' do + expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commit).twice.and_return(double(commit: commit_dbl)) + + commit = nil + 2.times { commit = described_class.new(repository).find_commit('master') } + + expect(commit).to eq(commit_dbl) + end + end + + context 'when passed revision is a commit ID' do + it 'returns a cached commit' do + expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commit).once.and_return(double(commit: commit_dbl)) + + commit = nil + 2.times { commit = described_class.new(repository).find_commit('f01b' * 10) } + + expect(commit).to eq(commit_dbl) + end + end + end end describe '#patch' do diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb index cbc7ce1c1b0..c50e73cecfc 100644 --- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb @@ -84,4 +84,30 @@ describe Gitlab::GitalyClient::RepositoryService do expect(client.has_local_branches?).to be(true) end end + + describe '#rebase_in_progress?' do + let(:rebase_id) { 1 } + + it 'sends a repository_rebase_in_progress message' do + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:is_rebase_in_progress) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .and_return(double(in_progress: true)) + + client.rebase_in_progress?(rebase_id) + end + end + + describe '#squash_in_progress?' 
do + let(:squash_id) { 1 } + + it 'sends a repository_squash_in_progress message' do + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:is_squash_in_progress) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .and_return(double(in_progress: true)) + + client.squash_in_progress?(squash_id) + end + end end diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb index 46a57e08963..5bedfc79dd3 100644 --- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb @@ -11,7 +11,8 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do import_source: 'foo/bar', repository_storage_path: 'foo', disk_path: 'foo', - repository: repository + repository: repository, + create_wiki: true ) end @@ -192,7 +193,7 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do expect(importer.import_wiki_repository).to eq(true) end - it 'marks the import as failed if an error was raised' do + it 'marks the import as failed and creates an empty repo if an error was raised' do expect(importer.gitlab_shell) .to receive(:import_repository) .and_raise(Gitlab::Shell::Error) @@ -201,6 +202,9 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do .to receive(:fail_import) .and_return(false) + expect(project) + .to receive(:create_wiki) + expect(importer.import_wiki_repository).to eq(false) end end diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb index e3bf2801406..8c6d673391b 100644 --- a/spec/lib/gitlab/gpg/commit_spec.rb +++ b/spec/lib/gitlab/gpg/commit_spec.rb @@ -38,7 +38,7 @@ describe Gitlab::Gpg::Commit do end before do - allow(Gitlab::Git::Commit).to receive(:extract_signature) + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) .with(Gitlab::Git::Repository, commit_sha) .and_return( [ @@ -49,7 +49,9 @@ describe Gitlab::Gpg::Commit do end it 'returns a valid signature' do - expect(described_class.new(commit).signature).to have_attributes( + signature = described_class.new(commit).signature + + expect(signature).to have_attributes( commit_sha: commit_sha, project: project, gpg_key: gpg_key, @@ -58,9 +60,31 @@ describe Gitlab::Gpg::Commit do gpg_key_user_email: GpgHelpers::User1.emails.first, verification_status: 'verified' ) + expect(signature.persisted?).to be_truthy end it_behaves_like 'returns the cached signature on second call' + + context 'read-only mode' do + before do + allow(Gitlab::Database).to receive(:read_only?).and_return(true) + end + + it 'does not create a cached signature' do + signature = described_class.new(commit).signature + + expect(signature).to have_attributes( + commit_sha: commit_sha, + project: project, + gpg_key: gpg_key, + gpg_key_primary_keyid: GpgHelpers::User1.primary_keyid, + gpg_key_user_name: GpgHelpers::User1.names.first, + gpg_key_user_email: GpgHelpers::User1.emails.first, + verification_status: 'verified' + ) + expect(signature.persisted?).to be_falsey + end + end end context 'commit signed with a subkey' do @@ -77,7 +101,7 @@ describe Gitlab::Gpg::Commit do end before do - allow(Gitlab::Git::Commit).to receive(:extract_signature) + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) .with(Gitlab::Git::Repository, commit_sha) .and_return( [ @@ -116,7 +140,7 @@ describe Gitlab::Gpg::Commit do end before do - allow(Gitlab::Git::Commit).to 
receive(:extract_signature) + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) .with(Gitlab::Git::Repository, commit_sha) .and_return( [ @@ -151,7 +175,7 @@ describe Gitlab::Gpg::Commit do end before do - allow(Gitlab::Git::Commit).to receive(:extract_signature) + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) .with(Gitlab::Git::Repository, commit_sha) .and_return( [ @@ -187,7 +211,7 @@ describe Gitlab::Gpg::Commit do end before do - allow(Gitlab::Git::Commit).to receive(:extract_signature) + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) .with(Gitlab::Git::Repository, commit_sha) .and_return( [ @@ -217,7 +241,7 @@ describe Gitlab::Gpg::Commit do let!(:commit) { create :commit, project: project, sha: commit_sha } before do - allow(Gitlab::Git::Commit).to receive(:extract_signature) + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) .with(Gitlab::Git::Repository, commit_sha) .and_return( [ diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb index c034eccf2a6..6fbffc38444 100644 --- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb +++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb @@ -26,7 +26,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do before do allow_any_instance_of(Project).to receive(:commit).and_return(commit) - allow(Gitlab::Git::Commit).to receive(:extract_signature) + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) .with(Gitlab::Git::Repository, commit_sha) .and_return(signature) end diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 0ecb50f7110..41a55027f4d 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -276,6 +276,7 @@ project: - fork_network_member - fork_network - custom_attributes +- lfs_file_locks award_emoji: - awardable - user @@ -290,3 +291,5 @@ push_event_payload: issue_assignees: - issue - assignee +lfs_file_locks: +- user diff --git a/spec/lib/gitlab/import_export/relation_factory_spec.rb b/spec/lib/gitlab/import_export/relation_factory_spec.rb index f1df44cea75..5c61a5a2044 100644 --- a/spec/lib/gitlab/import_export/relation_factory_spec.rb +++ b/spec/lib/gitlab/import_export/relation_factory_spec.rb @@ -29,6 +29,7 @@ describe Gitlab::ImportExport::RelationFactory do 'service_id' => service_id, 'push_events' => true, 'issues_events' => false, + 'confidential_issues_events' => false, 'merge_requests_events' => true, 'tag_push_events' => false, 'note_events' => true, diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 5a33fa3fd53..feaab6673cd 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -530,3 +530,9 @@ ProjectCustomAttribute: - project_id - key - value +LfsFileLock: +- id +- path +- user_id +- project_id +- created_at diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb index a685521cbf0..8a3a244be21 100644 --- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb @@ -16,7 +16,7 @@ describe Gitlab::ImportExport::UploadsRestorer do end describe 'legacy storage' do - let(:project) { create(:project) } + let(:project) { create(:project, 
:legacy_storage) } subject(:restorer) { described_class.new(project: project, shared: shared) } @@ -34,7 +34,7 @@ describe Gitlab::ImportExport::UploadsRestorer do end describe 'hashed storage' do - let(:project) { create(:project, :hashed) } + let(:project) { create(:project) } subject(:restorer) { described_class.new(project: project, shared: shared) } diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb index 959779523f4..177036c109b 100644 --- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb @@ -15,7 +15,7 @@ describe Gitlab::ImportExport::UploadsSaver do end describe 'legacy storage' do - let(:project) { create(:project) } + let(:project) { create(:project, :legacy_storage) } subject(:saver) { described_class.new(shared: shared, project: project) } @@ -37,7 +37,7 @@ describe Gitlab::ImportExport::UploadsSaver do end describe 'hashed storage' do - let(:project) { create(:project, :hashed) } + let(:project) { create(:project) } subject(:saver) { described_class.new(shared: shared, project: project) } diff --git a/spec/lib/gitlab/import_export/wiki_restorer_spec.rb b/spec/lib/gitlab/import_export/wiki_restorer_spec.rb new file mode 100644 index 00000000000..81b654e9c5f --- /dev/null +++ b/spec/lib/gitlab/import_export/wiki_restorer_spec.rb @@ -0,0 +1,45 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::WikiRestorer do + describe 'restore a wiki Git repo' do + let!(:project_with_wiki) { create(:project, :wiki_repo) } + let!(:project_without_wiki) { create(:project) } + let!(:project) { create(:project) } + let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } + let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) } + let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(project: project_with_wiki, shared: shared) } + let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) } + let(:restorer) do + described_class.new(path_to_bundle: bundle_path, + shared: shared, + project: project.wiki, + wiki_enabled: true) + end + + before do + allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + + bundler.save + end + + after do + FileUtils.rm_rf(export_path) + Gitlab::Shell.new.remove_repository(project_with_wiki.wiki.repository_storage_path, project_with_wiki.wiki.disk_path) + Gitlab::Shell.new.remove_repository(project.wiki.repository_storage_path, project.wiki.disk_path) + end + + it 'restores the wiki repo successfully' do + expect(restorer.restore).to be true + end + + describe "no wiki in the bundle" do + let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(project: project_without_wiki, shared: shared) } + + it 'creates an empty wiki' do + expect(restorer.restore).to be true + + expect(project.wiki_repository_exists?).to be true + end + end + end +end diff --git a/spec/lib/gitlab/kubernetes/config_map_spec.rb b/spec/lib/gitlab/kubernetes/config_map_spec.rb new file mode 100644 index 00000000000..33dfa461202 --- /dev/null +++ b/spec/lib/gitlab/kubernetes/config_map_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe Gitlab::Kubernetes::ConfigMap do + let(:kubeclient) { double('kubernetes client') } + let(:application) { create(:clusters_applications_prometheus) } + let(:config_map) { described_class.new(application.name, application.values) } + let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } + + let(:metadata) do + { + name: 
"values-content-configuration-#{application.name}", + namespace: namespace, + labels: { name: "values-content-configuration-#{application.name}" } + } + end + + describe '#generate' do + let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: { values: application.values }) } + subject { config_map.generate } + + it 'should build a Kubeclient Resource' do + is_expected.to eq(resource) + end + end +end diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb index 69112fe90b1..740466ea5cb 100644 --- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb @@ -5,14 +5,21 @@ describe Gitlab::Kubernetes::Helm::Api do let(:helm) { described_class.new(client) } let(:gitlab_namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } let(:namespace) { Gitlab::Kubernetes::Namespace.new(gitlab_namespace, client) } - let(:install_helm) { true } - let(:chart) { 'stable/a_chart' } - let(:application_name) { 'app_name' } - let(:command) { Gitlab::Kubernetes::Helm::InstallCommand.new(application_name, install_helm: install_helm, chart: chart) } + let(:application) { create(:clusters_applications_prometheus) } + + let(:command) do + Gitlab::Kubernetes::Helm::InstallCommand.new( + application.name, + chart: application.chart, + values: application.values + ) + end + subject { helm } before do allow(Gitlab::Kubernetes::Namespace).to receive(:new).with(gitlab_namespace, client).and_return(namespace) + allow(client).to receive(:create_config_map) end describe '#initialize' do @@ -26,6 +33,7 @@ describe Gitlab::Kubernetes::Helm::Api do describe '#install' do before do allow(client).to receive(:create_pod).and_return(nil) + allow(client).to receive(:create_config_map).and_return(nil) allow(namespace).to receive(:ensure_exists!).once end @@ -35,6 +43,16 @@ describe Gitlab::Kubernetes::Helm::Api do subject.install(command) end + + context 'with a ConfigMap' do + let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application.name, application.values).generate } + + it 'creates a ConfigMap on kubeclient' do + expect(client).to receive(:create_config_map).with(resource).once + + subject.install(command) + end + end end describe '#installation_status' do diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb new file mode 100644 index 00000000000..3cfdae794f6 --- /dev/null +++ b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb @@ -0,0 +1,44 @@ +require 'spec_helper' + +describe Gitlab::Kubernetes::Helm::BaseCommand do + let(:application) { create(:clusters_applications_helm) } + let(:base_command) { described_class.new(application.name) } + + describe '#generate_script' do + let(:helm_version) { Gitlab::Kubernetes::Helm::HELM_VERSION } + let(:command) do + <<~HEREDOC + set -eo pipefail + apk add -U ca-certificates openssl >/dev/null + wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v#{helm_version}-linux-amd64.tar.gz | tar zxC /tmp >/dev/null + mv /tmp/linux-amd64/helm /usr/bin/ + HEREDOC + end + + subject { base_command.generate_script } + + it 'should return a command that prepares the environment for helm-cli' do + expect(subject).to eq(command) + end + end + + describe '#pod_resource' do + subject { base_command.pod_resource } + + it 'should returns a kubeclient resoure with pod content for application' do + is_expected.to be_an_instance_of ::Kubeclient::Resource + end + end + + describe '#config_map?' 
do + subject { base_command.config_map? } + + it { is_expected.to be_falsy } + end + + describe '#pod_name' do + subject { base_command.pod_name } + + it { is_expected.to eq('install-helm') } + end +end diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb new file mode 100644 index 00000000000..e6920b0a76f --- /dev/null +++ b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb @@ -0,0 +1,24 @@ +require 'spec_helper' + +describe Gitlab::Kubernetes::Helm::InitCommand do + let(:application) { create(:clusters_applications_helm) } + let(:init_command) { described_class.new(application.name) } + + describe '#generate_script' do + let(:command) do + <<~MSG.chomp + set -eo pipefail + apk add -U ca-certificates openssl >/dev/null + wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.0-linux-amd64.tar.gz | tar zxC /tmp >/dev/null + mv /tmp/linux-amd64/helm /usr/bin/ + helm init >/dev/null + MSG + end + + subject { init_command.generate_script } + + it 'should return the appropriate command' do + is_expected.to eq(command) + end + end +end diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb index 63997a40d52..137b8f718de 100644 --- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb @@ -1,79 +1,56 @@ require 'rails_helper' describe Gitlab::Kubernetes::Helm::InstallCommand do - let(:prometheus) { create(:clusters_applications_prometheus) } - - describe "#initialize" do - context "With all the params" do - subject { described_class.new(prometheus.name, install_helm: true, chart: prometheus.chart, chart_values_file: prometheus.chart_values_file) } - - it 'should assign all parameters' do - expect(subject.name).to eq(prometheus.name) - expect(subject.install_helm).to be_truthy - expect(subject.chart).to eq(prometheus.chart) - expect(subject.chart_values_file).to eq("#{Rails.root}/vendor/prometheus/values.yaml") - end - end - - context 'when install_helm is not set' do - subject { described_class.new(prometheus.name, chart: prometheus.chart, chart_values_file: true) } - - it 'should set install_helm as false' do - expect(subject.install_helm).to be_falsy - end - end - - context 'when chart is not set' do - subject { described_class.new(prometheus.name, install_helm: true) } + let(:application) { create(:clusters_applications_prometheus) } + let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } + + let(:install_command) do + described_class.new( + application.name, + chart: application.chart, + values: application.values + ) + end - it 'should set chart as nil' do - expect(subject.chart).to be_falsy - end + describe '#generate_script' do + let(:command) do + <<~MSG + set -eo pipefail + apk add -U ca-certificates openssl >/dev/null + wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.0-linux-amd64.tar.gz | tar zxC /tmp >/dev/null + mv /tmp/linux-amd64/helm /usr/bin/ + helm init --client-only >/dev/null + helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null + MSG end - context 'when chart_values_file is not set' do - subject { described_class.new(prometheus.name, install_helm: true, chart: prometheus.chart) } + subject { install_command.generate_script } - it 'should set chart_values_file as nil' do - expect(subject.chart_values_file).to be_falsy - end + it 'should 
return appropriate command' do + is_expected.to eq(command) end - end - - describe "#generate_script" do - let(:install_command) { described_class.new(prometheus.name, install_helm: install_helm) } - let(:client) { double('kubernetes client') } - let(:namespace) { Gitlab::Kubernetes::Namespace.new(Gitlab::Kubernetes::Helm::NAMESPACE, client) } - subject { install_command.send(:generate_script, namespace.name) } - context 'when install helm is true' do - let(:install_helm) { true } - let(:command) do - <<~MSG - set -eo pipefail - apk add -U ca-certificates openssl >/dev/null - wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.0-linux-amd64.tar.gz | tar zxC /tmp >/dev/null - mv /tmp/linux-amd64/helm /usr/bin/ - - helm init >/dev/null - MSG + context 'with an application with a repository' do + let(:ci_runner) { create(:ci_runner) } + let(:application) { create(:clusters_applications_runner, runner: ci_runner) } + let(:install_command) do + described_class.new( + application.name, + chart: application.chart, + values: application.values, + repository: application.repository + ) end - it 'should return appropriate command' do - is_expected.to eq(command) - end - end - - context 'when install helm is false' do - let(:install_helm) { false } let(:command) do <<~MSG set -eo pipefail apk add -U ca-certificates openssl >/dev/null wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.0-linux-amd64.tar.gz | tar zxC /tmp >/dev/null mv /tmp/linux-amd64/helm /usr/bin/ - helm init --client-only >/dev/null + helm repo add #{application.name} #{application.repository} + helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null MSG end @@ -81,50 +58,29 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do is_expected.to eq(command) end end + end - context 'when chart is present' do - let(:install_command) { described_class.new(prometheus.name, chart: prometheus.chart) } - let(:command) do - <<~MSG.chomp - set -eo pipefail - apk add -U ca-certificates openssl >/dev/null - wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.0-linux-amd64.tar.gz | tar zxC /tmp >/dev/null - mv /tmp/linux-amd64/helm /usr/bin/ + describe '#config_map?' do + subject { install_command.config_map? 
} - helm init --client-only >/dev/null - helm install #{prometheus.chart} --name #{prometheus.name} --namespace #{namespace.name} >/dev/null - MSG - end + it { is_expected.to be_truthy } + end - it 'should return appropriate command' do - is_expected.to eq(command) - end + describe '#config_map_resource' do + let(:metadata) do + { + name: "values-content-configuration-#{application.name}", + namespace: namespace, + labels: { name: "values-content-configuration-#{application.name}" } + } end - context 'when chart values file is present' do - let(:install_command) { described_class.new(prometheus.name, chart: prometheus.chart, chart_values_file: prometheus.chart_values_file) } - let(:command) do - <<~MSG.chomp - set -eo pipefail - apk add -U ca-certificates openssl >/dev/null - wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v2.7.0-linux-amd64.tar.gz | tar zxC /tmp >/dev/null - mv /tmp/linux-amd64/helm /usr/bin/ + let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: { values: application.values }) } - helm init --client-only >/dev/null - helm install #{prometheus.chart} --name #{prometheus.name} --namespace #{namespace.name} -f /data/helm/#{prometheus.name}/config/values.yaml >/dev/null - MSG - end + subject { install_command.config_map_resource } - it 'should return appropriate command' do - is_expected.to eq(command) - end + it 'returns a KubeClient resource with config map content for the application' do + is_expected.to eq(resource) end end - - describe "#pod_name" do - let(:install_command) { described_class.new(prometheus.name, install_helm: true, chart: prometheus.chart, chart_values_file: true) } - subject { install_command.send(:pod_name) } - - it { is_expected.to eq('install-prometheus') } - end end diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb index ebb6033f71e..43adc80d576 100644 --- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb @@ -5,13 +5,9 @@ describe Gitlab::Kubernetes::Helm::Pod do let(:cluster) { create(:cluster) } let(:app) { create(:clusters_applications_prometheus, cluster: cluster) } let(:command) { app.install_command } - let(:client) { double('kubernetes client') } - let(:namespace) { Gitlab::Kubernetes::Namespace.new(Gitlab::Kubernetes::Helm::NAMESPACE, client) } - subject { described_class.new(command, namespace.name, client) } + let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } - before do - allow(client).to receive(:create_config_map).and_return(nil) - end + subject { described_class.new(command, namespace) } shared_examples 'helm pod' do it 'should generate a Kubeclient::Resource' do @@ -47,7 +43,7 @@ describe Gitlab::Kubernetes::Helm::Pod do end end - context 'with a configuration file' do + context 'with a install command' do it_behaves_like 'helm pod' it 'should include volumes for the container' do @@ -62,14 +58,14 @@ describe Gitlab::Kubernetes::Helm::Pod do end it 'should mount configMap specification in the volume' do - spec = subject.generate.spec - expect(spec.volumes.first.configMap['name']).to eq("values-content-configuration-#{app.name}") - expect(spec.volumes.first.configMap['items'].first['key']).to eq('values') - expect(spec.volumes.first.configMap['items'].first['path']).to eq('values.yaml') + volume = subject.generate.spec.volumes.first + expect(volume.configMap['name']).to eq("values-content-configuration-#{app.name}") + expect(volume.configMap['items'].first['key']).to eq('values') + 
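# These configMap expectations line up with the InstallCommand examples above:
# the pod mounts the "values-content-configuration-<application name>" ConfigMap
# with a single 'values' key rendered as values.yaml, which is presumably the
# file the generated script reads via
# `-f /data/helm/#{application.name}/config/values.yaml`.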
expect(volume.configMap['items'].first['path']).to eq('values.yaml') end end - context 'without a configuration file' do + context 'with a init command' do let(:app) { create(:clusters_applications_helm, cluster: cluster) } it_behaves_like 'helm pod' diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb index 60a134be939..b24c9882c0c 100644 --- a/spec/lib/gitlab/middleware/go_spec.rb +++ b/spec/lib/gitlab/middleware/go_spec.rb @@ -3,19 +3,30 @@ require 'spec_helper' describe Gitlab::Middleware::Go do let(:app) { double(:app) } let(:middleware) { described_class.new(app) } + let(:env) do + { + 'rack.input' => '', + 'REQUEST_METHOD' => 'GET' + } + end describe '#call' do describe 'when go-get=0' do + before do + env['QUERY_STRING'] = 'go-get=0' + end + it 'skips go-import generation' do - env = { 'rack.input' => '', - 'QUERY_STRING' => 'go-get=0' } expect(app).to receive(:call).with(env).and_return('no-go') middleware.call(env) end end describe 'when go-get=1' do - let(:current_user) { nil } + before do + env['QUERY_STRING'] = 'go-get=1' + env['PATH_INFO'] = "/#{path}" + end shared_examples 'go-get=1' do |enabled_protocol:| context 'with simple 2-segment project path' do @@ -54,21 +65,75 @@ describe Gitlab::Middleware::Go do project.update_attribute(:visibility_level, Project::PRIVATE) end - context 'with access to the project' do + shared_examples 'unauthorized' do + it 'returns the 2-segment group path' do + expect_response_with_path(go, enabled_protocol, group.full_path) + end + end + + context 'when not authenticated' do + it_behaves_like 'unauthorized' + end + + context 'when authenticated' do let(:current_user) { project.creator } before do project.team.add_master(current_user) end - it 'returns the full project path' do - expect_response_with_path(go, enabled_protocol, project.full_path) + shared_examples 'authenticated' do + context 'with access to the project' do + it 'returns the full project path' do + expect_response_with_path(go, enabled_protocol, project.full_path) + end + end + + context 'without access to the project' do + before do + project.team.find_member(current_user).destroy + end + + it_behaves_like 'unauthorized' + end end - end - context 'without access to the project' do - it 'returns the 2-segment group path' do - expect_response_with_path(go, enabled_protocol, group.full_path) + context 'using warden' do + before do + env['warden'] = double(authenticate: current_user) + end + + context 'when active' do + it_behaves_like 'authenticated' + end + + context 'when blocked' do + before do + current_user.block! 
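# Blocking the signed-in user drops the request back to the 'unauthorized'
# behaviour defined above: the go-import response only exposes the 2-segment
# group path instead of the full path of the private project.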
+ end + + it_behaves_like 'unauthorized' + end + end + + context 'using a personal access token' do + let(:personal_access_token) { create(:personal_access_token, user: current_user) } + + before do + env['HTTP_PRIVATE_TOKEN'] = personal_access_token.token + end + + context 'with api scope' do + it_behaves_like 'authenticated' + end + + context 'with read_user scope' do + before do + personal_access_token.update_attribute(:scopes, [:read_user]) + end + + it_behaves_like 'unauthorized' + end end end end @@ -138,12 +203,6 @@ describe Gitlab::Middleware::Go do end def go - env = { - 'rack.input' => '', - 'QUERY_STRING' => 'go-get=1', - 'PATH_INFO' => "/#{path}", - 'warden' => double(authenticate: current_user) - } middleware.call(env) end diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb index 8d925460f01..a2ba91dae80 100644 --- a/spec/lib/gitlab/middleware/multipart_spec.rb +++ b/spec/lib/gitlab/middleware/multipart_spec.rb @@ -5,15 +5,17 @@ require 'tempfile' describe Gitlab::Middleware::Multipart do let(:app) { double(:app) } let(:middleware) { described_class.new(app) } + let(:original_filename) { 'filename' } it 'opens top-level files' do Tempfile.open('top-level') do |tempfile| - env = post_env({ 'file' => tempfile.path }, { 'file.name' => 'filename' }, Gitlab::Workhorse.secret, 'gitlab-workhorse') + env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename }, Gitlab::Workhorse.secret, 'gitlab-workhorse') expect(app).to receive(:call) do |env| file = Rack::Request.new(env).params['file'] expect(file).to be_a(::UploadedFile) expect(file.path).to eq(tempfile.path) + expect(file.original_filename).to eq(original_filename) end middleware.call(env) @@ -34,13 +36,14 @@ describe Gitlab::Middleware::Multipart do it 'opens files one level deep' do Tempfile.open('one-level') do |tempfile| - in_params = { 'user' => { 'avatar' => { '.name' => 'filename' } } } + in_params = { 'user' => { 'avatar' => { '.name' => original_filename } } } env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse') expect(app).to receive(:call) do |env| file = Rack::Request.new(env).params['user']['avatar'] expect(file).to be_a(::UploadedFile) expect(file.path).to eq(tempfile.path) + expect(file.original_filename).to eq(original_filename) end middleware.call(env) @@ -49,13 +52,14 @@ describe Gitlab::Middleware::Multipart do it 'opens files two levels deep' do Tempfile.open('two-levels') do |tempfile| - in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => 'filename' } } } } + in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename } } } } env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse') expect(app).to receive(:call) do |env| file = Rack::Request.new(env).params['project']['milestone']['themesong'] expect(file).to be_a(::UploadedFile) expect(file.path).to eq(tempfile.path) + expect(file.original_filename).to eq(original_filename) end middleware.call(env) diff --git a/spec/lib/gitlab/plugin_spec.rb b/spec/lib/gitlab/plugin_spec.rb new file mode 100644 index 00000000000..33dd4f79130 --- /dev/null +++ b/spec/lib/gitlab/plugin_spec.rb @@ -0,0 +1,68 @@ +require 'spec_helper' + +describe Gitlab::Plugin do + describe '.execute' do + let(:data) { Gitlab::DataBuilder::Push::SAMPLE_DATA } + let(:plugin) { Rails.root.join('plugins', 'test.rb') } + let(:tmp_file) { 
Tempfile.new('plugin-dump') } + let(:result) { described_class.execute(plugin.to_s, data) } + let(:success) { result.first } + let(:message) { result.last } + + let(:plugin_source) do + <<~EOS + #!/usr/bin/env ruby + x = STDIN.read + File.write('#{tmp_file.path}', x) + EOS + end + + before do + File.write(plugin, plugin_source) + end + + after do + FileUtils.rm(plugin) + end + + context 'successful execution' do + before do + File.chmod(0o777, plugin) + end + + after do + tmp_file.close! + end + + it { expect(success).to be true } + it { expect(message).to be_empty } + + it 'ensures plugin received data via stdin' do + result + + expect(File.read(tmp_file.path)).to eq(data.to_json) + end + end + + context 'non-executable' do + it { expect(success).to be false } + it { expect(message).to include('Permission denied') } + end + + context 'non-zero exit' do + let(:plugin_source) do + <<~EOS + #!/usr/bin/env ruby + exit 1 + EOS + end + + before do + File.chmod(0o777, plugin) + end + + it { expect(success).to be false } + it { expect(message).to be_empty } + end + end +end diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb index 4a43dbb2371..f02b1cf55fb 100644 --- a/spec/lib/gitlab/profiler_spec.rb +++ b/spec/lib/gitlab/profiler_spec.rb @@ -53,6 +53,15 @@ describe Gitlab::Profiler do described_class.profile('/', user: user) end + context 'when providing a user without a personal access token' do + it 'raises an error' do + user = double(:user) + allow(user).to receive_message_chain(:personal_access_tokens, :active, :pluck).and_return([]) + + expect { described_class.profile('/', user: user) }.to raise_error('Your user must have a personal_access_token') + end + end + it 'uses the private_token for auth if both it and user are set' do user = double(:user) user_token = 'user' diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb index 1ebb0105cf5..d8250e4b4c6 100644 --- a/spec/lib/gitlab/project_search_results_spec.rb +++ b/spec/lib/gitlab/project_search_results_spec.rb @@ -1,3 +1,4 @@ +# coding: utf-8 require 'spec_helper' describe Gitlab::ProjectSearchResults do @@ -105,6 +106,32 @@ describe Gitlab::ProjectSearchResults do end end + context 'when the search returns non-ASCII data' do + context 'with UTF-8' do + let(:results) { project.repository.search_files_by_content("файл", 'master') } + + it 'returns results as UTF-8' do + expect(subject.filename).to eq('encoding/russian.rb') + expect(subject.basename).to eq('encoding/russian') + expect(subject.ref).to eq('master') + expect(subject.startline).to eq(1) + expect(subject.data).to eq("Хороший файл") + end + end + + context 'with ISO-8859-1' do + let(:search_result) { "master:encoding/iso8859.txt\x001\x00\xC4\xFC\nmaster:encoding/iso8859.txt\x002\x00\nmaster:encoding/iso8859.txt\x003\x00foo\n".force_encoding(Encoding::ASCII_8BIT) } + + it 'returns results as UTF-8' do + expect(subject.filename).to eq('encoding/iso8859.txt') + expect(subject.basename).to eq('encoding/iso8859') + expect(subject.ref).to eq('master') + expect(subject.startline).to eq(1) + expect(subject.data).to eq("Äü\n\nfoo") + end + end + end + context "when filename has extension" do let(:search_result) { "master:CONTRIBUTE.md\x005\x00- [Contribute to GitLab](#contribute-to-gitlab)\n" } diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb index c7169717fc1..0697cb2def6 100644 
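# A minimal sketch of the contract the Gitlab::Plugin specs above exercise:
# the hook data is piped to the script as JSON on stdin and the result is a
# [success, message] pair. run_plugin is an illustrative name, not the
# GitLab implementation.
require 'json'
require 'open3'

def run_plugin(path, data)
  output, status = Open3.capture2(path, stdin_data: data.to_json)
  [status.success?, output]
rescue StandardError => e
  [false, e.message]
end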
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb +++ b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb @@ -7,7 +7,7 @@ describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do include_examples 'additional metrics query' do let(:deployment) { create(:deployment, environment: environment) } - let(:query_params) { [deployment.id] } + let(:query_params) { [environment.id, deployment.id] } it 'queries using specific time' do expect(client).to receive(:query_range).with(anything, diff --git a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb index ffe3ad85baa..84dc31d9732 100644 --- a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb +++ b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb @@ -31,7 +31,7 @@ describe Gitlab::Prometheus::Queries::DeploymentQuery do expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100', time: stop_time) - expect(subject.query(deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil, - cpu_values: nil, cpu_before: nil, cpu_after: nil) + expect(subject.query(environment.id, deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil, + cpu_values: nil, cpu_before: nil, cpu_after: nil) end end diff --git a/spec/lib/gitlab/prometheus/queries/matched_metrics_query_spec.rb b/spec/lib/gitlab/prometheus/queries/matched_metrics_query_spec.rb index 2b488101496..c95719eff1d 100644 --- a/spec/lib/gitlab/prometheus/queries/matched_metrics_query_spec.rb +++ b/spec/lib/gitlab/prometheus/queries/matched_metrics_query_spec.rb @@ -24,7 +24,7 @@ describe Gitlab::Prometheus::Queries::MatchedMetricsQuery do context 'with one group where two metrics is found' do before do - allow(metric_group_class).to receive(:all).and_return([simple_metric_group]) + allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group]) allow(client).to receive(:label_values).and_return(metric_names) end @@ -70,7 +70,7 @@ describe Gitlab::Prometheus::Queries::MatchedMetricsQuery do context 'with one group where only one metric is found' do before do - allow(metric_group_class).to receive(:all).and_return([simple_metric_group]) + allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group]) allow(client).to receive(:label_values).and_return('metric_a') end @@ -99,7 +99,7 @@ describe Gitlab::Prometheus::Queries::MatchedMetricsQuery do let(:second_metric_group) { simple_metric_group(name: 'nameb', metrics: simple_metrics(added_metric_name: 'metric_c')) } before do - allow(metric_group_class).to receive(:all).and_return([simple_metric_group, second_metric_group]) + allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group, second_metric_group]) allow(client).to receive(:label_values).and_return('metric_c') end diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb index de625324092..4c3b8deefb9 100644 --- a/spec/lib/gitlab/prometheus_client_spec.rb +++ b/spec/lib/gitlab/prometheus_client_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Gitlab::PrometheusClient do include PrometheusHelpers - subject { described_class.new(api_url: 'https://prometheus.example.com') } + subject { described_class.new(RestClient::Resource.new('https://prometheus.example.com')) } describe '#ping' do it 
'issues a "query" request to the API endpoint' do @@ -19,31 +19,41 @@ describe Gitlab::PrometheusClient do # - execute_query: A query call shared_examples 'failure response' do context 'when request returns 400 with an error message' do - it 'raises a Gitlab::PrometheusError error' do + it 'raises a Gitlab::PrometheusClient::Error error' do req_stub = stub_prometheus_request(query_url, status: 400, body: { error: 'bar!' }) expect { execute_query } - .to raise_error(Gitlab::PrometheusError, 'bar!') + .to raise_error(Gitlab::PrometheusClient::Error, 'bar!') expect(req_stub).to have_been_requested end end context 'when request returns 400 without an error message' do - it 'raises a Gitlab::PrometheusError error' do + it 'raises a Gitlab::PrometheusClient::Error error' do req_stub = stub_prometheus_request(query_url, status: 400) expect { execute_query } - .to raise_error(Gitlab::PrometheusError, 'Bad data received') + .to raise_error(Gitlab::PrometheusClient::Error, 'Bad data received') expect(req_stub).to have_been_requested end end context 'when request returns 500' do - it 'raises a Gitlab::PrometheusError error' do + it 'raises a Gitlab::PrometheusClient::Error error' do req_stub = stub_prometheus_request(query_url, status: 500, body: { message: 'FAIL!' }) expect { execute_query } - .to raise_error(Gitlab::PrometheusError, '500 - {"message":"FAIL!"}') + .to raise_error(Gitlab::PrometheusClient::Error, '500 - {"message":"FAIL!"}') + expect(req_stub).to have_been_requested + end + end + + context 'when request returns non json data' do + it 'raises a Gitlab::PrometheusClient::Error error' do + req_stub = stub_prometheus_request(query_url, status: 200, body: 'not json') + + expect { execute_query } + .to raise_error(Gitlab::PrometheusClient::Error, 'Parsing response failed') expect(req_stub).to have_been_requested end end @@ -52,28 +62,30 @@ describe Gitlab::PrometheusClient do describe 'failure to reach a provided prometheus url' do let(:prometheus_url) {"https://prometheus.invalid.example.com"} + subject { described_class.new(RestClient::Resource.new(prometheus_url)) } + context 'exceptions are raised' do - it 'raises a Gitlab::PrometheusError error when a SocketError is rescued' do + it 'raises a Gitlab::PrometheusClient::Error error when a SocketError is rescued' do req_stub = stub_prometheus_request_with_exception(prometheus_url, SocketError) - expect { subject.send(:get, prometheus_url) } - .to raise_error(Gitlab::PrometheusError, "Can't connect to #{prometheus_url}") + expect { subject.send(:get, '/', {}) } + .to raise_error(Gitlab::PrometheusClient::Error, "Can't connect to #{prometheus_url}") expect(req_stub).to have_been_requested end - it 'raises a Gitlab::PrometheusError error when a SSLError is rescued' do + it 'raises a Gitlab::PrometheusClient::Error error when a SSLError is rescued' do req_stub = stub_prometheus_request_with_exception(prometheus_url, OpenSSL::SSL::SSLError) - expect { subject.send(:get, prometheus_url) } - .to raise_error(Gitlab::PrometheusError, "#{prometheus_url} contains invalid SSL data") + expect { subject.send(:get, '/', {}) } + .to raise_error(Gitlab::PrometheusClient::Error, "#{prometheus_url} contains invalid SSL data") expect(req_stub).to have_been_requested end - it 'raises a Gitlab::PrometheusError error when a HTTParty::Error is rescued' do - req_stub = stub_prometheus_request_with_exception(prometheus_url, HTTParty::Error) + it 'raises a Gitlab::PrometheusClient::Error error when a RestClient::Exception is rescued' do + req_stub = 
stub_prometheus_request_with_exception(prometheus_url, RestClient::Exception) - expect { subject.send(:get, prometheus_url) } - .to raise_error(Gitlab::PrometheusError, "Network connection error") + expect { subject.send(:get, '/', {}) } + .to raise_error(Gitlab::PrometheusClient::Error, "Network connection error") expect(req_stub).to have_been_requested end end diff --git a/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb index b49bc5c328c..f8faeffb935 100644 --- a/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb +++ b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb @@ -1,19 +1,34 @@ require 'spec_helper' describe Gitlab::QueryLimiting::ActiveSupportSubscriber do + let(:transaction) { instance_double(Gitlab::QueryLimiting::Transaction, increment: true) } + + before do + allow(Gitlab::QueryLimiting::Transaction) + .to receive(:current) + .and_return(transaction) + end + describe '#sql' do it 'increments the number of executed SQL queries' do - transaction = double(:transaction) - - allow(Gitlab::QueryLimiting::Transaction) - .to receive(:current) - .and_return(transaction) + User.count expect(transaction) - .to receive(:increment) - .at_least(:once) + .to have_received(:increment) + .once + end - User.count + context 'when the query is actually a rails cache hit' do + it 'does not increment the number of executed SQL queries' do + ActiveRecord::Base.connection.cache do + User.count + User.count + end + + expect(transaction) + .to have_received(:increment) + .once + end end end end diff --git a/spec/lib/gitlab/query_limiting/transaction_spec.rb b/spec/lib/gitlab/query_limiting/transaction_spec.rb index b4231fcd0fa..b72b8574174 100644 --- a/spec/lib/gitlab/query_limiting/transaction_spec.rb +++ b/spec/lib/gitlab/query_limiting/transaction_spec.rb @@ -59,18 +59,6 @@ describe Gitlab::QueryLimiting::Transaction do expect { transaction.act_upon_results } .to raise_error(described_class::ThresholdExceededError) end - - it 'reports the error in Sentry if raising an error is disabled' do - expect(transaction) - .to receive(:raise_error?) 
- .and_return(false) - - expect(Raven) - .to receive(:capture_exception) - .with(an_instance_of(described_class::ThresholdExceededError)) - - transaction.act_upon_results - end end end diff --git a/spec/lib/gitlab/query_limiting_spec.rb b/spec/lib/gitlab/query_limiting_spec.rb index 2eddab0b8c3..42877b1e2dd 100644 --- a/spec/lib/gitlab/query_limiting_spec.rb +++ b/spec/lib/gitlab/query_limiting_spec.rb @@ -12,14 +12,16 @@ describe Gitlab::QueryLimiting do expect(described_class.enable?).to eq(true) end - it 'returns true on GitLab.com' do + it 'returns false on GitLab.com' do + expect(Rails.env).to receive(:development?).and_return(false) + expect(Rails.env).to receive(:test?).and_return(false) allow(Gitlab).to receive(:com?).and_return(true) - expect(described_class.enable?).to eq(true) + expect(described_class.enable?).to eq(false) end - it 'returns true in a non GitLab.com' do - expect(Gitlab).to receive(:com?).and_return(false) + it 'returns false in a non GitLab.com' do + allow(Gitlab).to receive(:com?).and_return(false) expect(Rails.env).to receive(:development?).and_return(false) expect(Rails.env).to receive(:test?).and_return(false) diff --git a/spec/lib/gitlab/quick_actions/command_definition_spec.rb b/spec/lib/gitlab/quick_actions/command_definition_spec.rb index f44a562dc63..b03c1e23ca3 100644 --- a/spec/lib/gitlab/quick_actions/command_definition_spec.rb +++ b/spec/lib/gitlab/quick_actions/command_definition_spec.rb @@ -40,7 +40,7 @@ describe Gitlab::QuickActions::CommandDefinition do end describe "#available?" do - let(:opts) { { go: false } } + let(:opts) { OpenStruct.new(go: false) } context "when the command has a condition block" do before do @@ -78,7 +78,7 @@ describe Gitlab::QuickActions::CommandDefinition do it "doesn't execute the command" do expect(context).not_to receive(:instance_exec) - subject.execute(context, {}, nil) + subject.execute(context, nil) expect(context.run).to be false end @@ -95,7 +95,7 @@ describe Gitlab::QuickActions::CommandDefinition do end it "doesn't execute the command" do - subject.execute(context, {}, nil) + subject.execute(context, nil) expect(context.run).to be false end @@ -109,7 +109,7 @@ describe Gitlab::QuickActions::CommandDefinition do context "when the command is provided an argument" do it "executes the command" do - subject.execute(context, {}, true) + subject.execute(context, true) expect(context.run).to be true end @@ -117,7 +117,7 @@ describe Gitlab::QuickActions::CommandDefinition do context "when the command is not provided an argument" do it "executes the command" do - subject.execute(context, {}, nil) + subject.execute(context, nil) expect(context.run).to be true end @@ -131,7 +131,7 @@ describe Gitlab::QuickActions::CommandDefinition do context "when the command is provided an argument" do it "executes the command" do - subject.execute(context, {}, true) + subject.execute(context, true) expect(context.run).to be true end @@ -139,7 +139,7 @@ describe Gitlab::QuickActions::CommandDefinition do context "when the command is not provided an argument" do it "doesn't execute the command" do - subject.execute(context, {}, nil) + subject.execute(context, nil) expect(context.run).to be false end @@ -153,7 +153,7 @@ describe Gitlab::QuickActions::CommandDefinition do context "when the command is provided an argument" do it "executes the command" do - subject.execute(context, {}, true) + subject.execute(context, true) expect(context.run).to be true end @@ -161,7 +161,7 @@ describe Gitlab::QuickActions::CommandDefinition do context 
"when the command is not provided an argument" do it "executes the command" do - subject.execute(context, {}, nil) + subject.execute(context, nil) expect(context.run).to be true end @@ -175,7 +175,7 @@ describe Gitlab::QuickActions::CommandDefinition do end it 'executes the command passing the parsed param' do - subject.execute(context, {}, 'something ') + subject.execute(context, 'something ') expect(context.received_arg).to eq('something') end @@ -192,7 +192,7 @@ describe Gitlab::QuickActions::CommandDefinition do end it 'returns nil' do - result = subject.explain({}, {}, nil) + result = subject.explain({}, nil) expect(result).to be_nil end @@ -204,7 +204,7 @@ describe Gitlab::QuickActions::CommandDefinition do end it 'returns this static string' do - result = subject.explain({}, {}, nil) + result = subject.explain({}, nil) expect(result).to eq 'Explanation' end @@ -216,7 +216,7 @@ describe Gitlab::QuickActions::CommandDefinition do end it 'invokes the proc' do - result = subject.explain({}, {}, 'explanation') + result = subject.explain({}, 'explanation') expect(result).to eq 'Dynamic explanation' end diff --git a/spec/lib/gitlab/quick_actions/dsl_spec.rb b/spec/lib/gitlab/quick_actions/dsl_spec.rb index ff59dc48bcb..067a30fd7e2 100644 --- a/spec/lib/gitlab/quick_actions/dsl_spec.rb +++ b/spec/lib/gitlab/quick_actions/dsl_spec.rb @@ -76,7 +76,7 @@ describe Gitlab::QuickActions::Dsl do expect(dynamic_description_def.name).to eq(:dynamic_description) expect(dynamic_description_def.aliases).to eq([]) - expect(dynamic_description_def.to_h(noteable: 'issue')[:description]).to eq('A dynamic description for ISSUE') + expect(dynamic_description_def.to_h(OpenStruct.new(noteable: 'issue'))[:description]).to eq('A dynamic description for ISSUE') expect(dynamic_description_def.explanation).to eq('') expect(dynamic_description_def.params).to eq(['The first argument', 'The second argument']) expect(dynamic_description_def.condition_block).to be_nil diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index 8b54d72d6f7..a1079e54975 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -18,6 +18,7 @@ describe Gitlab::Regex do subject { described_class.environment_name_regex } it { is_expected.to match('foo') } + it { is_expected.to match('a') } it { is_expected.to match('foo-1') } it { is_expected.to match('FOO') } it { is_expected.to match('foo/1') } @@ -25,6 +26,10 @@ describe Gitlab::Regex do it { is_expected.not_to match('9&foo') } it { is_expected.not_to match('foo-^') } it { is_expected.not_to match('!!()()') } + it { is_expected.not_to match('/foo') } + it { is_expected.not_to match('foo/') } + it { is_expected.not_to match('/foo/') } + it { is_expected.not_to match('/') } end describe '.environment_slug_regex' do diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb index 1a925a15e0c..b67bcc77bd4 100644 --- a/spec/lib/gitlab/repo_path_spec.rb +++ b/spec/lib/gitlab/repo_path_spec.rb @@ -6,11 +6,11 @@ describe ::Gitlab::RepoPath do context 'a repository storage path' do it 'parses a full repository path' do - expect(described_class.parse(project.repository.path)).to eq([project, false, nil]) + expect(described_class.parse(project.repository.full_path)).to eq([project, false, nil]) end it 'parses a full wiki path' do - expect(described_class.parse(project.wiki.repository.path)).to eq([project, true, nil]) + expect(described_class.parse(project.wiki.repository.full_path)).to eq([project, true, nil]) end end diff --git 
a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb index 2b61ce38418..4506cbc3982 100644 --- a/spec/lib/gitlab/shell_spec.rb +++ b/spec/lib/gitlab/shell_spec.rb @@ -443,7 +443,7 @@ describe Gitlab::Shell do end describe '#remove_repository' do - let!(:project) { create(:project, :repository) } + let!(:project) { create(:project, :repository, :legacy_storage) } let(:disk_path) { "#{project.disk_path}.git" } it 'returns true when the command succeeds' do diff --git a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb deleted file mode 100644 index 8fdbbacd04d..00000000000 --- a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb +++ /dev/null @@ -1,63 +0,0 @@ -require 'spec_helper' - -describe Gitlab::SidekiqMiddleware::MemoryKiller do - subject { described_class.new } - let(:pid) { 999 } - - let(:worker) { double(:worker, class: 'TestWorker') } - let(:job) { { 'jid' => 123 } } - let(:queue) { 'test_queue' } - - def run - thread = subject.call(worker, job, queue) { nil } - thread&.join - end - - before do - allow(subject).to receive(:get_rss).and_return(10.kilobytes) - allow(subject).to receive(:pid).and_return(pid) - end - - context 'when MAX_RSS is set to 0' do - before do - stub_const("#{described_class}::MAX_RSS", 0) - end - - it 'does nothing' do - expect(subject).not_to receive(:sleep) - - run - end - end - - context 'when MAX_RSS is exceeded' do - before do - stub_const("#{described_class}::MAX_RSS", 5.kilobytes) - end - - it 'sends the STP, TERM and KILL signals at expected times' do - expect(subject).to receive(:sleep).with(15 * 60).ordered - expect(Process).to receive(:kill).with('SIGSTP', pid).ordered - - expect(subject).to receive(:sleep).with(30).ordered - expect(Process).to receive(:kill).with('SIGTERM', pid).ordered - - expect(subject).to receive(:sleep).with(10).ordered - expect(Process).to receive(:kill).with('SIGKILL', pid).ordered - - run - end - end - - context 'when MAX_RSS is not exceeded' do - before do - stub_const("#{described_class}::MAX_RSS", 15.kilobytes) - end - - it 'does nothing' do - expect(subject).not_to receive(:sleep) - - run - end - end -end diff --git a/spec/lib/gitlab/sidekiq_middleware/shutdown_spec.rb b/spec/lib/gitlab/sidekiq_middleware/shutdown_spec.rb new file mode 100644 index 00000000000..0001795c3f0 --- /dev/null +++ b/spec/lib/gitlab/sidekiq_middleware/shutdown_spec.rb @@ -0,0 +1,88 @@ +require 'spec_helper' + +describe Gitlab::SidekiqMiddleware::Shutdown do + subject { described_class.new } + + let(:pid) { Process.pid } + let(:worker) { double(:worker, class: 'TestWorker') } + let(:job) { { 'jid' => 123 } } + let(:queue) { 'test_queue' } + let(:block) { proc { nil } } + + def run + subject.call(worker, job, queue) { block.call } + described_class.shutdown_thread&.join + end + + def pop_trace + subject.trace.pop(true) + end + + before do + allow(subject).to receive(:get_rss).and_return(10.kilobytes) + described_class.clear_shutdown_thread + end + + context 'when MAX_RSS is set to 0' do + before do + stub_const("#{described_class}::MAX_RSS", 0) + end + + it 'does nothing' do + expect(subject).not_to receive(:sleep) + + run + end + end + + def expect_shutdown_sequence + expect(pop_trace).to eq([:sleep, 15 * 60]) + expect(pop_trace).to eq([:kill, 'SIGTSTP', pid]) + + expect(pop_trace).to eq([:sleep, 30]) + expect(pop_trace).to eq([:kill, 'SIGTERM', pid]) + + expect(pop_trace).to eq([:sleep, 10]) + expect(pop_trace).to eq([:kill, 'SIGKILL', pid]) + end + + context 
'when MAX_RSS is exceeded' do + before do + stub_const("#{described_class}::MAX_RSS", 5.kilobytes) + end + + it 'sends the TSTP, TERM and KILL signals at expected times' do + run + + expect_shutdown_sequence + end + end + + context 'when MAX_RSS is not exceeded' do + before do + stub_const("#{described_class}::MAX_RSS", 15.kilobytes) + end + + it 'does nothing' do + expect(subject).not_to receive(:sleep) + + run + end + end + + context 'when WantShutdown is raised' do + let(:block) { proc { raise described_class::WantShutdown } } + + it 'starts the shutdown sequence and re-raises the exception' do + expect { run }.to raise_exception(described_class::WantShutdown) + + # We can't expect 'run' to have joined on the shutdown thread, because + # it hit an exception. + shutdown_thread = described_class.shutdown_thread + expect(shutdown_thread).not_to be_nil + shutdown_thread.join + + expect_shutdown_sequence + end + end +end diff --git a/spec/lib/gitlab/slash_commands/command_spec.rb b/spec/lib/gitlab/slash_commands/command_spec.rb index 0173a45d480..e3447d974aa 100644 --- a/spec/lib/gitlab/slash_commands/command_spec.rb +++ b/spec/lib/gitlab/slash_commands/command_spec.rb @@ -3,10 +3,11 @@ require 'spec_helper' describe Gitlab::SlashCommands::Command do let(:project) { create(:project) } let(:user) { create(:user) } + let(:chat_name) { double(:chat_name, user: user) } describe '#execute' do subject do - described_class.new(project, user, params).execute + described_class.new(project, chat_name, params).execute end context 'when no command is available' do @@ -88,7 +89,7 @@ describe Gitlab::SlashCommands::Command do end describe '#match_command' do - subject { described_class.new(project, user, params).match_command.first } + subject { described_class.new(project, chat_name, params).match_command.first } context 'IssueShow is triggered' do let(:params) { { text: 'issue show 123' } } diff --git a/spec/lib/gitlab/slash_commands/deploy_spec.rb b/spec/lib/gitlab/slash_commands/deploy_spec.rb index 74b5ef4bb26..0d57334aa4c 100644 --- a/spec/lib/gitlab/slash_commands/deploy_spec.rb +++ b/spec/lib/gitlab/slash_commands/deploy_spec.rb @@ -4,6 +4,7 @@ describe Gitlab::SlashCommands::Deploy do describe '#execute' do let(:project) { create(:project) } let(:user) { create(:user) } + let(:chat_name) { double(:chat_name, user: user) } let(:regex_match) { described_class.match('deploy staging to production') } before do @@ -16,7 +17,7 @@ describe Gitlab::SlashCommands::Deploy do end subject do - described_class.new(project, user).execute(regex_match) + described_class.new(project, chat_name).execute(regex_match) end context 'if no environment is defined' do diff --git a/spec/lib/gitlab/slash_commands/issue_new_spec.rb b/spec/lib/gitlab/slash_commands/issue_new_spec.rb index 3b077c58c50..8e7df946529 100644 --- a/spec/lib/gitlab/slash_commands/issue_new_spec.rb +++ b/spec/lib/gitlab/slash_commands/issue_new_spec.rb @@ -4,6 +4,7 @@ describe Gitlab::SlashCommands::IssueNew do describe '#execute' do let(:project) { create(:project) } let(:user) { create(:user) } + let(:chat_name) { double(:chat_name, user: user) } let(:regex_match) { described_class.match("issue create bird is the word") } before do @@ -11,7 +12,7 @@ describe Gitlab::SlashCommands::IssueNew do end subject do - described_class.new(project, user).execute(regex_match) + described_class.new(project, chat_name).execute(regex_match) end context 'without description' do diff --git a/spec/lib/gitlab/slash_commands/issue_search_spec.rb 
b/spec/lib/gitlab/slash_commands/issue_search_spec.rb index 35d01efc1bd..189e9592f1b 100644 --- a/spec/lib/gitlab/slash_commands/issue_search_spec.rb +++ b/spec/lib/gitlab/slash_commands/issue_search_spec.rb @@ -6,10 +6,11 @@ describe Gitlab::SlashCommands::IssueSearch do let!(:confidential) { create(:issue, :confidential, project: project, title: 'mepmep find') } let(:project) { create(:project) } let(:user) { create(:user) } + let(:chat_name) { double(:chat_name, user: user) } let(:regex_match) { described_class.match("issue search find") } subject do - described_class.new(project, user).execute(regex_match) + described_class.new(project, chat_name).execute(regex_match) end context 'when the user has no access' do diff --git a/spec/lib/gitlab/slash_commands/issue_show_spec.rb b/spec/lib/gitlab/slash_commands/issue_show_spec.rb index e5834d5a2ee..b1db1638237 100644 --- a/spec/lib/gitlab/slash_commands/issue_show_spec.rb +++ b/spec/lib/gitlab/slash_commands/issue_show_spec.rb @@ -5,6 +5,7 @@ describe Gitlab::SlashCommands::IssueShow do let(:issue) { create(:issue, project: project) } let(:project) { create(:project) } let(:user) { issue.author } + let(:chat_name) { double(:chat_name, user: user) } let(:regex_match) { described_class.match("issue show #{issue.iid}") } before do @@ -12,7 +13,7 @@ describe Gitlab::SlashCommands::IssueShow do end subject do - described_class.new(project, user).execute(regex_match) + described_class.new(project, chat_name).execute(regex_match) end context 'the issue exists' do diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb index ef51e3cc8df..5b5052de372 100644 --- a/spec/lib/gitlab/sql/pattern_spec.rb +++ b/spec/lib/gitlab/sql/pattern_spec.rb @@ -154,6 +154,12 @@ describe Gitlab::SQL::Pattern do it 'returns a single equality condition' do expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE 'fo'/) end + + it 'uses LOWER instead of ILIKE when LOWER is enabled' do + rel = Issue.fuzzy_arel_match(:title, query, lower_exact_match: true) + + expect(rel.to_sql).to match(/LOWER\(.*title.*\).*=.*'fo'/) + end end context 'with two words both equal to 3 chars' do diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb index c15e29774b6..a6ea07e8b6d 100644 --- a/spec/lib/gitlab/ssh_public_key_spec.rb +++ b/spec/lib/gitlab/ssh_public_key_spec.rb @@ -76,7 +76,21 @@ describe Gitlab::SSHPublicKey, lib: true do subject { public_key } context 'with a valid SSH key' do - it { is_expected.to be_valid } + where(:factory) do + %i(rsa_key_2048 + rsa_key_4096 + rsa_key_5120 + rsa_key_8192 + dsa_key_2048 + ecdsa_key_256 + ed25519_key_256) + end + + with_them do + let(:key) { attributes_for(factory)[:key] } + + it { is_expected.to be_valid } + end end context 'with an invalid SSH key' do @@ -117,6 +131,9 @@ describe Gitlab::SSHPublicKey, lib: true do where(:factory, :bits) do [ [:rsa_key_2048, 2048], + [:rsa_key_4096, 4096], + [:rsa_key_5120, 5120], + [:rsa_key_8192, 8192], [:dsa_key_2048, 2048], [:ecdsa_key_256, 256], [:ed25519_key_256, 256] @@ -141,8 +158,11 @@ describe Gitlab::SSHPublicKey, lib: true do where(:factory, :fingerprint) do [ - [:rsa_key_2048, '2e:ca:dc:e0:37:29:ed:fc:f0:1d:bf:66:d4:cd:51:b1'], - [:dsa_key_2048, 'bc:c1:a4:be:7e:8c:84:56:b3:58:93:53:c6:80:78:8c'], + [:rsa_key_2048, '58:a8:9d:cd:1f:70:f8:5a:d9:e4:24:8e:da:89:e4:fc'], + [:rsa_key_4096, 'df:73:db:29:3c:a5:32:cf:09:17:7e:8e:9d:de:d7:f7'], + [:rsa_key_5120, 'fe:fa:3a:4d:7d:51:ec:bf:c7:64:0c:96:d0:17:8a:d0'], + [:rsa_key_8192, 
'fb:53:7f:e9:2f:f7:17:aa:c8:32:52:06:8e:05:e2:82'], + [:dsa_key_2048, 'c8:85:1e:df:44:0f:20:00:3c:66:57:2b:21:10:5a:27'], [:ecdsa_key_256, '67:a3:a9:7d:b8:e1:15:d4:80:40:21:34:bb:ed:97:38'], [:ed25519_key_256, 'e6:eb:45:8a:3c:59:35:5f:e9:5b:80:12:be:7e:22:73'] ] diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index 0e9ecff25a6..138d21ede97 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -36,6 +36,7 @@ describe Gitlab::UsageData do gitlab_shared_runners git database + avg_cycle_analytics )) end diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index dc2bb5b9747..37a0bf1ad36 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -324,7 +324,7 @@ describe Gitlab::Workhorse do it 'includes a Repository param' do repo_param = { storage_name: 'default', - relative_path: project.full_path + '.git', + relative_path: project.disk_path + '.git', gl_repository: "project-#{project.id}" } diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb index f65e41dfea3..db9d9158b29 100644 --- a/spec/lib/google_api/cloud_platform/client_spec.rb +++ b/spec/lib/google_api/cloud_platform/client_spec.rb @@ -115,6 +115,9 @@ describe GoogleApi::CloudPlatform::Client do "initial_node_count": cluster_size, "node_config": { "machine_type": machine_type + }, + "legacy_abac": { + "enabled": true } } } ) diff --git a/spec/mailers/emails/pages_domains_spec.rb b/spec/mailers/emails/pages_domains_spec.rb new file mode 100644 index 00000000000..fe428ea657d --- /dev/null +++ b/spec/mailers/emails/pages_domains_spec.rb @@ -0,0 +1,71 @@ +require 'spec_helper' +require 'email_spec' + +describe Emails::PagesDomains do + include EmailSpec::Matchers + include_context 'gitlab email notification' + + set(:project) { create(:project) } + set(:domain) { create(:pages_domain, project: project) } + set(:user) { project.owner } + + shared_examples 'a pages domain email' do + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'a user cannot unsubscribe through footer link' + + it 'has the expected content' do + aggregate_failures do + is_expected.to have_subject(email_subject) + is_expected.to have_body_text(project.human_name) + is_expected.to have_body_text(domain.domain) + is_expected.to have_body_text domain.url + is_expected.to have_body_text project_pages_domain_url(project, domain) + is_expected.to have_body_text help_page_url('user/project/pages/getting_started_part_three.md', anchor: 'dns-txt-record') + end + end + end + + describe '#pages_domain_enabled_email' do + let(:email_subject) { "#{project.path} | GitLab Pages domain '#{domain.domain}' has been enabled" } + + subject { Notify.pages_domain_enabled_email(domain, user) } + + it_behaves_like 'a pages domain email' + + it { is_expected.to have_body_text 'has been enabled' } + end + + describe '#pages_domain_disabled_email' do + let(:email_subject) { "#{project.path} | GitLab Pages domain '#{domain.domain}' has been disabled" } + + subject { Notify.pages_domain_disabled_email(domain, user) } + + it_behaves_like 'a pages domain email' + + it { is_expected.to have_body_text 'has been disabled' } + end + + describe '#pages_domain_verification_succeeded_email' do + let(:email_subject) { "#{project.path} | Verification succeeded for GitLab Pages domain '#{domain.domain}'" } + + subject { 
Notify.pages_domain_verification_succeeded_email(domain, user) } + + it_behaves_like 'a pages domain email' + + it { is_expected.to have_body_text 'successfully verified' } + end + + describe '#pages_domain_verification_failed_email' do + let(:email_subject) { "#{project.path} | ACTION REQUIRED: Verification failed for GitLab Pages domain '#{domain.domain}'" } + + subject { Notify.pages_domain_verification_failed_email(domain, user) } + + it_behaves_like 'a pages domain email' + + it 'says verification has failed and when the domain is enabled until' do + is_expected.to have_body_text 'Verification has failed' + is_expected.to have_body_text domain.enabled_until.strftime('%F %T') + end + end +end diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index 59eda025108..bcbb9287199 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -463,59 +463,30 @@ describe Notify do end describe 'project access requested' do - context 'for a project in a user namespace' do - let(:project) do - create(:project, :public, :access_requestable) do |project| - project.add_master(project.owner, current_user: project.owner) - end - end - - let(:project_member) do - project.request_access(user) - project.requesters.find_by(user_id: user.id) - end - subject { described_class.member_access_requested_email('project', project_member.id) } - - it_behaves_like 'an email sent from GitLab' - it_behaves_like 'it should not have Gmail Actions links' - it_behaves_like "a user cannot unsubscribe through footer link" - - it 'contains all the useful information' do - to_emails = subject.header[:to].addrs - expect(to_emails.size).to eq(1) - expect(to_emails[0].address).to eq(project.members.owners_and_masters.first.user.notification_email) - - is_expected.to have_subject "Request to join the #{project.name_with_namespace} project" - is_expected.to have_html_escaped_body_text project.name_with_namespace - is_expected.to have_body_text project_project_members_url(project) - is_expected.to have_body_text project_member.human_access + let(:project) do + create(:project, :public, :access_requestable) do |project| + project.add_master(project.owner) end end - context 'for a project in a group' do - let(:group_owner) { create(:user) } - let(:group) { create(:group).tap { |g| g.add_owner(group_owner) } } - let(:project) { create(:project, :public, :access_requestable, namespace: group) } - let(:project_member) do - project.request_access(user) - project.requesters.find_by(user_id: user.id) - end - subject { described_class.member_access_requested_email('project', project_member.id) } + let(:project_member) do + project.request_access(user) + project.requesters.find_by(user_id: user.id) + end + subject { described_class.member_access_requested_email('project', project_member.id, recipient.notification_email) } - it_behaves_like 'an email sent from GitLab' - it_behaves_like 'it should not have Gmail Actions links' - it_behaves_like "a user cannot unsubscribe through footer link" + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like "a user cannot unsubscribe through footer link" - it 'contains all the useful information' do - to_emails = subject.header[:to].addrs - expect(to_emails.size).to eq(1) - expect(to_emails[0].address).to eq(group.members.owners_and_masters.first.user.notification_email) + it 'contains all the useful information' do + to_emails = subject.header[:to].addrs.map(&:address) + expect(to_emails).to 
eq([recipient.notification_email]) - is_expected.to have_subject "Request to join the #{project.name_with_namespace} project" - is_expected.to have_html_escaped_body_text project.name_with_namespace - is_expected.to have_body_text project_project_members_url(project) - is_expected.to have_body_text project_member.human_access - end + is_expected.to have_subject "Request to join the #{project.name_with_namespace} project" + is_expected.to have_html_escaped_body_text project.name_with_namespace + is_expected.to have_body_text project_project_members_url(project) + is_expected.to have_body_text project_member.human_access end end @@ -959,13 +930,16 @@ describe Notify do group.request_access(user) group.requesters.find_by(user_id: user.id) end - subject { described_class.member_access_requested_email('group', group_member.id) } + subject { described_class.member_access_requested_email('group', group_member.id, recipient.notification_email) } it_behaves_like 'an email sent from GitLab' it_behaves_like 'it should not have Gmail Actions links' it_behaves_like "a user cannot unsubscribe through footer link" it 'contains all the useful information' do + to_emails = subject.header[:to].addrs.map(&:address) + expect(to_emails).to eq([recipient.notification_email]) + is_expected.to have_subject "Request to join the #{group.name} group" is_expected.to have_html_escaped_body_text group.name is_expected.to have_body_text group_group_members_url(group) diff --git a/spec/migrations/README.md b/spec/migrations/README.md index 45cf25b96de..49760fa62b8 100644 --- a/spec/migrations/README.md +++ b/spec/migrations/README.md @@ -89,5 +89,5 @@ end ## Best practices 1. Note that this type of tests do not run within the transaction, we use -a truncation database cleanup strategy. Do not depend on transaction being +a deletion database cleanup strategy. Do not depend on transaction being present. diff --git a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb index b47f3314926..033d0e7584d 100644 --- a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb +++ b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespaceless_pending_delete_projects.rb') -describe CleanupNamespacelessPendingDeleteProjects do +describe CleanupNamespacelessPendingDeleteProjects, :migration, schema: 20180222043024 do before do # Stub after_save callbacks that will fail when Project has no namespace allow_any_instance_of(Project).to receive(:ensure_storage_path_exists).and_return(nil) diff --git a/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb b/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb index 759e77ac9db..d1bf6bdf9d6 100644 --- a/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb +++ b/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb @@ -21,7 +21,7 @@ describe ConvertCustomNotificationSettingsToColumns, :migration do events[event] = true end - user = build(:user).becomes(user_class).tap(&:save!) 
+ user = user_class.create!(email: "user-#{SecureRandom.hex}@example.org", username: "user-#{SecureRandom.hex}", encrypted_password: '12345678') create_params = { user_id: user.id, level: params[:level], events: events } notification_setting = described_class::NotificationSetting.create(create_params) @@ -37,7 +37,7 @@ describe ConvertCustomNotificationSettingsToColumns, :migration do events[event] = true end - user = build(:user).becomes(user_class).tap(&:save!) + user = user_class.create!(email: "user-#{SecureRandom.hex}@example.org", username: "user-#{SecureRandom.hex}", encrypted_password: '12345678') create_params = events.merge(user_id: user.id, level: params[:level]) notification_setting = described_class::NotificationSetting.create(create_params) diff --git a/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb new file mode 100644 index 00000000000..afcaefa0591 --- /dev/null +++ b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb @@ -0,0 +1,23 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180216121030_enqueue_verify_pages_domain_workers') + +describe EnqueueVerifyPagesDomainWorkers, :sidekiq, :migration do + around do |example| + Sidekiq::Testing.fake! do + example.run + end + end + + describe '#up' do + it 'enqueues a verification worker for every domain' do + domains = 1.upto(3).map { |i| PagesDomain.create!(domain: "my#{i}.domain.com") } + + expect { migrate! }.to change(PagesDomainVerificationWorker.jobs, :size).by(3) + + enqueued_ids = PagesDomainVerificationWorker.jobs.map { |job| job['args'] } + expected_ids = domains.map { |domain| [domain.id] } + + expect(enqueued_ids).to match_array(expected_ids) + end + end +end diff --git a/spec/migrations/migrate_issues_to_ghost_user_spec.rb b/spec/migrations/migrate_issues_to_ghost_user_spec.rb index ff0d44e1ed2..9220b49a736 100644 --- a/spec/migrations/migrate_issues_to_ghost_user_spec.rb +++ b/spec/migrations/migrate_issues_to_ghost_user_spec.rb @@ -8,7 +8,7 @@ describe MigrateIssuesToGhostUser, :migration do let(:users) { table(:users) } before do - project = projects.create!(name: 'gitlab') + project = projects.create!(name: 'gitlab', namespace_id: 1) user = users.create(email: 'test@example.com') issues.create(title: 'Issue 1', author_id: nil, project_id: project.id) issues.create(title: 'Issue 2', author_id: user.id, project_id: project.id) diff --git a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb index e5793a3c0ee..657113812bd 100644 --- a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb +++ b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require Rails.root.join('db', 'migrate', '20161124141322_migrate_process_commit_worker_jobs.rb') describe MigrateProcessCommitWorkerJobs do - let(:project) { create(:project, :repository) } + let(:project) { create(:project, :legacy_storage, :repository) } let(:user) { create(:user) } let(:commit) { project.commit.raw.rugged_commit } diff --git a/spec/migrations/migrate_stages_statuses_spec.rb b/spec/migrations/migrate_stages_statuses_spec.rb index 79d2708f9ad..ce35276cbf5 100644 --- a/spec/migrations/migrate_stages_statuses_spec.rb +++ b/spec/migrations/migrate_stages_statuses_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20170711145558_migrate_stages_statuses.rb') -describe MigrateStagesStatuses, 
:migration do +describe MigrateStagesStatuses, :sidekiq, :migration do let(:jobs) { table(:ci_builds) } let(:stages) { table(:ci_stages) } let(:pipelines) { table(:ci_pipelines) } diff --git a/spec/migrations/rename_reserved_project_names_spec.rb b/spec/migrations/rename_reserved_project_names_spec.rb index e6555b1fe6b..34336d705b1 100644 --- a/spec/migrations/rename_reserved_project_names_spec.rb +++ b/spec/migrations/rename_reserved_project_names_spec.rb @@ -3,10 +3,14 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20161221153951_rename_reserved_project_names.rb') -# This migration uses multiple threads, and thus different transactions. This -# means data created in this spec may not be visible to some threads. To work -# around this we use the DELETE cleaning strategy. -describe RenameReservedProjectNames, :delete do +# This migration is using factories, which set fields that don't actually +# exist in the DB schema previous to 20161221153951. Thus we just use the +# latest schema when testing this migration. +# This is ok-ish because: +# 1. This migration is a data migration +# 2. It only relies on very stable DB fields: routes.id, routes.path, namespaces.id, projects.namespace_id +# Ideally, the test should not use factories and rely on the `table` helper instead. +describe RenameReservedProjectNames, :migration, schema: :latest do let(:migration) { described_class.new } let!(:project) { create(:project) } diff --git a/spec/migrations/schedule_build_stage_migration_spec.rb b/spec/migrations/schedule_build_stage_migration_spec.rb new file mode 100644 index 00000000000..e2ca35447fb --- /dev/null +++ b/spec/migrations/schedule_build_stage_migration_spec.rb @@ -0,0 +1,35 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180212101928_schedule_build_stage_migration') + +describe ScheduleBuildStageMigration, :sidekiq, :migration do + let(:projects) { table(:projects) } + let(:pipelines) { table(:ci_pipelines) } + let(:stages) { table(:ci_stages) } + let(:jobs) { table(:ci_builds) } + + before do + stub_const("#{described_class}::BATCH_SIZE", 1) + + projects.create!(id: 123, name: 'gitlab', path: 'gitlab-ce') + pipelines.create!(id: 1, project_id: 123, ref: 'master', sha: 'adf43c3a') + stages.create!(id: 1, project_id: 123, pipeline_id: 1, name: 'test') + + jobs.create!(id: 11, commit_id: 1, project_id: 123, stage_id: nil) + jobs.create!(id: 206, commit_id: 1, project_id: 123, stage_id: nil) + jobs.create!(id: 3413, commit_id: 1, project_id: 123, stage_id: nil) + jobs.create!(id: 4109, commit_id: 1, project_id: 123, stage_id: 1) + end + + it 'schedules delayed background migrations in batches in bulk' do + Sidekiq::Testing.fake! do + Timecop.freeze do + migrate! 
+ + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, 11, 11) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, 206, 206) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(15.minutes, 3413, 3413) + expect(BackgroundMigrationWorker.jobs.size).to eq 3 + end + end + end +end diff --git a/spec/migrations/track_untracked_uploads_spec.rb b/spec/migrations/track_untracked_uploads_spec.rb index fe4d5b8a279..2fccfb3f12c 100644 --- a/spec/migrations/track_untracked_uploads_spec.rb +++ b/spec/migrations/track_untracked_uploads_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20171103140253_track_untracked_uploads') describe TrackUntrackedUploads, :migration, :sidekiq do - include TrackUntrackedUploadsHelpers + include MigrationsHelpers::TrackUntrackedUploadsHelpers it 'correctly schedules the follow-up background migration' do Sidekiq::Testing.fake! do diff --git a/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb b/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb index 6f7a730edff..528dc54781d 100644 --- a/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb +++ b/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb @@ -4,7 +4,7 @@ require Rails.root.join('db', 'migrate', '20170503140202_turn_nested_groups_into describe TurnNestedGroupsIntoRegularGroupsForMysql do let!(:parent_group) { create(:group) } let!(:child_group) { create(:group, parent: parent_group) } - let!(:project) { create(:project, :empty_repo, namespace: child_group) } + let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: child_group) } let!(:member) { create(:user) } let(:migration) { described_class.new } diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb index 38fb98d4f50..cd175dba6da 100644 --- a/spec/models/ability_spec.rb +++ b/spec/models/ability_spec.rb @@ -204,6 +204,78 @@ describe Ability do end end + describe '.merge_requests_readable_by_user' do + context 'with an admin' do + it 'returns all merge requests' do + user = build(:user, admin: true) + merge_request = build(:merge_request) + + expect(described_class.merge_requests_readable_by_user([merge_request], user)) + .to eq([merge_request]) + end + end + + context 'without a user' do + it 'returns merge_requests that are publicly visible' do + hidden_merge_request = build(:merge_request) + visible_merge_request = build(:merge_request, source_project: build(:project, :public)) + + merge_requests = described_class + .merge_requests_readable_by_user([hidden_merge_request, visible_merge_request]) + + expect(merge_requests).to eq([visible_merge_request]) + end + end + + context 'with a user' do + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:merge_request) { create(:merge_request, source_project: project) } + let(:cross_project_merge_request) do + create(:merge_request, source_project: create(:project, :public)) + end + let(:other_merge_request) { create(:merge_request) } + let(:all_merge_requests) do + [merge_request, cross_project_merge_request, other_merge_request] + end + + subject(:readable_merge_requests) do + described_class.merge_requests_readable_by_user(all_merge_requests, user) + end + + before do + project.add_developer(user) + end + + it 'returns projects visible to the user' do + expect(readable_merge_requests).to contain_exactly(merge_request, cross_project_merge_request) + end + + 
context 'when a user cannot read cross project and a filter is passed' do + before do + allow(described_class).to receive(:allowed?).and_call_original + expect(described_class).to receive(:allowed?).with(user, :read_cross_project) { false } + end + + subject(:readable_merge_requests) do + read_cross_project_filter = -> (merge_requests) do + merge_requests.select { |mr| mr.source_project == project } + end + described_class.merge_requests_readable_by_user( + all_merge_requests, user, + filters: { read_cross_project: read_cross_project_filter } + ) + end + + it 'returns only MRs of the specified project without checking access on others' do + expect(described_class).not_to receive(:allowed?).with(user, :read_merge_request, cross_project_merge_request) + + expect(readable_merge_requests).to contain_exactly(merge_request) + end + end + end + end + describe '.issues_readable_by_user' do context 'with an admin user' do it 'returns all given issues' do @@ -250,6 +322,29 @@ expect(issues).to eq([visible_issue]) end end + + context 'when the user cannot read cross project' do + let(:user) { create(:user) } + let(:issue) { create(:issue) } + let(:other_project_issue) { create(:issue) } + let(:project) { issue.project } + + before do + project.add_developer(user) + + allow(described_class).to receive(:allowed?).and_call_original + allow(described_class).to receive(:allowed?).with(user, :read_cross_project, any_args) { false } + end + + it 'excludes issues from other projects without checking separately when passing a scope' do + expect(described_class).not_to receive(:allowed?).with(user, :read_issue, other_project_issue) + + filters = { read_cross_project: -> (issues) { issues.where(project: project) } } + result = described_class.issues_readable_by_user(Issue.all, user, filters: filters) + + expect(result).to contain_exactly(issue) + end + end end describe '.project_disabled_features_rules' do diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb index e89e534d914..504bc710b25 100644 --- a/spec/models/chat_name_spec.rb +++ b/spec/models/chat_name_spec.rb @@ -14,4 +14,24 @@ describe ChatName do it { is_expected.to validate_uniqueness_of(:user_id).scoped_to(:service_id) } it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:service_id, :team_id) } + + describe '#update_last_used_at', :clean_gitlab_redis_shared_state do + it 'updates the last_used_at timestamp' do + expect(subject.last_used_at).to be_nil + + subject.update_last_used_at + + expect(subject.last_used_at).to be_present + end + + it 'does not update last_used_at if it was recently updated' do + subject.update_last_used_at + + time = subject.last_used_at + + subject.update_last_used_at + + expect(subject.last_used_at).to eq(time) + end + end end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index 0b3d5c6a0bd..c27313ed88b 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -277,7 +277,7 @@ describe Ci::Build do allow_any_instance_of(Project).to receive(:jobs_cache_index).and_return(1) end - it { is_expected.to be_an(Array).and all(include(key: "key_1")) } + it { is_expected.to be_an(Array).and all(include(key: "key-1")) } end context 'when project does not have jobs_cache_index' do @@ -1413,6 +1413,7 @@ describe Ci::Build do [ { key: 'CI', value: 'true', public: true }, { key: 'GITLAB_CI', value: 'true', public: true }, + { key: 'GITLAB_FEATURES', value: project.namespace.features.join(','), public: true }, { key: 'CI_SERVER_NAME', 
value: 'GitLab', public: true }, { key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true }, { key: 'CI_SERVER_REVISION', value: Gitlab::REVISION, public: true }, @@ -1589,7 +1590,7 @@ describe Ci::Build do context 'when the branch is protected' do before do - create(:protected_branch, project: build.project, name: build.ref) + allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true) end it { is_expected.to include(protected_variable) } @@ -1597,7 +1598,7 @@ describe Ci::Build do context 'when the tag is protected' do before do - create(:protected_tag, project: build.project, name: build.ref) + allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true) end it { is_expected.to include(protected_variable) } @@ -1634,7 +1635,7 @@ describe Ci::Build do context 'when the branch is protected' do before do - create(:protected_branch, project: build.project, name: build.ref) + allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true) end it { is_expected.to include(protected_variable) } @@ -1642,7 +1643,7 @@ describe Ci::Build do context 'when the tag is protected' do before do - create(:protected_tag, project: build.project, name: build.ref) + allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true) end it { is_expected.to include(protected_variable) } diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb index 145189e7469..1b10501701c 100644 --- a/spec/models/ci/group_variable_spec.rb +++ b/spec/models/ci/group_variable_spec.rb @@ -5,7 +5,7 @@ describe Ci::GroupVariable do it { is_expected.to include_module(HasVariable) } it { is_expected.to include_module(Presentable) } - it { is_expected.to validate_uniqueness_of(:key).scoped_to(:group_id) } + it { is_expected.to validate_uniqueness_of(:key).scoped_to(:group_id).with_message(/\(\w+\) has already been taken/) } describe '.unprotected' do subject { described_class.unprotected } diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb index e4ff551151e..875e8b2b682 100644 --- a/spec/models/ci/variable_spec.rb +++ b/spec/models/ci/variable_spec.rb @@ -6,7 +6,7 @@ describe Ci::Variable do describe 'validations' do it { is_expected.to include_module(HasVariable) } it { is_expected.to include_module(Presentable) } - it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope) } + it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope).with_message(/\(\w+\) has already been taken/) } end describe '.unprotected' do diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb index eb57abaf6ef..ba7bad617b4 100644 --- a/spec/models/clusters/applications/helm_spec.rb +++ b/spec/models/clusters/applications/helm_spec.rb @@ -1,102 +1,17 @@ require 'rails_helper' describe Clusters::Applications::Helm do - it { is_expected.to belong_to(:cluster) } - it { is_expected.to validate_presence_of(:cluster) } - - describe '#name' do - it 'is .application_name' do - expect(subject.name).to eq(described_class.application_name) - end - - it 'is recorded in Clusters::Cluster::APPLICATIONS' do - expect(Clusters::Cluster::APPLICATIONS[subject.name]).to eq(described_class) - end - end - - describe '#version' do - it 'defaults to Gitlab::Kubernetes::Helm::HELM_VERSION' do - expect(subject.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION) - end - end - - describe '#status' do - let(:cluster) { 
create(:cluster) } - - subject { described_class.new(cluster: cluster) } - - it 'defaults to :not_installable' do - expect(subject.status_name).to be(:not_installable) - end - - context 'when platform kubernetes is defined' do - let(:cluster) { create(:cluster, :provided_by_gcp) } - - it 'defaults to :installable' do - expect(subject.status_name).to be(:installable) - end - end - end + include_examples 'cluster application core specs', :clusters_applications_helm describe '#install_command' do - it 'has all the needed information' do - expect(subject.install_command).to have_attributes(name: subject.name, install_helm: true) - end - end - - describe 'status state machine' do - describe '#make_installing' do - subject { create(:clusters_applications_helm, :scheduled) } - - it 'is installing' do - subject.make_installing! - - expect(subject).to be_installing - end - end - - describe '#make_installed' do - subject { create(:clusters_applications_helm, :installing) } - - it 'is installed' do - subject.make_installed - - expect(subject).to be_installed - end - end - - describe '#make_errored' do - subject { create(:clusters_applications_helm, :installing) } - let(:reason) { 'some errors' } - - it 'is errored' do - subject.make_errored(reason) - - expect(subject).to be_errored - expect(subject.status_reason).to eq(reason) - end - end - - describe '#make_scheduled' do - subject { create(:clusters_applications_helm, :installable) } - - it 'is scheduled' do - subject.make_scheduled - - expect(subject).to be_scheduled - end - - describe 'when was errored' do - subject { create(:clusters_applications_helm, :errored) } + let(:helm) { create(:clusters_applications_helm) } - it 'clears #status_reason' do - expect(subject.status_reason).not_to be_nil + subject { helm.install_command } - subject.make_scheduled! + it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InitCommand) } - expect(subject.status_reason).to be_nil - end - end + it 'should be initialized with 1 arguments' do + expect(subject.name).to eq('helm') end end end diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb index 619c088b0bf..03f5b88a525 100644 --- a/spec/models/clusters/applications/ingress_spec.rb +++ b/spec/models/clusters/applications/ingress_spec.rb @@ -1,8 +1,78 @@ require 'rails_helper' describe Clusters::Applications::Ingress do - it { is_expected.to belong_to(:cluster) } - it { is_expected.to validate_presence_of(:cluster) } + let(:ingress) { create(:clusters_applications_ingress) } - include_examples 'cluster application specs', described_class + include_examples 'cluster application core specs', :clusters_applications_ingress + include_examples 'cluster application status specs', :cluster_application_ingress + + before do + allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in) + allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async) + end + + describe '#make_installed!' do + before do + application.make_installed! 
+ end + + let(:application) { create(:clusters_applications_ingress, :installing) } + + it 'schedules a ClusterWaitForIngressIpAddressWorker' do + expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_in) + .with(Clusters::Applications::Ingress::FETCH_IP_ADDRESS_DELAY, 'ingress', application.id) + end + end + + describe '#schedule_status_update' do + let(:application) { create(:clusters_applications_ingress, :installed) } + + before do + application.schedule_status_update + end + + it 'schedules a ClusterWaitForIngressIpAddressWorker' do + expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_async) + .with('ingress', application.id) + end + + context 'when the application is not installed' do + let(:application) { create(:clusters_applications_ingress, :installing) } + + it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do + expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_async) + end + end + + context 'when there is already an external_ip' do + let(:application) { create(:clusters_applications_ingress, :installed, external_ip: '111.222.222.111') } + + it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do + expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in) + end + end + end + + describe '#install_command' do + subject { ingress.install_command } + + it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) } + + it 'should be initialized with ingress arguments' do + expect(subject.name).to eq('ingress') + expect(subject.chart).to eq('stable/nginx-ingress') + expect(subject.values).to eq(ingress.values) + end + end + + describe '#values' do + subject { ingress.values } + + it 'should include ingress valid keys' do + is_expected.to include('image') + is_expected.to include('repository') + is_expected.to include('stats') + is_expected.to include('podAnnotations') + end + end end diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb index 696099f7cf7..df8a508e021 100644 --- a/spec/models/clusters/applications/prometheus_spec.rb +++ b/spec/models/clusters/applications/prometheus_spec.rb @@ -1,16 +1,107 @@ require 'rails_helper' describe Clusters::Applications::Prometheus do - it { is_expected.to belong_to(:cluster) } - it { is_expected.to validate_presence_of(:cluster) } + include_examples 'cluster application core specs', :clusters_applications_prometheus + include_examples 'cluster application status specs', :cluster_application_prometheus - include_examples 'cluster application specs', described_class + describe 'transition to installed' do + let(:project) { create(:project) } + let(:cluster) { create(:cluster, projects: [project]) } + let(:prometheus_service) { double('prometheus_service') } - describe "#chart_values_file" do - subject { create(:clusters_applications_prometheus).chart_values_file } + subject { create(:clusters_applications_prometheus, :installing, cluster: cluster) } - it 'should return chart values file path' do - expect(subject).to eq("#{Rails.root}/vendor/prometheus/values.yaml") + before do + allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service + end + + it 'ensures Prometheus service is activated' do + expect(prometheus_service).to receive(:update).with(active: true) + + subject.make_installed + end + end + + describe '#proxy_client' do + context 'cluster is nil' do + it 'returns nil' do + expect(subject.cluster).to 
be_nil + expect(subject.proxy_client).to be_nil + end + end + + context "cluster doesn't have kubeclient" do + let(:cluster) { create(:cluster) } + subject { create(:clusters_applications_prometheus, cluster: cluster) } + + it 'returns nil' do + expect(subject.proxy_client).to be_nil + end + end + + context 'cluster has kubeclient' do + let(:kubernetes_url) { 'http://example.com' } + let(:k8s_discover_response) do + { + resources: [ + { + name: 'service', + kind: 'Service' + } + ] + } + end + + let(:kube_client) { Kubeclient::Client.new(kubernetes_url) } + + let(:cluster) { create(:cluster) } + subject { create(:clusters_applications_prometheus, cluster: cluster) } + + before do + allow(kube_client.rest_client).to receive(:get).and_return(k8s_discover_response.to_json) + allow(subject.cluster).to receive(:kubeclient).and_return(kube_client) + end + + it 'creates proxy prometheus rest client' do + expect(subject.proxy_client).to be_instance_of(RestClient::Resource) + end + + it 'creates proper url' do + expect(subject.proxy_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80') + end + + it 'copies options and headers from kube client to proxy client' do + expect(subject.proxy_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers)) + end + end + end + + describe '#install_command' do + let(:kubeclient) { double('kubernetes client') } + let(:prometheus) { create(:clusters_applications_prometheus) } + + subject { prometheus.install_command } + + it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) } + + it 'should be initialized with 3 arguments' do + expect(subject.name).to eq('prometheus') + expect(subject.chart).to eq('stable/prometheus') + expect(subject.values).to eq(prometheus.values) + end + end + + describe '#values' do + let(:prometheus) { create(:clusters_applications_prometheus) } + + subject { prometheus.values } + + it 'should include prometheus valid values' do + is_expected.to include('alertmanager') + is_expected.to include('kubeStateMetrics') + is_expected.to include('nodeExporter') + is_expected.to include('pushgateway') + is_expected.to include('serverFiles') end end end diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb new file mode 100644 index 00000000000..612a3c8e413 --- /dev/null +++ b/spec/models/clusters/applications/runner_spec.rb @@ -0,0 +1,69 @@ +require 'rails_helper' + +describe Clusters::Applications::Runner do + let(:ci_runner) { create(:ci_runner) } + + include_examples 'cluster application core specs', :clusters_applications_runner + include_examples 'cluster application status specs', :cluster_application_runner + + it { is_expected.to belong_to(:runner) } + + describe '#install_command' do + let(:kubeclient) { double('kubernetes client') } + let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) } + + subject { gitlab_runner.install_command } + + it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) } + + it 'should be initialized with 4 arguments' do + expect(subject.name).to eq('runner') + expect(subject.chart).to eq('runner/gitlab-runner') + expect(subject.repository).to eq('https://charts.gitlab.io') + expect(subject.values).to eq(gitlab_runner.values) + end + end + + describe '#values' do + let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) } + + subject { gitlab_runner.values } + + 
it 'should include runner valid values' do + is_expected.to include('concurrent') + is_expected.to include('checkInterval') + is_expected.to include('rbac') + is_expected.to include('runners') + is_expected.to include('resources') + is_expected.to include("runnerToken: #{ci_runner.token}") + is_expected.to include("gitlabUrl: #{Gitlab::Routing.url_helpers.root_url}") + end + + context 'without a runner' do + let(:project) { create(:project) } + let(:cluster) { create(:cluster) } + let(:gitlab_runner) { create(:clusters_applications_runner, cluster: cluster) } + + before do + cluster.projects << project + end + + it 'creates a runner' do + expect do + subject + end.to change { Ci::Runner.count }.by(1) + end + + it 'uses the new runner token' do + expect(subject).to include("runnerToken: #{gitlab_runner.reload.runner.token}") + end + + it 'assigns the new runner to runner' do + subject + gitlab_runner.reload + + expect(gitlab_runner.runner).not_to be_nil + end + end + end +end diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb index 799d7ced116..8f12a0e3085 100644 --- a/spec/models/clusters/cluster_spec.rb +++ b/spec/models/clusters/cluster_spec.rb @@ -8,6 +8,7 @@ describe Clusters::Cluster do it { is_expected.to have_one(:application_helm) } it { is_expected.to have_one(:application_ingress) } it { is_expected.to have_one(:application_prometheus) } + it { is_expected.to have_one(:application_runner) } it { is_expected.to delegate_method(:status).to(:provider) } it { is_expected.to delegate_method(:status_reason).to(:provider) } it { is_expected.to delegate_method(:status_name).to(:provider) } @@ -196,9 +197,10 @@ describe Clusters::Cluster do let!(:helm) { create(:clusters_applications_helm, cluster: cluster) } let!(:ingress) { create(:clusters_applications_ingress, cluster: cluster) } let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) } + let!(:runner) { create(:clusters_applications_runner, cluster: cluster) } it 'returns a list of created applications' do - is_expected.to contain_exactly(helm, ingress, prometheus) + is_expected.to contain_exactly(helm, ingress, prometheus, runner) end end end diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb index c536dab2681..b7ed8be69fc 100644 --- a/spec/models/commit_status_spec.rb +++ b/spec/models/commit_status_spec.rb @@ -368,7 +368,9 @@ describe CommitStatus do 'rspec:windows 0 : / 1' => 'rspec:windows', 'rspec:windows 0 : / 1 name' => 'rspec:windows name', '0 1 name ruby' => 'name ruby', - '0 :/ 1 name ruby' => 'name ruby' + '0 :/ 1 name ruby' => 'name ruby', + 'golang test 1.8' => 'golang test', + '1.9 golang test' => 'golang test' } tests.each do |name, group_name| diff --git a/spec/models/concerns/protected_ref_access_spec.rb b/spec/models/concerns/protected_ref_access_spec.rb new file mode 100644 index 00000000000..a62ca391e25 --- /dev/null +++ b/spec/models/concerns/protected_ref_access_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe ProtectedRefAccess do + subject(:protected_ref_access) do + create(:protected_branch, :masters_can_push).push_access_levels.first + end + + let(:project) { protected_ref_access.project } + + describe '#check_access' do + it 'is always true for admins' do + admin = create(:admin) + + expect(protected_ref_access.check_access(admin)).to be_truthy + end + + it 'is true for masters' do + master = create(:user) + project.add_master(master) + + expect(protected_ref_access.check_access(master)).to be_truthy + 
end + + it 'is for developers of the project' do + developer = create(:user) + project.add_developer(developer) + + expect(protected_ref_access.check_access(developer)).to be_falsy + end + end +end diff --git a/spec/models/cycle_analytics/code_spec.rb b/spec/models/cycle_analytics/code_spec.rb index f2f1928926c..6a6b58fb52b 100644 --- a/spec/models/cycle_analytics/code_spec.rb +++ b/spec/models/cycle_analytics/code_spec.rb @@ -18,11 +18,11 @@ describe 'CycleAnalytics#code' do end]], end_time_conditions: [["merge request that closes issue is created", -> (context, data) do - context.create_merge_request_closing_issue(data[:issue]) + context.create_merge_request_closing_issue(context.user, context.project, data[:issue]) end]], post_fn: -> (context, data) do - context.merge_merge_requests_closing_issue(data[:issue]) - context.deploy_master + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) + context.deploy_master(context.user, context.project) end) context "when a regular merge request (that doesn't close the issue) is created" do @@ -30,10 +30,10 @@ describe 'CycleAnalytics#code' do issue = create(:issue, project: project) create_commit_referencing_issue(issue) - create_merge_request_closing_issue(issue, message: "Closes nothing") + create_merge_request_closing_issue(user, project, issue, message: "Closes nothing") - merge_merge_requests_closing_issue(issue) - deploy_master + merge_merge_requests_closing_issue(user, project, issue) + deploy_master(user, project) expect(subject[:code].median).to be_nil end @@ -50,10 +50,10 @@ describe 'CycleAnalytics#code' do end]], end_time_conditions: [["merge request that closes issue is created", -> (context, data) do - context.create_merge_request_closing_issue(data[:issue]) + context.create_merge_request_closing_issue(context.user, context.project, data[:issue]) end]], post_fn: -> (context, data) do - context.merge_merge_requests_closing_issue(data[:issue]) + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) end) context "when a regular merge request (that doesn't close the issue) is created" do @@ -61,9 +61,9 @@ describe 'CycleAnalytics#code' do issue = create(:issue, project: project) create_commit_referencing_issue(issue) - create_merge_request_closing_issue(issue, message: "Closes nothing") + create_merge_request_closing_issue(user, project, issue, message: "Closes nothing") - merge_merge_requests_closing_issue(issue) + merge_merge_requests_closing_issue(user, project, issue) expect(subject[:code].median).to be_nil end diff --git a/spec/models/cycle_analytics/issue_spec.rb b/spec/models/cycle_analytics/issue_spec.rb index 985e1bf80be..45f1b4fe8a3 100644 --- a/spec/models/cycle_analytics/issue_spec.rb +++ b/spec/models/cycle_analytics/issue_spec.rb @@ -26,8 +26,8 @@ describe 'CycleAnalytics#issue' do end]], post_fn: -> (context, data) do if data[:issue].persisted? 
- context.create_merge_request_closing_issue(data[:issue].reload) - context.merge_merge_requests_closing_issue(data[:issue]) + context.create_merge_request_closing_issue(context.user, context.project, data[:issue].reload) + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) end end) @@ -37,8 +37,8 @@ describe 'CycleAnalytics#issue' do issue = create(:issue, project: project) issue.update(label_ids: [regular_label.id]) - create_merge_request_closing_issue(issue) - merge_merge_requests_closing_issue(issue) + create_merge_request_closing_issue(user, project, issue) + merge_merge_requests_closing_issue(user, project, issue) expect(subject[:issue].median).to be_nil end diff --git a/spec/models/cycle_analytics/plan_spec.rb b/spec/models/cycle_analytics/plan_spec.rb index 6fbb2a2d102..d366e2b723a 100644 --- a/spec/models/cycle_analytics/plan_spec.rb +++ b/spec/models/cycle_analytics/plan_spec.rb @@ -29,8 +29,8 @@ describe 'CycleAnalytics#plan' do context.create_commit_referencing_issue(data[:issue], branch_name: data[:branch_name]) end]], post_fn: -> (context, data) do - context.create_merge_request_closing_issue(data[:issue], source_branch: data[:branch_name]) - context.merge_merge_requests_closing_issue(data[:issue]) + context.create_merge_request_closing_issue(context.user, context.project, data[:issue], source_branch: data[:branch_name]) + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) end) context "when a regular label (instead of a list label) is added to the issue" do @@ -41,8 +41,8 @@ describe 'CycleAnalytics#plan' do issue.update(label_ids: [label.id]) create_commit_referencing_issue(issue, branch_name: branch_name) - create_merge_request_closing_issue(issue, source_branch: branch_name) - merge_merge_requests_closing_issue(issue) + create_merge_request_closing_issue(user, project, issue, source_branch: branch_name) + merge_merge_requests_closing_issue(user, project, issue) expect(subject[:issue].median).to be_nil end diff --git a/spec/models/cycle_analytics/production_spec.rb b/spec/models/cycle_analytics/production_spec.rb index f8681c0a2f9..156eb96cfce 100644 --- a/spec/models/cycle_analytics/production_spec.rb +++ b/spec/models/cycle_analytics/production_spec.rb @@ -13,11 +13,11 @@ describe 'CycleAnalytics#production' do data_fn: -> (context) { { issue: context.build(:issue, project: context.project) } }, start_time_conditions: [["issue is created", -> (context, data) { data[:issue].save }]], before_end_fn: lambda do |context, data| - context.create_merge_request_closing_issue(data[:issue]) - context.merge_merge_requests_closing_issue(data[:issue]) + context.create_merge_request_closing_issue(context.user, context.project, data[:issue]) + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) end, end_time_conditions: - [["merge request that closes issue is deployed to production", -> (context, data) { context.deploy_master }], + [["merge request that closes issue is deployed to production", -> (context, data) { context.deploy_master(context.user, context.project) }], ["production deploy happens after merge request is merged (along with other changes)", lambda do |context, data| # Make other changes on master @@ -29,14 +29,14 @@ describe 'CycleAnalytics#production' do branch_name: 'master') context.project.repository.commit(sha) - context.deploy_master + context.deploy_master(context.user, context.project) end]]) context "when a regular merge request (that doesn't close the 
issue) is merged and deployed" do it "returns nil" do merge_request = create(:merge_request) MergeRequests::MergeService.new(project, user).execute(merge_request) - deploy_master + deploy_master(user, project) expect(subject[:production].median).to be_nil end @@ -45,9 +45,9 @@ describe 'CycleAnalytics#production' do context "when the deployment happens to a non-production environment" do it "returns nil" do issue = create(:issue, project: project) - merge_request = create_merge_request_closing_issue(issue) + merge_request = create_merge_request_closing_issue(user, project, issue) MergeRequests::MergeService.new(project, user).execute(merge_request) - deploy_master(environment: 'staging') + deploy_master(user, project, environment: 'staging') expect(subject[:production].median).to be_nil end diff --git a/spec/models/cycle_analytics/review_spec.rb b/spec/models/cycle_analytics/review_spec.rb index 0ac58695b35..0aedfb49cb5 100644 --- a/spec/models/cycle_analytics/review_spec.rb +++ b/spec/models/cycle_analytics/review_spec.rb @@ -13,11 +13,11 @@ describe 'CycleAnalytics#review' do data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } }, start_time_conditions: [["merge request that closes issue is created", -> (context, data) do - context.create_merge_request_closing_issue(data[:issue]) + context.create_merge_request_closing_issue(context.user, context.project, data[:issue]) end]], end_time_conditions: [["merge request that closes issue is merged", -> (context, data) do - context.merge_merge_requests_closing_issue(data[:issue]) + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) end]], post_fn: nil) diff --git a/spec/models/cycle_analytics/staging_spec.rb b/spec/models/cycle_analytics/staging_spec.rb index b66d5623910..0cbda50c688 100644 --- a/spec/models/cycle_analytics/staging_spec.rb +++ b/spec/models/cycle_analytics/staging_spec.rb @@ -13,15 +13,15 @@ describe 'CycleAnalytics#staging' do phase: :staging, data_fn: lambda do |context| issue = context.create(:issue, project: context.project) - { issue: issue, merge_request: context.create_merge_request_closing_issue(issue) } + { issue: issue, merge_request: context.create_merge_request_closing_issue(context.user, context.project, issue) } end, start_time_conditions: [["merge request that closes issue is merged", -> (context, data) do - context.merge_merge_requests_closing_issue(data[:issue]) + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) end]], end_time_conditions: [["merge request that closes issue is deployed to production", -> (context, data) do - context.deploy_master + context.deploy_master(context.user, context.project) end], ["production deploy happens after merge request is merged (along with other changes)", lambda do |context, data| @@ -34,14 +34,14 @@ describe 'CycleAnalytics#staging' do branch_name: 'master') context.project.repository.commit(sha) - context.deploy_master + context.deploy_master(context.user, context.project) end]]) context "when a regular merge request (that doesn't close the issue) is merged and deployed" do it "returns nil" do merge_request = create(:merge_request) MergeRequests::MergeService.new(project, user).execute(merge_request) - deploy_master + deploy_master(user, project) expect(subject[:staging].median).to be_nil end @@ -50,9 +50,9 @@ describe 'CycleAnalytics#staging' do context "when the deployment happens to a non-production environment" do it "returns nil" do issue = create(:issue, project: 
project) - merge_request = create_merge_request_closing_issue(issue) + merge_request = create_merge_request_closing_issue(user, project, issue) MergeRequests::MergeService.new(project, user).execute(merge_request) - deploy_master(environment: 'staging') + deploy_master(user, project, environment: 'staging') expect(subject[:staging].median).to be_nil end diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb index 690c09bc2dc..e58b8fdff58 100644 --- a/spec/models/cycle_analytics/test_spec.rb +++ b/spec/models/cycle_analytics/test_spec.rb @@ -12,26 +12,26 @@ describe 'CycleAnalytics#test' do phase: :test, data_fn: lambda do |context| issue = context.create(:issue, project: context.project) - merge_request = context.create_merge_request_closing_issue(issue) + merge_request = context.create_merge_request_closing_issue(context.user, context.project, issue) pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project, head_pipeline_of: merge_request) { pipeline: pipeline, issue: issue } end, start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]], end_time_conditions: [["pipeline is finished", -> (context, data) { data[:pipeline].succeed! }]], post_fn: -> (context, data) do - context.merge_merge_requests_closing_issue(data[:issue]) + context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue]) end) context "when the pipeline is for a regular merge request (that doesn't close an issue)" do it "returns nil" do issue = create(:issue, project: project) - merge_request = create_merge_request_closing_issue(issue) + merge_request = create_merge_request_closing_issue(user, project, issue) pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha) pipeline.run! pipeline.succeed! - merge_merge_requests_closing_issue(issue) + merge_merge_requests_closing_issue(user, project, issue) expect(subject[:test].median).to be_nil end @@ -51,13 +51,13 @@ describe 'CycleAnalytics#test' do context "when the pipeline is dropped (failed)" do it "returns nil" do issue = create(:issue, project: project) - merge_request = create_merge_request_closing_issue(issue) + merge_request = create_merge_request_closing_issue(user, project, issue) pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha) pipeline.run! pipeline.drop! - merge_merge_requests_closing_issue(issue) + merge_merge_requests_closing_issue(user, project, issue) expect(subject[:test].median).to be_nil end @@ -66,13 +66,13 @@ describe 'CycleAnalytics#test' do context "when the pipeline is cancelled" do it "returns nil" do issue = create(:issue, project: project) - merge_request = create_merge_request_closing_issue(issue) + merge_request = create_merge_request_closing_issue(user, project, issue) pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha) pipeline.run! pipeline.cancel! 
- merge_merge_requests_closing_issue(issue)
+ merge_merge_requests_closing_issue(user, project, issue)
 expect(subject[:test].median).to be_nil
 end
diff --git a/spec/models/cycle_analytics_spec.rb b/spec/models/cycle_analytics_spec.rb
new file mode 100644
index 00000000000..0fe24870f02
--- /dev/null
+++ b/spec/models/cycle_analytics_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe CycleAnalytics do
+ let(:project) { create(:project, :repository) }
+ let(:from_date) { 10.days.ago }
+ let(:user) { create(:user, :admin) }
+ let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
+ let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) }
+
+ subject { described_class.new(project, from: from_date) }
+
+ describe '#all_medians_per_stage' do
+ before do
+ allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
+
+ create_cycle(user, project, issue, mr, milestone, pipeline)
+ deploy_master(user, project)
+ end
+
+ it 'returns every median for each stage for a specific project' do
+ values = described_class::STAGES.each_with_object({}) do |stage_name, hsh|
+ hsh[stage_name] = subject[stage_name].median.presence
+ end
+
+ expect(subject.all_medians_per_stage).to eq(values)
+ end
+ end
+end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 338fb314ee9..4f16b73ef38 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -549,7 +549,7 @@ describe Group do
 context 'when the ref is a protected branch' do
 before do
- create(:protected_branch, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
 end
 it_behaves_like 'ref is protected'
@@ -557,7 +557,7 @@ describe Group do
 context 'when the ref is a protected tag' do
 before do
- create(:protected_tag, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
 end
 it_behaves_like 'ref is protected'
@@ -571,6 +571,10 @@ describe Group do
 let(:variable_child_2) { create(:ci_group_variable, group: group_child_2) }
 let(:variable_child_3) { create(:ci_group_variable, group: group_child_3) }
+ before do
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
+ end
+
 it 'returns all variables belong to the group and parent groups' do
 expected_array1 = [protected_variable, secret_variable]
 expected_array2 = [variable_child, variable_child_2, variable_child_3]
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 7c66c98231b..a5ce245c21d 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -70,5 +70,38 @@ describe Identity do
 end
 end
 end
+
+ context 'after_destroy' do
+ let!(:user) { create(:user) }
+ let(:ldap_identity) { create(:identity, provider: 'ldapmain', extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', user: user) }
+ let(:ldap_user_synced_attributes) { { provider: 'ldapmain', name_synced: true, email_synced: true } }
+ let(:other_provider_user_synced_attributes) { { provider: 'other', name_synced: true, email_synced: true } }
+
+ describe 'if user synced attributes metadata provider' do
+ context 'matches the identity provider' do
+ it 'removes the user synced attributes' do
+ user.create_user_synced_attributes_metadata(ldap_user_synced_attributes)
+
+ expect(user.user_synced_attributes_metadata.provider).to eq 'ldapmain'
+
+ ldap_identity.destroy
+
+ expect(user.reload.user_synced_attributes_metadata).to be_nil
+ end
+ end
+
+ context 'does not match the identity provider' do
+ it 'does not remove the user synced attributes' do
+ user.create_user_synced_attributes_metadata(other_provider_user_synced_attributes)
+
+ expect(user.user_synced_attributes_metadata.provider).to eq 'other'
+
+ ldap_identity.destroy
+
+ expect(user.reload.user_synced_attributes_metadata.provider).to eq 'other'
+ end
+ end
+ end
+ end
 end
 end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index f5c9f551e65..feed7968f09 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -221,27 +221,55 @@ describe Issue do
 end
 describe '#referenced_merge_requests' do
- it 'returns the referenced merge requests' do
- project = create(:project, :public)
-
- mr1 = create(:merge_request,
- source_project: project,
- source_branch: 'master',
- target_branch: 'feature')
+ let(:project) { create(:project, :public) }
+ let(:issue) do
+ create(:issue, description: merge_request.to_reference, project: project)
+ end
+ let!(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'master',
+ target_branch: 'feature')
+ end
+ it 'returns the referenced merge requests' do
 mr2 = create(:merge_request,
 source_project: project,
 source_branch: 'feature',
 target_branch: 'master')
- issue = create(:issue, description: mr1.to_reference, project: project)
-
 create(:note_on_issue, noteable: issue, note: mr2.to_reference, project_id: project.id)
- expect(issue.referenced_merge_requests).to eq([mr1, mr2])
+ expect(issue.referenced_merge_requests).to eq([merge_request, mr2])
+ end
+
+ it 'returns cross project referenced merge requests' do
+ other_project = create(:project, :public)
+ cross_project_merge_request = create(:merge_request, source_project: other_project)
+ create(:note_on_issue,
+ noteable: issue,
+ note: cross_project_merge_request.to_reference(issue.project),
+ project_id: issue.project.id)
+
+ expect(issue.referenced_merge_requests).to eq([merge_request, cross_project_merge_request])
+ end
+
+ it 'excludes cross project references if the user cannot read cross project' do
+ user = create(:user)
+ allow(Ability).to receive(:allowed?).and_call_original
+ expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
+
+ other_project = create(:project, :public)
+ cross_project_merge_request = create(:merge_request, source_project: other_project)
+ create(:note_on_issue,
+ noteable: issue,
+ note: cross_project_merge_request.to_reference(issue.project),
+ project_id: issue.project.id)
+
+ expect(issue.referenced_merge_requests(user)).to eq([merge_request])
 end
 end
@@ -309,7 +337,7 @@ describe Issue do
 end
 describe '#related_branches' do
- let(:user) { build(:admin) }
+ let(:user) { create(:admin) }
 before do
 allow(subject.project.repository).to receive(:branch_names)
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index bf5703ac986..06d26ef89f1 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -12,6 +12,9 @@ describe Key, :mailer do
 it { is_expected.to validate_presence_of(:key) }
 it { is_expected.to validate_length_of(:key).is_at_most(5000) }
 it { is_expected.to allow_value(attributes_for(:rsa_key_2048)[:key]).for(:key) }
+ it { is_expected.to
allow_value(attributes_for(:rsa_key_4096)[:key]).for(:key) } + it { is_expected.to allow_value(attributes_for(:rsa_key_5120)[:key]).for(:key) } + it { is_expected.to allow_value(attributes_for(:rsa_key_8192)[:key]).for(:key) } it { is_expected.to allow_value(attributes_for(:dsa_key_2048)[:key]).for(:key) } it { is_expected.to allow_value(attributes_for(:ecdsa_key_256)[:key]).for(:key) } it { is_expected.to allow_value(attributes_for(:ed25519_key_256)[:key]).for(:key) } @@ -103,24 +106,6 @@ describe Key, :mailer do end end - context 'validate size' do - where(:key_content, :result) do - [ - [Spec::Support::Helpers::KeyGeneratorHelper.new(512).generate, false], - [Spec::Support::Helpers::KeyGeneratorHelper.new(8192).generate, false], - [Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate, true] - ] - end - - with_them do - it 'validates the size of the key' do - key = build(:key, key: key_content) - - expect(key.valid?).to eq(result) - end - end - end - context 'validate it meets key restrictions' do where(:factory, :minimum, :result) do forbidden = ApplicationSetting::FORBIDDEN_KEY_VALUE diff --git a/spec/models/lfs_file_lock_spec.rb b/spec/models/lfs_file_lock_spec.rb new file mode 100644 index 00000000000..ce87b01b49c --- /dev/null +++ b/spec/models/lfs_file_lock_spec.rb @@ -0,0 +1,57 @@ +require 'rails_helper' + +describe LfsFileLock do + set(:lfs_file_lock) { create(:lfs_file_lock) } + subject { lfs_file_lock } + + it { is_expected.to belong_to(:project) } + it { is_expected.to belong_to(:user) } + + it { is_expected.to validate_presence_of(:project_id) } + it { is_expected.to validate_presence_of(:user_id) } + it { is_expected.to validate_presence_of(:path) } + + describe '#can_be_unlocked_by?' do + let(:developer) { create(:user) } + let(:master) { create(:user) } + + before do + project = lfs_file_lock.project + + project.add_developer(developer) + project.add_master(master) + end + + context "when it's forced" do + it 'can be unlocked by the author' do + user = lfs_file_lock.user + + expect(lfs_file_lock.can_be_unlocked_by?(user, true)).to eq(true) + end + + it 'can be unlocked by a master' do + expect(lfs_file_lock.can_be_unlocked_by?(master, true)).to eq(true) + end + + it "can't be unlocked by other user" do + expect(lfs_file_lock.can_be_unlocked_by?(developer, true)).to eq(false) + end + end + + context "when it isn't forced" do + it 'can be unlocked by the author' do + user = lfs_file_lock.user + + expect(lfs_file_lock.can_be_unlocked_by?(user)).to eq(true) + end + + it "can't be unlocked by a master" do + expect(lfs_file_lock.can_be_unlocked_by?(master)).to eq(false) + end + + it "can't be unlocked by other user" do + expect(lfs_file_lock.can_be_unlocked_by?(developer)).to eq(false) + end + end + end +end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index 191b60e4383..e626efd054d 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -168,84 +168,105 @@ describe Namespace do end describe '#move_dir', :request_store do - let(:namespace) { create(:namespace) } - let!(:project) { create(:project_empty_repo, namespace: namespace) } + shared_examples "namespace restrictions" do + context "when any project has container images" do + let(:container_repository) { create(:container_repository) } - it "raises error when directory exists" do - expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved") - end + before do + stub_container_registry_config(enabled: true) + 
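The new spec/models/lfs_file_lock_spec.rb above pins down the unlock rules: the lock author may always unlock, a master may unlock only when the call is forced, and other users never may. A model method consistent with those expectations could look roughly like this (a sketch only; the ability name is an assumption, not the actual LfsFileLock source):

    # Sketch inferred from the expectations in lfs_file_lock_spec.rb above.
    def can_be_unlocked_by?(current_user, forced = false)
      return true if current_user == user # the lock author may always unlock

      # anyone else needs a forced unlock plus master-level rights on the project
      forced && current_user.can?(:admin_project, project)
    end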
stub_container_registry_tags(repository: :any, tags: ['tag']) - it "moves dir if path changed" do - namespace.update_attributes(path: namespace.full_path + '_new') + create(:project, namespace: namespace, container_repositories: [container_repository]) - expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy - end + allow(namespace).to receive(:path_was).and_return(namespace.path) + allow(namespace).to receive(:path).and_return('new_path') + end - context "when any project has container images" do - let(:container_repository) { create(:container_repository) } + it 'raises an error about not movable project' do + expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/) + end + end + end - before do - stub_container_registry_config(enabled: true) - stub_container_registry_tags(repository: :any, tags: ['tag']) + context 'legacy storage' do + let(:namespace) { create(:namespace) } + let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) } - create(:project, namespace: namespace, container_repositories: [container_repository]) + it_behaves_like 'namespace restrictions' - allow(namespace).to receive(:path_was).and_return(namespace.path) - allow(namespace).to receive(:path).and_return('new_path') + it "raises error when directory exists" do + expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved") end - it 'raises an error about not movable project' do - expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/) + it "moves dir if path changed" do + namespace.update_attributes(path: namespace.full_path + '_new') + + expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy end - end - context 'with subgroups' do - let(:parent) { create(:group, name: 'parent', path: 'parent') } - let(:child) { create(:group, name: 'child', path: 'child', parent: parent) } - let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) } - let(:uploads_dir) { FileUploader.root } - let(:pages_dir) { File.join(TestEnv.pages_path) } + context 'with subgroups' do + let(:parent) { create(:group, name: 'parent', path: 'parent') } + let(:child) { create(:group, name: 'child', path: 'child', parent: parent) } + let!(:project) { create(:project_empty_repo, :legacy_storage, path: 'the-project', namespace: child, skip_disk_validation: true) } + let(:uploads_dir) { FileUploader.root } + let(:pages_dir) { File.join(TestEnv.pages_path) } - before do - FileUtils.mkdir_p(File.join(uploads_dir, 'parent', 'child', 'the-project')) - FileUtils.mkdir_p(File.join(pages_dir, 'parent', 'child', 'the-project')) - end + before do + FileUtils.mkdir_p(File.join(uploads_dir, project.full_path)) + FileUtils.mkdir_p(File.join(pages_dir, project.full_path)) + end + + context 'renaming child' do + it 'correctly moves the repository, uploads and pages' do + expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git') + expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project') + expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project') - context 'renaming child' do - it 'correctly moves the repository, uploads and pages' do - expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git') - expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project') - 
expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project') + child.update_attributes!(path: 'renamed') - child.update_attributes!(path: 'renamed') + expect(File.directory?(expected_repository_path)).to be(true) + expect(File.directory?(expected_upload_path)).to be(true) + expect(File.directory?(expected_pages_path)).to be(true) + end + end + + context 'renaming parent' do + it 'correctly moves the repository, uploads and pages' do + expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git') + expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project') + expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project') - expect(File.directory?(expected_repository_path)).to be(true) - expect(File.directory?(expected_upload_path)).to be(true) - expect(File.directory?(expected_pages_path)).to be(true) + parent.update_attributes!(path: 'renamed') + + expect(File.directory?(expected_repository_path)).to be(true) + expect(File.directory?(expected_upload_path)).to be(true) + expect(File.directory?(expected_pages_path)).to be(true) + end end end + end - context 'renaming parent' do - it 'correctly moves the repository, uploads and pages' do - expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git') - expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project') - expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project') + context 'hashed storage' do + let(:namespace) { create(:namespace) } + let!(:project) { create(:project_empty_repo, namespace: namespace) } - parent.update_attributes!(path: 'renamed') + it_behaves_like 'namespace restrictions' - expect(File.directory?(expected_repository_path)).to be(true) - expect(File.directory?(expected_upload_path)).to be(true) - expect(File.directory?(expected_pages_path)).to be(true) - end + it "repository directory remains unchanged if path changed" do + before_disk_path = project.disk_path + namespace.update_attributes(path: namespace.full_path + '_new') + + expect(before_disk_path).to eq(project.disk_path) + expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy end end it 'updates project full path in .git/config for each project inside namespace' do parent = create(:group, name: 'mygroup', path: 'mygroup') subgroup = create(:group, name: 'mysubgroup', path: 'mysubgroup', parent: parent) - project_in_parent_group = create(:project, :repository, namespace: parent, name: 'foo1') - hashed_project_in_subgroup = create(:project, :repository, :hashed, namespace: subgroup, name: 'foo2') - legacy_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo3') + project_in_parent_group = create(:project, :legacy_storage, :repository, namespace: parent, name: 'foo1') + hashed_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo2') + legacy_project_in_subgroup = create(:project, :legacy_storage, :repository, namespace: subgroup, name: 'foo3') parent.update(path: 'mygroup_new') @@ -260,38 +281,18 @@ describe Namespace do end describe '#rm_dir', 'callback' do - let!(:project) { create(:project_empty_repo, namespace: namespace) } let(:repository_storage_path) { Gitlab.config.repositories.storages.default['path'] } let(:path_in_dir) { File.join(repository_storage_path, namespace.full_path) } let(:deleted_path) { namespace.full_path.gsub(namespace.path, "#{namespace.full_path}+#{namespace.id}+deleted") } 
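These namespace examples are split into 'legacy storage' and 'hashed storage' contexts because only legacy projects live under a path derived from the namespace; hashed-storage projects are addressed by a digest of the project ID, as the project_spec.rb hunks further down show (Digest::SHA2.hexdigest(project.id.to_s) under an @hashed prefix). A small standalone sketch of that derivation, assuming the SHA-256 scheme used there:

    # Sketch of the hashed-storage disk path exercised by these specs (assumes SHA-256 of the project ID).
    require 'digest'

    def hashed_disk_path(project_id)
      hash = Digest::SHA2.hexdigest(project_id.to_s)
      File.join('@hashed', hash[0..1], hash[2..3], hash) # e.g. "@hashed/6b/86/6b86b273..." for project ID 1
    end

Renaming the namespace therefore has nothing to move on disk for hashed projects, which is exactly what the 'repository directory remains unchanged if path changed' example asserts.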
let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) } - it 'renames its dirs when deleted' do - allow(GitlabShellWorker).to receive(:perform_in) - - namespace.destroy - - expect(File.exist?(deleted_path_in_dir)).to be(true) - end - - it 'schedules the namespace for deletion' do - expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path) - - namespace.destroy - end - - context 'in sub-groups' do - let(:parent) { create(:group, path: 'parent') } - let(:child) { create(:group, parent: parent, path: 'child') } - let!(:project) { create(:project_empty_repo, namespace: child) } - let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') } - let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") } - let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) } + context 'legacy storage' do + let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) } it 'renames its dirs when deleted' do allow(GitlabShellWorker).to receive(:perform_in) - child.destroy + namespace.destroy expect(File.exist?(deleted_path_in_dir)).to be(true) end @@ -299,14 +300,57 @@ describe Namespace do it 'schedules the namespace for deletion' do expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path) - child.destroy + namespace.destroy + end + + context 'in sub-groups' do + let(:parent) { create(:group, path: 'parent') } + let(:child) { create(:group, parent: parent, path: 'child') } + let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: child) } + let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') } + let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") } + let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) } + + it 'renames its dirs when deleted' do + allow(GitlabShellWorker).to receive(:perform_in) + + child.destroy + + expect(File.exist?(deleted_path_in_dir)).to be(true) + end + + it 'schedules the namespace for deletion' do + expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path) + + child.destroy + end + end + + it 'removes the exports folder' do + expect(namespace).to receive(:remove_exports!) + + namespace.destroy end end - it 'removes the exports folder' do - expect(namespace).to receive(:remove_exports!) + context 'hashed storage' do + let!(:project) { create(:project_empty_repo, namespace: namespace) } + + it 'has no repositories base directories to remove' do + allow(GitlabShellWorker).to receive(:perform_in) + + expect(File.exist?(path_in_dir)).to be(false) - namespace.destroy + namespace.destroy + + expect(File.exist?(deleted_path_in_dir)).to be(false) + end + + it 'removes the exports folder' do + expect(namespace).to receive(:remove_exports!) 
+ + namespace.destroy + end end end @@ -567,8 +611,8 @@ describe Namespace do end describe '#remove_exports' do - let(:legacy_project) { create(:project, :with_export, namespace: namespace) } - let(:hashed_project) { create(:project, :with_export, :hashed, namespace: namespace) } + let(:legacy_project) { create(:project, :with_export, :legacy_storage, namespace: namespace) } + let(:hashed_project) { create(:project, :with_export, namespace: namespace) } let(:export_path) { Dir.mktmpdir('namespace_remove_exports_spec') } let(:legacy_export) { legacy_project.export_project_path } let(:hashed_export) { hashed_project.export_project_path } diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb new file mode 100644 index 00000000000..eda0e1da835 --- /dev/null +++ b/spec/models/notification_recipient_spec.rb @@ -0,0 +1,16 @@ +require 'spec_helper' + +describe NotificationRecipient do + let(:user) { create(:user) } + let(:project) { create(:project, namespace: user.namespace) } + let(:target) { create(:issue, project: project) } + + subject(:recipient) { described_class.new(user, :watch, target: target, project: project) } + + it 'denies access to a target when cross project access is denied' do + allow(Ability).to receive(:allowed?).and_call_original + expect(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(false) + + expect(recipient.has_access?).to be_falsy + end +end diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb index 9d12f96c642..95713d8b85b 100644 --- a/spec/models/pages_domain_spec.rb +++ b/spec/models/pages_domain_spec.rb @@ -1,6 +1,10 @@ require 'spec_helper' describe PagesDomain do + using RSpec::Parameterized::TableSyntax + + subject(:pages_domain) { described_class.new } + describe 'associations' do it { is_expected.to belong_to(:project) } end @@ -64,19 +68,51 @@ describe PagesDomain do end end + describe 'validations' do + it { is_expected.to validate_presence_of(:verification_code) } + end + + describe '#verification_code' do + subject { pages_domain.verification_code } + + it 'is set automatically with 128 bits of SecureRandom data' do + expect(SecureRandom).to receive(:hex).with(16) { 'verification code' } + + is_expected.to eq('verification code') + end + end + + describe '#keyed_verification_code' do + subject { pages_domain.keyed_verification_code } + + it { is_expected.to eq("gitlab-pages-verification-code=#{pages_domain.verification_code}") } + end + + describe '#verification_domain' do + subject { pages_domain.verification_domain } + + it { is_expected.to be_nil } + + it 'is a well-known subdomain if the domain is present' do + pages_domain.domain = 'example.com' + + is_expected.to eq('_gitlab-pages-verification-code.example.com') + end + end + describe '#url' do subject { domain.url } context 'without the certificate' do let(:domain) { build(:pages_domain, certificate: '') } - it { is_expected.to eq('http://my.domain.com') } + it { is_expected.to eq("http://#{domain.domain}") } end context 'with a certificate' do let(:domain) { build(:pages_domain, :with_certificate) } - it { is_expected.to eq('https://my.domain.com') } + it { is_expected.to eq("https://#{domain.domain}") } end end @@ -154,4 +190,108 @@ describe PagesDomain do # We test only existence of output, since the output is long it { is_expected.not_to be_empty } end + + describe '#update_daemon' do + it 'runs when the domain is created' do + domain = build(:pages_domain) + + expect(domain).to 
receive(:update_daemon) + + domain.save! + end + + it 'runs when the domain is destroyed' do + domain = create(:pages_domain) + + expect(domain).to receive(:update_daemon) + + domain.destroy! + end + + it 'delegates to Projects::UpdatePagesConfigurationService' do + service = instance_double('Projects::UpdatePagesConfigurationService') + expect(Projects::UpdatePagesConfigurationService).to receive(:new) { service } + expect(service).to receive(:execute) + + create(:pages_domain) + end + + context 'configuration updates when attributes change' do + set(:project1) { create(:project) } + set(:project2) { create(:project) } + set(:domain) { create(:pages_domain) } + + where(:attribute, :old_value, :new_value, :update_expected) do + now = Time.now + future = now + 1.day + + :project | nil | :project1 | true + :project | :project1 | :project1 | false + :project | :project1 | :project2 | true + :project | :project1 | nil | true + + # domain can't be set to nil + :domain | 'a.com' | 'a.com' | false + :domain | 'a.com' | 'b.com' | true + + # verification_code can't be set to nil + :verification_code | 'foo' | 'foo' | false + :verification_code | 'foo' | 'bar' | false + + :verified_at | nil | now | false + :verified_at | now | now | false + :verified_at | now | future | false + :verified_at | now | nil | false + + :enabled_until | nil | now | true + :enabled_until | now | now | false + :enabled_until | now | future | false + :enabled_until | now | nil | true + end + + with_them do + it 'runs if a relevant attribute has changed' do + a = old_value.is_a?(Symbol) ? send(old_value) : old_value + b = new_value.is_a?(Symbol) ? send(new_value) : new_value + + domain.update!(attribute => a) + + if update_expected + expect(domain).to receive(:update_daemon) + else + expect(domain).not_to receive(:update_daemon) + end + + domain.update!(attribute => b) + end + end + + context 'TLS configuration' do + set(:domain_with_tls) { create(:pages_domain, :with_key, :with_certificate) } + + let(:cert1) { domain_with_tls.certificate } + let(:cert2) { cert1 + ' ' } + let(:key1) { domain_with_tls.key } + let(:key2) { key1 + ' ' } + + it 'updates when added' do + expect(domain).to receive(:update_daemon) + + domain.update!(key: key1, certificate: cert1) + end + + it 'updates when changed' do + expect(domain_with_tls).to receive(:update_daemon) + + domain_with_tls.update!(key: key2, certificate: cert2) + end + + it 'updates when removed' do + expect(domain_with_tls).to receive(:update_daemon) + + domain_with_tls.update!(key: nil, certificate: nil) + end + end + end + end end diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb index bf39e8d7a39..6693e5783a5 100644 --- a/spec/models/project_services/prometheus_service_spec.rb +++ b/spec/models/project_services/prometheus_service_spec.rb @@ -13,17 +13,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do end describe 'Validations' do - context 'when service is active' do + context 'when manual_configuration is enabled' do before do - subject.active = true + subject.manual_configuration = true end it { is_expected.to validate_presence_of(:api_url) } end - context 'when service is inactive' do + context 'when manual configuration is disabled' do before do - subject.active = false + subject.manual_configuration = false end it { is_expected.not_to validate_presence_of(:api_url) } @@ -31,12 +31,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do end describe 
'#test' do + before do + service.manual_configuration = true + end + let!(:req_stub) { stub_prometheus_request(prometheus_query_url('1'), body: prometheus_value_body('vector')) } context 'success' do it 'reads the discovery endpoint' do + expect(service.test[:result]).to eq('Checked API endpoint') expect(service.test[:success]).to be_truthy - expect(req_stub).to have_been_requested + expect(req_stub).to have_been_requested.twice end end @@ -70,6 +75,25 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do end end + describe '#matched_metrics' do + let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricsQuery } + let(:client) { double(:client, label_values: nil) } + + context 'with valid data' do + subject { service.matched_metrics } + + before do + allow(service).to receive(:client).and_return(client) + synchronous_reactive_cache(service) + end + + it 'returns reactive data' do + expect(subject[:success]).to be_truthy + expect(subject[:data]).to eq([]) + end + end + end + describe '#deployment_metrics' do let(:deployment) { build_stubbed(:deployment) } let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery } @@ -83,7 +107,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do let(:fake_deployment_time) { 10 } before do - stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id) + stub_reactive_cache(service, prometheus_data, deployment_query, deployment.environment.id, deployment.id) end it 'returns reactive data' do @@ -96,13 +120,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do describe '#calculate_reactive_cache' do let(:environment) { create(:environment, slug: 'env-slug') } - - around do |example| - Timecop.freeze { example.run } + before do + service.manual_configuration = true + service.active = true end subject do - service.calculate_reactive_cache(environment_query.to_s, environment.id) + service.calculate_reactive_cache(environment_query.name, environment.id) + end + + around do |example| + Timecop.freeze { example.run } end context 'when service is inactive' do @@ -132,4 +160,193 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do end end end + + describe '#client' do + context 'manual configuration is enabled' do + let(:api_url) { 'http://some_url' } + before do + subject.manual_configuration = true + subject.api_url = api_url + end + + it 'returns simple rest client from api_url' do + expect(subject.client).to be_instance_of(Gitlab::PrometheusClient) + expect(subject.client.rest_client.url).to eq(api_url) + end + end + + context 'manual configuration is disabled' do + let!(:cluster_for_all) { create(:cluster, environment_scope: '*', projects: [project]) } + let!(:cluster_for_dev) { create(:cluster, environment_scope: 'dev', projects: [project]) } + + let!(:prometheus_for_dev) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_dev) } + let(:proxy_client) { double('proxy_client') } + + before do + service.manual_configuration = false + end + + context 'with cluster for all environments with prometheus installed' do + let!(:prometheus_for_all) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_all) } + + context 'without environment supplied' do + it 'returns client handling all environments' do + expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice + + expect(service.client).to be_instance_of(Gitlab::PrometheusClient) + 
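The Prometheus service examples above rely on reactive-cache spec helpers such as stub_reactive_cache and synchronous_reactive_cache. One plausible shape for the synchronous variant, which makes with_reactive_cache yield a freshly calculated value instead of hitting the cache, is sketched below (an assumption about the helper, not its actual definition):

    # Hypothetical reactive-cache spec helper; the real support module may differ.
    module ReactiveCachingHelpers
      def synchronous_reactive_cache(subject)
        allow(subject).to receive(:with_reactive_cache) do |*args, &block|
          block.call(subject.calculate_reactive_cache(*args))
        end
      end
    end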
expect(service.client.rest_client).to eq(proxy_client) + end + end + + context 'with dev environment supplied' do + let!(:environment) { create(:environment, project: project, name: 'dev') } + + it 'returns dev cluster client' do + expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice + + expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient) + expect(service.client(environment.id).rest_client).to eq(proxy_client) + end + end + + context 'with prod environment supplied' do + let!(:environment) { create(:environment, project: project, name: 'prod') } + + it 'returns dev cluster client' do + expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice + + expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient) + expect(service.client(environment.id).rest_client).to eq(proxy_client) + end + end + end + + context 'with cluster for all environments without prometheus installed' do + context 'without environment supplied' do + it 'raises PrometheusClient::Error because cluster was not found' do + expect { service.client }.to raise_error(Gitlab::PrometheusClient::Error, /couldn't find cluster with Prometheus installed/) + end + end + + context 'with dev environment supplied' do + let!(:environment) { create(:environment, project: project, name: 'dev') } + + it 'returns dev cluster client' do + expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice + + expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient) + expect(service.client(environment.id).rest_client).to eq(proxy_client) + end + end + + context 'with prod environment supplied' do + let!(:environment) { create(:environment, project: project, name: 'prod') } + + it 'raises PrometheusClient::Error because cluster was not found' do + expect { service.client }.to raise_error(Gitlab::PrometheusClient::Error, /couldn't find cluster with Prometheus installed/) + end + end + end + end + end + + describe '#prometheus_installed?' do + context 'clusters with installed prometheus' do + let!(:cluster) { create(:cluster, projects: [project]) } + let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) } + + it 'returns true' do + expect(service.prometheus_installed?).to be(true) + end + end + + context 'clusters without prometheus installed' do + let(:cluster) { create(:cluster, projects: [project]) } + let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) } + + it 'returns false' do + expect(service.prometheus_installed?).to be(false) + end + end + + context 'clusters without prometheus' do + let(:cluster) { create(:cluster, projects: [project]) } + + it 'returns false' do + expect(service.prometheus_installed?).to be(false) + end + end + + context 'no clusters' do + it 'returns false' do + expect(service.prometheus_installed?).to be(false) + end + end + end + + describe '#synchronize_service_state! 
before_save callback' do + context 'no clusters with prometheus are installed' do + context 'when service is inactive' do + before do + service.active = false + end + + it 'activates service when manual_configuration is enabled' do + expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true) + end + + it 'keeps service inactive when manual_configuration is disabled' do + expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(false) + end + end + + context 'when service is active' do + before do + service.active = true + end + + it 'keeps the service active when manual_configuration is enabled' do + expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true) + end + + it 'inactivates the service when manual_configuration is disabled' do + expect { service.update!(manual_configuration: false) }.to change { service.active }.from(true).to(false) + end + end + end + + context 'with prometheus installed in the cluster' do + before do + allow(service).to receive(:prometheus_installed?).and_return(true) + end + + context 'when service is inactive' do + before do + service.active = false + end + + it 'activates service when manual_configuration is enabled' do + expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true) + end + + it 'activates service when manual_configuration is disabled' do + expect { service.update!(manual_configuration: false) }.to change { service.active }.from(false).to(true) + end + end + + context 'when service is active' do + before do + service.active = true + end + + it 'keeps service active when manual_configuration is enabled' do + expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true) + end + + it 'keeps service active when manual_configuration is disabled' do + expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(true) + end + end + end + end end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 9dca7f326d3..f4faec9e52a 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -80,6 +80,7 @@ describe Project do it { is_expected.to have_many(:members_and_requesters) } it { is_expected.to have_many(:clusters) } it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') } + it { is_expected.to have_many(:lfs_file_locks) } context 'after initialized' do it "has a project_feature" do @@ -1472,6 +1473,13 @@ describe Project do expect(project.user_can_push_to_empty_repo?(user)).to be_truthy end + + it 'returns false when the repo is not empty' do + project.add_master(user) + expect(project).to receive(:empty_repo?).and_return(false) + + expect(project.user_can_push_to_empty_repo?(user)).to be_falsey + end end describe '#container_registry_url' do @@ -2091,7 +2099,7 @@ describe Project do context 'when the ref is a protected branch' do before do - create(:protected_branch, name: 'ref', project: project) + allow(project).to receive(:protected_for?).with('ref').and_return(true) end it_behaves_like 'ref is protected' @@ -2099,7 +2107,7 @@ describe Project do context 'when the ref is a protected tag' do before do - create(:protected_tag, name: 'ref', project: project) + allow(project).to receive(:protected_for?).with('ref').and_return(true) end it_behaves_like 'ref is protected' @@ -2124,6 +2132,8 @@ describe Project do context 'when the ref is a protected 
branch' do before do + allow(project).to receive(:repository).and_call_original + allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(true) create(:protected_branch, name: 'ref', project: project) end @@ -2134,6 +2144,8 @@ describe Project do context 'when the ref is a protected tag' do before do + allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(false) + allow(project).to receive_message_chain(:repository, :tag_exists?).and_return(true) create(:protected_tag, name: 'ref', project: project) end @@ -2487,7 +2499,8 @@ describe Project do end it 'is a no-op when there is no namespace' do - project.update_column(:namespace_id, nil) + project.namespace.delete + project.reload expect_any_instance_of(Projects::UpdatePagesConfigurationService).not_to receive(:execute) expect_any_instance_of(Gitlab::PagesTransfer).not_to receive(:rename_project) @@ -2503,6 +2516,7 @@ describe Project do end describe '#remove_exports' do + let(:legacy_project) { create(:project, :legacy_storage, :with_export) } let(:project) { create(:project, :with_export) } it 'removes the exports directory for the project' do @@ -2515,17 +2529,33 @@ describe Project do expect(File.exist?(project.export_path)).to be_falsy end - it 'is a no-op when there is no namespace' do - export_path = project.export_path - project.update_column(:namespace_id, nil) + it 'is a no-op on legacy projects when there is no namespace' do + export_path = legacy_project.export_path + + legacy_project.namespace.delete + legacy_project.reload expect(FileUtils).not_to receive(:rm_rf).with(export_path) - project.remove_exports + legacy_project.remove_exports expect(File.exist?(export_path)).to be_truthy end + it 'runs on hashed storage projects when there is no namespace' do + export_path = project.export_path + + project.namespace.delete + legacy_project.reload + + allow(FileUtils).to receive(:rm_rf).and_call_original + expect(FileUtils).to receive(:rm_rf).with(export_path).and_call_original + + project.remove_exports + + expect(File.exist?(export_path)).to be_falsy + end + it 'is run when the project is destroyed' do expect(project).to receive(:remove_exports).and_call_original @@ -2544,7 +2574,7 @@ describe Project do end context 'legacy storage' do - let(:project) { create(:project, :repository) } + let(:project) { create(:project, :repository, :legacy_storage) } let(:gitlab_shell) { Gitlab::Shell.new } let(:project_storage) { project.send(:storage) } @@ -2718,6 +2748,8 @@ describe Project do let(:project) { create(:project, :repository, skip_disk_validation: true) } let(:gitlab_shell) { Gitlab::Shell.new } let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) } + let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) } + let(:hashed_path) { File.join(hashed_prefix, hash) } before do stub_application_setting(hashed_storage_enabled: true) @@ -2743,14 +2775,12 @@ describe Project do describe '#base_dir' do it 'returns base_dir based on hash of project id' do - expect(project.base_dir).to eq("@hashed/#{hash[0..1]}/#{hash[2..3]}") + expect(project.base_dir).to eq(hashed_prefix) end end describe '#disk_path' do it 'returns disk_path based on hash of project id' do - hashed_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}" - expect(project.disk_path).to eq(hashed_path) end end @@ -2759,7 +2789,7 @@ describe Project do it 'delegates to gitlab_shell to ensure namespace is created' do allow(project).to receive(:gitlab_shell).and_return(gitlab_shell) - expect(gitlab_shell).to 
receive(:add_namespace).with(project.repository_storage_path, "@hashed/#{hash[0..1]}/#{hash[2..3]}") + expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, hashed_prefix) project.ensure_storage_path_exists end diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 02a5ee54262..38653e18306 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -242,23 +242,73 @@ describe Repository do end describe '#commits' do - it 'sets follow when path is a single path' do - expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice + context 'when neither the all flag nor a ref are specified' do + it 'returns every commit from default branch' do + expect(repository.commits(limit: 60).size).to eq(37) + end + end + + context 'when ref is passed' do + it 'returns every commit from the specified ref' do + expect(repository.commits('master', limit: 60).size).to eq(37) + end + + context 'when all' do + it 'returns every commit from the repository' do + expect(repository.commits('master', limit: 60, all: true).size).to eq(60) + end + end + + context 'with path' do + it 'sets follow when it is a single path' do + expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice - repository.commits('master', limit: 1, path: 'README.md') - repository.commits('master', limit: 1, path: ['README.md']) + repository.commits('master', limit: 1, path: 'README.md') + repository.commits('master', limit: 1, path: ['README.md']) + end + + it 'does not set follow when it is multiple paths' do + expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original + + repository.commits('master', limit: 1, path: ['README.md', 'CHANGELOG']) + end + end + + context 'without path' do + it 'does not set follow' do + expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original + + repository.commits('master', limit: 1) + end + end end - it 'does not set follow when path is multiple paths' do - expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original + context "when 'all' flag is set" do + it 'returns every commit from the repository' do + expect(repository.commits(all: true, limit: 60).size).to eq(60) + end + end + end - repository.commits('master', limit: 1, path: ['README.md', 'CHANGELOG']) + describe '#new_commits' do + let(:new_refs) do + double(:git_rev_list, new_refs: %w[ + c1acaa58bbcbc3eafe538cb8274ba387047b69f8 + 5937ac0a7beb003549fc5fd26fc247adbce4a52e + ]) end - it 'does not set follow when there are no paths' do - expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original + it 'delegates to Gitlab::Git::RevList' do + expect(Gitlab::Git::RevList).to receive(:new).with( + repository.raw, + newrev: 'aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj').and_return(new_refs) + + commits = repository.new_commits('aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj') - repository.commits('master', limit: 1) + expect(commits).to eq([ + repository.commit('c1acaa58bbcbc3eafe538cb8274ba387047b69f8'), + repository.commit('5937ac0a7beb003549fc5fd26fc247adbce4a52e') + ]) end end @@ -851,6 +901,18 @@ describe Repository do expect(repository.license_key).to be_nil end + it 'returns nil when the commit SHA does not exist' do + allow(repository.head_commit).to receive(:sha).and_return('1' * 40) + + 
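The #new_commits example above fully constrains the method's shape: build a Gitlab::Git::RevList for the raw repository with the given newrev, then map the returned SHAs onto commit objects. An implementation satisfying that spec might read (a sketch inferred from the example, not the actual Repository#new_commits):

    # Sketch inferred from the #new_commits spec above.
    def new_commits(newrev)
      refs = Gitlab::Git::RevList.new(raw, newrev: newrev).new_refs

      refs.map { |sha| commit(sha) }.compact
    end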
expect(repository.license_key).to be_nil + end + + it 'returns nil when master does not exist' do + repository.rm_branch(user, 'master') + + expect(repository.license_key).to be_nil + end + it 'returns the license key' do repository.create_file(user, 'LICENSE', Licensee::License.new('mit').content, diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index cb02d526a98..3531de244bd 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -496,6 +496,14 @@ describe User do user2.update_tracked_fields!(request) end.to change { user2.reload.current_sign_in_at } end + + it 'does not write if the DB is in read-only mode' do + expect(Gitlab::Database).to receive(:read_only?).and_return(true) + + expect do + user.update_tracked_fields!(request) + end.not_to change { user.reload.current_sign_in_at } + end end shared_context 'user keys' do @@ -893,6 +901,14 @@ describe User do end end + describe '.find_for_database_authentication' do + it 'strips whitespace from login' do + user = create(:user) + + expect(described_class.find_for_database_authentication({ login: " #{user.username} " })).to eq user + end + end + describe '.find_by_any_email' do it 'finds by primary email' do user = create(:user, email: 'foo@example.com') @@ -1586,14 +1602,37 @@ describe User do describe '#authorized_groups' do let!(:user) { create(:user) } let!(:private_group) { create(:group) } + let!(:child_group) { create(:group, parent: private_group) } + + let!(:project_group) { create(:group) } + let!(:project) { create(:project, group: project_group) } before do private_group.add_user(user, Gitlab::Access::MASTER) + project.add_master(user) end subject { user.authorized_groups } - it { is_expected.to eq([private_group]) } + it { is_expected.to contain_exactly private_group, project_group } + end + + describe '#membership_groups' do + let!(:user) { create(:user) } + let!(:parent_group) { create(:group) } + let!(:child_group) { create(:group, parent: parent_group) } + + before do + parent_group.add_user(user, Gitlab::Access::MASTER) + end + + subject { user.membership_groups } + + if Group.supports_nested_groups? 
+ it { is_expected.to contain_exactly parent_group, child_group } + else + it { is_expected.to contain_exactly parent_group } + end end describe '#authorized_projects', :delete do diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb index d53ba497ed1..b2b7721674c 100644 --- a/spec/models/wiki_page_spec.rb +++ b/spec/models/wiki_page_spec.rb @@ -188,162 +188,181 @@ describe WikiPage do end end - describe '#create', :skip_gitaly_mock do - context 'with valid attributes' do - it 'raises an error if a page with the same path already exists' do - create_page('New Page', 'content') - create_page('foo/bar', 'content') - expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError - expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError - - destroy_page('New Page') - destroy_page('bar', 'foo') - end + describe '#create' do + shared_examples 'create method' do + context 'with valid attributes' do + it 'raises an error if a page with the same path already exists' do + create_page('New Page', 'content') + create_page('foo/bar', 'content') + expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError + expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError + + destroy_page('New Page') + destroy_page('bar', 'foo') + end - it 'if the title is preceded by a / it is removed' do - create_page('/New Page', 'content') + it 'if the title is preceded by a / it is removed' do + create_page('/New Page', 'content') - expect(wiki.find_page('New Page')).not_to be_nil + expect(wiki.find_page('New Page')).not_to be_nil - destroy_page('New Page') + destroy_page('New Page') + end end end - end - # Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages - describe "#update", :skip_gitaly_mock do - before do - create_page("Update", "content") - @page = wiki.find_page("Update") + context 'when Gitaly is enabled' do + it_behaves_like 'create method' end - after do - destroy_page(@page.title, @page.directory) + context 'when Gitaly is disabled', :skip_gitaly_mock do + it_behaves_like 'create method' end + end - context "with valid attributes" do - it "updates the content of the page" do - new_content = "new content" - - @page.update(content: new_content) + describe "#update" do + shared_examples 'update method' do + before do + create_page("Update", "content") @page = wiki.find_page("Update") + end - expect(@page.content).to eq("new content") + after do + destroy_page(@page.title, @page.directory) end - it "updates the title of the page" do - new_title = "Index v.1.2.4" + context "with valid attributes" do + it "updates the content of the page" do + new_content = "new content" - @page.update(title: new_title) - @page = wiki.find_page(new_title) + @page.update(content: new_content) + @page = wiki.find_page("Update") - expect(@page.title).to eq(new_title) - end + expect(@page.content).to eq("new content") + end - it "returns true" do - expect(@page.update(content: "more content")).to be_truthy + it "updates the title of the page" do + new_title = "Index v.1.2.4" + + @page.update(title: new_title) + @page = wiki.find_page(new_title) + + expect(@page.title).to eq(new_title) + end + + it "returns true" do + expect(@page.update(content: "more content")).to be_truthy + end end - end - context 'with same last commit sha' do - it 'returns true' do - expect(@page.update(content: 'more content', 
last_commit_sha: @page.last_commit_sha)).to be_truthy + context 'with same last commit sha' do + it 'returns true' do + expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy + end end - end - context 'with different last commit sha' do - it 'raises exception' do - expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError) + context 'with different last commit sha' do + it 'raises exception' do + expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError) + end end - end - context 'when renaming a page' do - it 'raises an error if the page already exists' do - create_page('Existing Page', 'content') + context 'when renaming a page' do + it 'raises an error if the page already exists' do + create_page('Existing Page', 'content') - expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError) - expect(@page.title).to eq 'Update' - expect(@page.content).to eq 'new_content' + expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError) + expect(@page.title).to eq 'Update' + expect(@page.content).to eq 'new_content' - destroy_page('Existing Page') - end + destroy_page('Existing Page') + end - it 'updates the content and rename the file' do - new_title = 'Renamed Page' - new_content = 'updated content' + it 'updates the content and rename the file' do + new_title = 'Renamed Page' + new_content = 'updated content' - expect(@page.update(title: new_title, content: new_content)).to be_truthy + expect(@page.update(title: new_title, content: new_content)).to be_truthy - @page = wiki.find_page(new_title) + @page = wiki.find_page(new_title) - expect(@page).not_to be_nil - expect(@page.content).to eq new_content + expect(@page).not_to be_nil + expect(@page.content).to eq new_content + end end - end - - context 'when moving a page' do - it 'raises an error if the page already exists' do - create_page('foo/Existing Page', 'content') - expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError) - expect(@page.title).to eq 'Update' - expect(@page.content).to eq 'new_content' + context 'when moving a page' do + it 'raises an error if the page already exists' do + create_page('foo/Existing Page', 'content') - destroy_page('Existing Page', 'foo') - end + expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError) + expect(@page.title).to eq 'Update' + expect(@page.content).to eq 'new_content' - it 'updates the content and moves the file' do - new_title = 'foo/Other Page' - new_content = 'new_content' + destroy_page('Existing Page', 'foo') + end - expect(@page.update(title: new_title, content: new_content)).to be_truthy + it 'updates the content and moves the file' do + new_title = 'foo/Other Page' + new_content = 'new_content' - page = wiki.find_page(new_title) + expect(@page.update(title: new_title, content: new_content)).to be_truthy - expect(page).not_to be_nil - expect(page.content).to eq new_content - end + page = wiki.find_page(new_title) - context 'in subdir' do - before do - create_page('foo/Existing Page', 'content') - @page = wiki.find_page('foo/Existing Page') + expect(page).not_to be_nil + expect(page.content).to eq new_content end - it 'moves the page to the root folder if the title is preceded by /' do - expect(@page.slug).to 
eq 'foo/Existing-Page' - expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy - expect(@page.slug).to eq 'Existing-Page' + context 'in subdir' do + before do + create_page('foo/Existing Page', 'content') + @page = wiki.find_page('foo/Existing Page') + end + + it 'moves the page to the root folder if the title is preceded by /', :skip_gitaly_mock do + expect(@page.slug).to eq 'foo/Existing-Page' + expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy + expect(@page.slug).to eq 'Existing-Page' + end + + it 'does nothing if it has the same title' do + original_path = @page.slug + + expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy + expect(@page.slug).to eq original_path + end end - it 'does nothing if it has the same title' do - original_path = @page.slug + context 'in root dir' do + it 'does nothing if the title is preceded by /' do + original_path = @page.slug - expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy - expect(@page.slug).to eq original_path + expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy + expect(@page.slug).to eq original_path + end end end - context 'in root dir' do - it 'does nothing if the title is preceded by /' do - original_path = @page.slug + context "with invalid attributes" do + it 'aborts update if title blank' do + expect(@page.update(title: '', content: 'new_content')).to be_falsey + expect(@page.content).to eq 'new_content' - expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy - expect(@page.slug).to eq original_path + page = wiki.find_page('Update') + expect(page.content).to eq 'content' + + @page.title = 'Update' end end end - context "with invalid attributes" do - it 'aborts update if title blank' do - expect(@page.update(title: '', content: 'new_content')).to be_falsey - expect(@page.content).to eq 'new_content' - - page = wiki.find_page('Update') - expect(page.content).to eq 'content' + context 'when Gitaly is enabled' do + it_behaves_like 'update method' + end - @page.title = 'Update' - end + context 'when Gitaly is disabled', :skip_gitaly_mock do + it_behaves_like 'update method' end end diff --git a/spec/policies/issuable_policy_spec.rb b/spec/policies/issuable_policy_spec.rb index 2cf669e8191..d1bf98995e7 100644 --- a/spec/policies/issuable_policy_spec.rb +++ b/spec/policies/issuable_policy_spec.rb @@ -1,12 +1,14 @@ require 'spec_helper' describe IssuablePolicy, models: true do + let(:user) { create(:user) } + let(:project) { create(:project, :public) } + let(:issue) { create(:issue, project: project) } + let(:policies) { described_class.new(user, issue) } + describe '#rules' do context 'when discussion is locked for the issuable' do - let(:user) { create(:user) } - let(:project) { create(:project, :public) } let(:issue) { create(:issue, project: project, discussion_locked: true) } - let(:policies) { described_class.new(user, issue) } context 'when the user is not a project member' do it 'can not create a note' do diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb index a4af9361ea6..793b724bfca 100644 --- a/spec/policies/issue_policy_spec.rb +++ b/spec/policies/issue_policy_spec.rb @@ -30,41 +30,41 @@ describe IssuePolicy do end it 'does not allow non-members to read issues' do - expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(non_member, issue_no_assignee)).to 
be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows guests to read issues' do - expect(permissions(guest, issue)).to be_allowed(:read_issue) + expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue) - expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue) + expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue) end it 'allows reporters to read, update, and admin issues' do - expect(permissions(reporter, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows reporters from group links to read, update, and admin issues' do - expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows issue authors to read and update their issues' do - expect(permissions(author, issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(author, issue)).to be_disallowed(:admin_issue) - expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue) + expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue) end it 'allows issue assignees to read and update their issues' do - expect(permissions(assignee, issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(assignee, issue)).to be_disallowed(:admin_issue) - expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue) + expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue) end @@ -73,37 +73,37 @@ describe IssuePolicy do let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) } it 'does not allow non-members to read confidential issues' do - expect(permissions(non_member, confidential_issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(non_member, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(non_member, confidential_issue)).to 
be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(non_member, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'does not allow guests to read confidential issues' do - expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows reporters to read, update, and admin confidential issues' do - expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows reporters from group links to read, update, and admin confidential issues' do - expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows issue authors to read and update their confidential issues' do - expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(author, confidential_issue)).to be_disallowed(:admin_issue) - expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows issue assignees to read and update their confidential issues' do - expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(assignee, confidential_issue)).to be_disallowed(:admin_issue) - expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end end end @@ -123,36 +123,36 @@ describe IssuePolicy do end it 'allows guests to read issues' do - expect(permissions(guest, issue)).to be_allowed(:read_issue) + expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue) - 
expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue) + expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue) end it 'allows reporters to read, update, and admin issues' do - expect(permissions(reporter, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows reporters from group links to read, update, and admin issues' do - expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows issue authors to read and update their issues' do - expect(permissions(author, issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(author, issue)).to be_disallowed(:admin_issue) - expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue) + expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue) end it 'allows issue assignees to read and update their issues' do - expect(permissions(assignee, issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(assignee, issue)).to be_disallowed(:admin_issue) - expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue) + expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid) expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue) end @@ -161,32 +161,32 @@ describe IssuePolicy do let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) } it 'does not allow guests to read confidential issues' do - expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows reporters to read, update, and admin confidential issues' do - expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter, confidential_issue)).to 
be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows reporter from group links to read, update, and admin confidential issues' do - expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue) - expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) + expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows issue authors to read and update their confidential issues' do - expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(author, confidential_issue)).to be_disallowed(:admin_issue) - expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end it 'allows issue assignees to read and update their confidential issues' do - expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :update_issue) + expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue) expect(permissions(assignee, confidential_issue)).to be_disallowed(:admin_issue) - expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue) + expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue) end end end diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb index b70c8646a3d..50bb0899eba 100644 --- a/spec/policies/personal_snippet_policy_spec.rb +++ b/spec/policies/personal_snippet_policy_spec.rb @@ -1,5 +1,6 @@ require 'spec_helper' +# Snippet visibility scenarios are included in more details in spec/support/snippet_visibility.rb describe PersonalSnippetPolicy do let(:regular_user) { create(:user) } let(:external_user) { create(:user, :external) } diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb index cdba1b09fc1..4d32e06b553 100644 --- a/spec/policies/project_snippet_policy_spec.rb +++ b/spec/policies/project_snippet_policy_spec.rb @@ -1,5 +1,6 @@ require 'spec_helper' +# Snippet visibility scenarios are included in more details in spec/support/snippet_visibility.rb describe ProjectSnippetPolicy do let(:regular_user) { create(:user) } let(:external_user) { create(:user, :external) } diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb new file mode 100644 index 00000000000..f8c93d91ec5 --- /dev/null +++ b/spec/presenters/project_presenter_spec.rb @@ -0,0 +1,397 @@ +require 'spec_helper' + +describe ProjectPresenter do + let(:user) { create(:user) } + + describe '#license_short_name' do + let(:project) { create(:project) } + let(:presenter) { 
described_class.new(project, current_user: user) } + + context 'when project.repository has a license_key' do + it 'returns the nickname of the license if present' do + allow(project.repository).to receive(:license_key).and_return('agpl-3.0') + + expect(presenter.license_short_name).to eq('GNU AGPLv3') + end + + it 'returns the name of the license if nickname is not present' do + allow(project.repository).to receive(:license_key).and_return('mit') + + expect(presenter.license_short_name).to eq('MIT License') + end + end + + context 'when project.repository has no license_key but a license_blob' do + it 'returns LICENSE' do + allow(project.repository).to receive(:license_key).and_return(nil) + + expect(presenter.license_short_name).to eq('LICENSE') + end + end + end + + describe '#default_view' do + let(:presenter) { described_class.new(project, current_user: user) } + + context 'user not signed in' do + let(:user) { nil } + + context 'when repository is empty' do + let(:project) { create(:project_empty_repo, :public) } + + it 'returns activity if user has repository access' do + allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(true) + + expect(presenter.default_view).to eq('activity') + end + + it 'returns activity if user does not have repository access' do + allow(project).to receive(:can?).with(nil, :download_code, project).and_return(false) + + expect(presenter.default_view).to eq('activity') + end + end + + context 'when repository is not empty' do + let(:project) { create(:project, :public, :repository) } + + it 'returns files and readme if user has repository access' do + allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(true) + + expect(presenter.default_view).to eq('files') + end + + it 'returns activity if user does not have repository access' do + allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(false) + + expect(presenter.default_view).to eq('activity') + end + end + end + + context 'user signed in' do + let(:user) { create(:user, :readme) } + let(:project) { create(:project, :public, :repository) } + + context 'when the user is allowed to see the code' do + it 'returns the project view' do + allow(presenter).to receive(:can?).with(user, :download_code, project).and_return(true) + + expect(presenter.default_view).to eq('readme') + end + end + + context 'with wikis enabled and the right policy for the user' do + before do + project.project_feature.update_attribute(:issues_access_level, 0) + allow(presenter).to receive(:can?).with(user, :download_code, project).and_return(false) + end + + it 'returns wiki if the user has the right policy' do + allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(true) + + expect(presenter.default_view).to eq('wiki') + end + + it 'returns customize_workflow if the user does not have the right policy' do + allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(false) + + expect(presenter.default_view).to eq('customize_workflow') + end + end + + context 'with issues as a feature available' do + it 'return issues' do + allow(presenter).to receive(:can?).with(user, :download_code, project).and_return(false) + allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(false) + + expect(presenter.default_view).to eq('projects/issues/issues') + end + end + + context 'with no activity, no wikies and no issues' do + it 'returns customize_workflow as default' do + 
project.project_feature.update_attribute(:issues_access_level, 0) + allow(presenter).to receive(:can?).with(user, :download_code, project).and_return(false) + allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(false) + + expect(presenter.default_view).to eq('customize_workflow') + end + end + end + end + + describe '#can_current_user_push_code?' do + let(:project) { create(:project, :repository) } + let(:presenter) { described_class.new(project, current_user: user) } + + context 'empty repo' do + let(:project) { create(:project) } + + it 'returns true if user can push_code' do + project.add_developer(user) + + expect(presenter.can_current_user_push_code?).to be(true) + end + + it 'returns false if user cannot push_code' do + project.add_reporter(user) + + expect(presenter.can_current_user_push_code?).to be(false) + end + end + + context 'not empty repo' do + let(:project) { create(:project, :repository) } + + it 'returns true if user can push to default branch' do + project.add_developer(user) + + expect(presenter.can_current_user_push_code?).to be(true) + end + + it 'returns false if default branch is protected' do + project.add_developer(user) + create(:protected_branch, project: project, name: project.default_branch) + + expect(presenter.can_current_user_push_code?).to be(false) + end + end + end + + context 'statistics anchors' do + let(:project) { create(:project, :repository) } + let(:presenter) { described_class.new(project, current_user: user) } + + describe '#files_anchor_data' do + it 'returns files data' do + expect(presenter.files_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Files (0 Bytes)', + link: presenter.project_tree_path(project))) + end + end + + describe '#commits_anchor_data' do + it 'returns commits data' do + expect(presenter.commits_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Commits (0)', + link: presenter.project_commits_path(project, project.repository.root_ref))) + end + end + + describe '#branches_anchor_data' do + it 'returns branches data' do + expect(presenter.branches_anchor_data).to eq(OpenStruct.new(enabled: true, + label: "Branches (#{project.repository.branches.size})", + link: presenter.project_branches_path(project))) + end + end + + describe '#tags_anchor_data' do + it 'returns tags data' do + expect(presenter.tags_anchor_data).to eq(OpenStruct.new(enabled: true, + label: "Tags (#{project.repository.tags.size})", + link: presenter.project_tags_path(project))) + end + end + + describe '#new_file_anchor_data' do + it 'returns new file data if user can push' do + project.add_developer(user) + + expect(presenter.new_file_anchor_data).to eq(OpenStruct.new(enabled: false, + label: "New file", + link: presenter.project_new_blob_path(project, 'master'), + class_modifier: 'new')) + end + + it 'returns nil if user cannot push' do + expect(presenter.new_file_anchor_data).to be_nil + end + end + + describe '#readme_anchor_data' do + context 'when user can push and README does not exists' do + it 'returns anchor data' do + project.add_developer(user) + allow(project.repository).to receive(:readme).and_return(nil) + + expect(presenter.readme_anchor_data).to eq(OpenStruct.new(enabled: false, + label: 'Add Readme', + link: presenter.add_readme_path)) + end + end + + context 'when README exists' do + it 'returns anchor data' do + allow(project.repository).to receive(:readme).and_return(double(name: 'readme')) + + expect(presenter.readme_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Readme', + link: 
presenter.readme_path)) + end + end + end + + describe '#changelog_anchor_data' do + context 'when user can push and CHANGELOG does not exists' do + it 'returns anchor data' do + project.add_developer(user) + allow(project.repository).to receive(:changelog).and_return(nil) + + expect(presenter.changelog_anchor_data).to eq(OpenStruct.new(enabled: false, + label: 'Add Changelog', + link: presenter.add_changelog_path)) + end + end + + context 'when CHANGELOG exists' do + it 'returns anchor data' do + allow(project.repository).to receive(:changelog).and_return(double(name: 'foo')) + + expect(presenter.changelog_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Changelog', + link: presenter.changelog_path)) + end + end + end + + describe '#license_anchor_data' do + context 'when user can push and LICENSE does not exists' do + it 'returns anchor data' do + project.add_developer(user) + allow(project.repository).to receive(:license_blob).and_return(nil) + + expect(presenter.license_anchor_data).to eq(OpenStruct.new(enabled: false, + label: 'Add License', + link: presenter.add_license_path)) + end + end + + context 'when LICENSE exists' do + it 'returns anchor data' do + allow(project.repository).to receive(:license_blob).and_return(double(name: 'foo')) + + expect(presenter.license_anchor_data).to eq(OpenStruct.new(enabled: true, + label: presenter.license_short_name, + link: presenter.license_path)) + end + end + end + + describe '#contribution_guide_anchor_data' do + context 'when user can push and CONTRIBUTING does not exists' do + it 'returns anchor data' do + project.add_developer(user) + allow(project.repository).to receive(:contribution_guide).and_return(nil) + + expect(presenter.contribution_guide_anchor_data).to eq(OpenStruct.new(enabled: false, + label: 'Add Contribution guide', + link: presenter.add_contribution_guide_path)) + end + end + + context 'when CONTRIBUTING exists' do + it 'returns anchor data' do + allow(project.repository).to receive(:contribution_guide).and_return(double(name: 'foo')) + + expect(presenter.contribution_guide_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Contribution guide', + link: presenter.contribution_guide_path)) + end + end + end + + describe '#autodevops_anchor_data' do + context 'when Auto Devops is enabled' do + it 'returns anchor data' do + allow(project).to receive(:auto_devops_enabled?).and_return(true) + + expect(presenter.autodevops_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Auto DevOps enabled', + link: nil)) + end + end + + context 'when user can admin pipeline and CI yml does not exists' do + it 'returns anchor data' do + project.add_master(user) + allow(project).to receive(:auto_devops_enabled?).and_return(false) + allow(project.repository).to receive(:gitlab_ci_yml).and_return(nil) + + expect(presenter.autodevops_anchor_data).to eq(OpenStruct.new(enabled: false, + label: 'Enable Auto DevOps', + link: presenter.project_settings_ci_cd_path(project, anchor: 'js-general-pipeline-settings'))) + end + end + end + + describe '#kubernetes_cluster_anchor_data' do + context 'when user can create Kubernetes cluster' do + it 'returns link to cluster if only one exists' do + project.add_master(user) + cluster = create(:cluster, projects: [project]) + + expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Kubernetes configured', + link: presenter.project_cluster_path(project, cluster))) + end + + it 'returns link to clusters page if more than one exists' do + 
project.add_master(user) + create(:cluster, projects: [project]) + create(:cluster, projects: [project]) + + expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: true, + label: 'Kubernetes configured', + link: presenter.project_clusters_path(project))) + end + + it 'returns link to create a cluster if no cluster exists' do + project.add_master(user) + + expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: false, + label: 'Add Kubernetes cluster', + link: presenter.new_project_cluster_path(project))) + end + end + + context 'when user cannot create Kubernetes cluster' do + it 'returns nil' do + expect(presenter.kubernetes_cluster_anchor_data).to be_nil + end + end + end + + describe '#koding_anchor_data' do + it 'returns link to setup Koding if user can push and no koding YML exists' do + project.add_developer(user) + allow(project.repository).to receive(:koding_yml).and_return(nil) + allow(Gitlab::CurrentSettings).to receive(:koding_enabled?).and_return(true) + + expect(presenter.koding_anchor_data).to eq(OpenStruct.new(enabled: false, + label: 'Set up Koding', + link: presenter.add_koding_stack_path)) + end + + it 'returns nil if user cannot push' do + expect(presenter.koding_anchor_data).to be_nil + end + + it 'returns nil if koding is not enabled' do + project.add_developer(user) + allow(Gitlab::CurrentSettings).to receive(:koding_enabled?).and_return(false) + + expect(presenter.koding_anchor_data).to be_nil + end + + it 'returns nil if koding YML already exists' do + project.add_developer(user) + allow(project.repository).to receive(:koding_yml).and_return(double) + allow(Gitlab::CurrentSettings).to receive(:koding_enabled?).and_return(true) + + expect(presenter.koding_anchor_data).to be_nil + end + end + end +end diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index ff5f207487b..852f67db958 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -149,6 +149,18 @@ describe API::Commits do end end + context 'all optional parameter' do + it 'returns all project commits' do + commit_count = project.repository.count_commits(all: true) + + get api("/projects/#{project_id}/repository/commits?all=true", user) + + expect(response).to include_pagination_headers + expect(response.headers['X-Total']).to eq(commit_count.to_s) + expect(response.headers['X-Page']).to eql('1') + end + end + context 'with pagination params' do let(:page) { 1 } let(:per_page) { 5 } @@ -465,6 +477,72 @@ describe API::Commits do end end + describe 'GET /projects/:id/repository/commits/:sha/refs' do + let(:project) { create(:project, :public, :repository) } + let(:tag) { project.repository.find_tag('v1.1.0') } + let(:commit_id) { tag.dereferenced_target.id } + let(:route) { "/projects/#{project_id}/repository/commits/#{commit_id}/refs" } + + context 'when ref does not exist' do + let(:commit_id) { 'unknown' } + + it_behaves_like '404 response' do + let(:request) { get api(route, current_user) } + let(:message) { '404 Commit Not Found' } + end + end + + context 'when repository is disabled' do + include_context 'disabled repository' + + it_behaves_like '403 response' do + let(:request) { get api(route, current_user) } + end + end + + context 'for a valid commit' do + it 'returns all refs with no scope' do + get api(route, current_user), per_page: 100 + + refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]} + 
refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]}) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + expect(json_response).to be_an Array + expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs) + end + + it 'returns all refs' do + get api(route, current_user), type: 'all', per_page: 100 + + refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]} + refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]}) + + expect(response).to have_gitlab_http_status(200) + expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs) + end + + it 'returns the branch refs' do + get api(route, current_user), type: 'branch', per_page: 100 + + refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]} + + expect(response).to have_gitlab_http_status(200) + expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs) + end + + it 'returns the tag refs' do + get api(route, current_user), type: 'tag', per_page: 100 + + refs = project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]} + + expect(response).to have_gitlab_http_status(200) + expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs) + end + end + end + describe 'GET /projects/:id/repository/commits/:sha' do let(:commit) { project.repository.commit } let(:commit_id) { commit.id } @@ -632,6 +710,7 @@ describe API::Commits do get api(route, current_user) expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers expect(json_response.size).to be >= 1 expect(json_response.first.keys).to include 'diff' end diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index ea6b0a71849..827f4c04324 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe API::Internal do let(:user) { create(:user) } let(:key) { create(:key, user: user) } - let(:project) { create(:project, :repository) } + let(:project) { create(:project, :repository, :wiki_repo) } let(:secret_token) { Gitlab::Shell.secret_token } let(:gl_repository) { "project-#{project.id}" } let(:reference_counter) { double('ReferenceCounter') } @@ -366,20 +366,9 @@ describe API::Internal do end end - context 'project as /namespace/project' do - it do - push(key, project_with_repo_path('/' + project.full_path)) - - expect(response).to have_gitlab_http_status(200) - expect(json_response["status"]).to be_truthy - expect(json_response["repository_path"]).to eq(project.repository.path_to_repo) - expect(json_response["gl_repository"]).to eq("project-#{project.id}") - end - end - context 'project as namespace/project' do it do - push(key, project_with_repo_path(project.full_path)) + push(key, project) expect(response).to have_gitlab_http_status(200) expect(json_response["status"]).to be_truthy @@ -496,8 +485,10 @@ describe API::Internal do end context 'project does not exist' do - it do - pull(key, project_with_repo_path('gitlab/notexist')) + it 'returns a 200 response with status: false' do + project.destroy + + pull(key, project) expect(response).to have_gitlab_http_status(200) expect(json_response["status"]).to be_falsey @@ -569,6 +560,7 @@ describe API::Internal do end context 'the project path was changed' do + let(:project) { create(:project, :repository, :legacy_storage) } 
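The internal API hunks below this point stop passing raw `path_to_repo` values and instead send the project full path together with a `gl_repository` identifier (see the `gl_repository_for` helper further down). A minimal, illustrative sketch of that identifier convention, assuming the `project-<id>` form these specs assert and a matching wiki counterpart; the helper name and objects below are hypothetical, not GitLab's implementation:

require 'ostruct'

# Illustrative only: the specs in this diff expect gl_repository identifiers of the
# form "project-#{project.id}"; the wiki form below is an assumed counterpart.
def gl_repository_identifier(record)
  if record.respond_to?(:project)   # a wiki-like wrapper around a project
    "wiki-#{record.project.id}"
  else                              # a plain project
    "project-#{record.id}"
  end
end

project = OpenStruct.new(id: 42)            # hypothetical project
wiki    = OpenStruct.new(project: project)  # hypothetical project wiki

puts gl_repository_identifier(project) # => "project-42"
puts gl_repository_identifier(wiki)    # => "wiki-42"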
let!(:old_path_to_repo) { project.repository.path_to_repo } let!(:repository) { project.repository } @@ -858,9 +850,14 @@ describe API::Internal do end end - def project_with_repo_path(path) - double().tap do |fake_project| - allow(fake_project).to receive_message_chain('repository.path_to_repo' => path) + def gl_repository_for(project_or_wiki) + case project_or_wiki + when ProjectWiki + project_or_wiki.project.gl_repository(is_wiki: true) + when Project + project_or_wiki.gl_repository(is_wiki: false) + else + nil end end @@ -868,18 +865,8 @@ describe API::Internal do post( api("/internal/allowed"), key_id: key.id, - project: project.repository.path_to_repo, - action: 'git-upload-pack', - secret_token: secret_token, - protocol: protocol - ) - end - - def pull_with_path(key, path_to_repo, protocol = 'ssh') - post( - api("/internal/allowed"), - key_id: key.id, - project: path_to_repo, + project: project.full_path, + gl_repository: gl_repository_for(project), action: 'git-upload-pack', secret_token: secret_token, protocol: protocol @@ -891,20 +878,8 @@ describe API::Internal do api("/internal/allowed"), changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master', key_id: key.id, - project: project.repository.path_to_repo, - action: 'git-receive-pack', - secret_token: secret_token, - protocol: protocol, - env: env - ) - end - - def push_with_path(key, path_to_repo, protocol = 'ssh', env: nil) - post( - api("/internal/allowed"), - changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master', - key_id: key.id, - project: path_to_repo, + project: project.full_path, + gl_repository: gl_repository_for(project), action: 'git-receive-pack', secret_token: secret_token, protocol: protocol, @@ -917,7 +892,8 @@ describe API::Internal do api("/internal/allowed"), ref: 'master', key_id: key.id, - project: project.repository.path_to_repo, + project: project.full_path, + gl_repository: gl_repository_for(project), action: 'git-upload-archive', secret_token: secret_token, protocol: 'ssh' @@ -929,7 +905,7 @@ describe API::Internal do api("/internal/lfs_authenticate"), key_id: key_id, secret_token: secret_token, - project: project.repository.path_to_repo + project: project.full_path ) end end diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb index 13db40d21a5..d1569e5d650 100644 --- a/spec/requests/api/issues_spec.rb +++ b/spec/requests/api/issues_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe API::Issues, :mailer do +describe API::Issues do set(:user) { create(:user) } set(:project) do create(:project, :public, creator_id: user.id, namespace: user.namespace) @@ -932,18 +932,6 @@ describe API::Issues, :mailer do expect(json_response['error']).to eq('confidential is invalid') end - it "sends notifications for subscribers of newly added labels" do - label = project.labels.first - label.toggle_subscription(user2, project) - - perform_enqueued_jobs do - post api("/projects/#{project.id}/issues", user), - title: 'new issue', labels: label.title - end - - should_email(user2) - end - it "returns a 400 bad request if title not given" do post api("/projects/#{project.id}/issues", user), labels: 'label, label2' expect(response).to have_gitlab_http_status(400) @@ -1246,18 +1234,6 @@ describe API::Issues, :mailer do expect(json_response['labels']).to eq([label.title]) end - it "sends notifications for subscribers of newly added labels when issue is updated" do - label = create(:label, 
title: 'foo', color: '#FFAABB', project: project) - label.toggle_subscription(user2, project) - - perform_enqueued_jobs do - put api("/projects/#{project.id}/issues/#{issue.iid}", user), - title: 'updated title', labels: label.title - end - - should_email(user2) - end - it 'removes all labels' do put api("/projects/#{project.id}/issues/#{issue.iid}", user), labels: '' @@ -1380,7 +1356,7 @@ describe API::Issues, :mailer do end describe '/projects/:id/issues/:issue_iid/move' do - let!(:target_project) { create(:project, path: 'project2', creator_id: user.id, namespace: user.namespace ) } + let!(:target_project) { create(:project, creator_id: user.id, namespace: user.namespace ) } let!(:target_project2) { create(:project, creator_id: non_member.id, namespace: non_member.namespace ) } it 'moves an issue' do @@ -1441,7 +1417,7 @@ describe API::Issues, :mailer do context 'when source project does not exist' do it 'returns 404 when trying to move an issue' do - post api("/projects/12345/issues/#{issue.iid}/move", user), + post api("/projects/0/issues/#{issue.iid}/move", user), to_project_id: target_project.id expect(response).to have_gitlab_http_status(404) @@ -1452,7 +1428,7 @@ describe API::Issues, :mailer do context 'when target project does not exist' do it 'returns 404 when trying to move an issue' do post api("/projects/#{project.id}/issues/#{issue.iid}/move", user), - to_project_id: 12345 + to_project_id: 0 expect(response).to have_gitlab_http_status(404) end diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb index 14dd9da119d..658cedd6b5f 100644 --- a/spec/requests/api/merge_requests_spec.rb +++ b/spec/requests/api/merge_requests_spec.rb @@ -151,6 +151,26 @@ describe API::MergeRequests do expect(json_response.first['id']).to eq(merge_request3.id) end + context 'source_branch param' do + it 'returns merge requests with the given source branch' do + get api('/merge_requests', user), source_branch: merge_request_closed.source_branch, state: 'all' + + expect(json_response.length).to eq(2) + expect(json_response.map { |mr| mr['id'] }) + .to contain_exactly(merge_request_closed.id, merge_request_merged.id) + end + end + + context 'target_branch param' do + it 'returns merge requests with the given target branch' do + get api('/merge_requests', user), target_branch: merge_request_closed.target_branch, state: 'all' + + expect(json_response.length).to eq(2) + expect(json_response.map { |mr| mr['id'] }) + .to contain_exactly(merge_request_closed.id, merge_request_merged.id) + end + end + context 'search params' do before do merge_request.update(title: 'Search title', description: 'Search description') @@ -426,6 +446,26 @@ describe API::MergeRequests do expect(response_dates).to eq(response_dates.sort) end end + + context 'source_branch param' do + it 'returns merge requests with the given source branch' do + get api('/merge_requests', user), source_branch: merge_request_closed.source_branch, state: 'all' + + expect(json_response.length).to eq(2) + expect(json_response.map { |mr| mr['id'] }) + .to contain_exactly(merge_request_closed.id, merge_request_merged.id) + end + end + + context 'target_branch param' do + it 'returns merge requests with the given target branch' do + get api('/merge_requests', user), target_branch: merge_request_closed.target_branch, state: 'all' + + expect(json_response.length).to eq(2) + expect(json_response.map { |mr| mr['id'] }) + .to contain_exactly(merge_request_closed.id, merge_request_merged.id) + end + end end end diff 
--git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb index 5d01dc37f0e..025165622b7 100644 --- a/spec/requests/api/pages_domains_spec.rb +++ b/spec/requests/api/pages_domains_spec.rb @@ -1,7 +1,7 @@ require 'rails_helper' describe API::PagesDomains do - set(:project) { create(:project) } + set(:project) { create(:project, path: 'my.project') } set(:user) { create(:user) } set(:admin) { create(:admin) } @@ -16,6 +16,7 @@ describe API::PagesDomains do let(:route) { "/projects/#{project.id}/pages/domains" } let(:route_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain.domain}" } + let(:route_domain_path) { "/projects/#{project.path_with_namespace.gsub('/', '%2F')}/pages/domains/#{pages_domain.domain}" } let(:route_secure_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain_secure.domain}" } let(:route_expired_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain_expired.domain}" } let(:route_vacant_domain) { "/projects/#{project.id}/pages/domains/www.vacant-domain.test" } @@ -144,6 +145,16 @@ describe API::PagesDomains do expect(json_response['certificate']).to be_nil end + it 'returns pages domain with project path' do + get api(route_domain_path, user) + + expect(response).to have_gitlab_http_status(200) + expect(response).to match_response_schema('public_api/v4/pages_domain/detail') + expect(json_response['domain']).to eq(pages_domain.domain) + expect(json_response['url']).to eq(pages_domain.url) + expect(json_response['certificate']).to be_nil + end + it 'returns pages domain with a certificate' do get api(route_secure_domain, user) diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb index 1fd082ecc38..392cad667be 100644 --- a/spec/requests/api/project_hooks_spec.rb +++ b/spec/requests/api/project_hooks_spec.rb @@ -28,6 +28,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(json_response.count).to eq(1) expect(json_response.first['url']).to eq("http://example.com") expect(json_response.first['issues_events']).to eq(true) + expect(json_response.first['confidential_issues_events']).to eq(true) expect(json_response.first['push_events']).to eq(true) expect(json_response.first['merge_requests_events']).to eq(true) expect(json_response.first['tag_push_events']).to eq(true) @@ -56,6 +57,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(response).to have_gitlab_http_status(200) expect(json_response['url']).to eq(hook.url) expect(json_response['issues_events']).to eq(hook.issues_events) + expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events) expect(json_response['push_events']).to eq(hook.push_events) expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) @@ -90,13 +92,14 @@ describe API::ProjectHooks, 'ProjectHooks' do it "adds hook to project" do expect do post api("/projects/#{project.id}/hooks", user), - url: "http://example.com", issues_events: true, wiki_page_events: true, + url: "http://example.com", issues_events: true, confidential_issues_events: true, wiki_page_events: true, job_events: true end.to change {project.hooks.count}.by(1) expect(response).to have_gitlab_http_status(201) expect(json_response['url']).to eq('http://example.com') expect(json_response['issues_events']).to eq(true) + expect(json_response['confidential_issues_events']).to eq(true) expect(json_response['push_events']).to eq(true) 
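The project-hook hunks here add `confidential_issues_events` next to the existing event flags, both in the hook JSON the API exposes and in the parameters the POST/PUT endpoints accept. A hedged sketch of setting that flag when registering a hook through the v4 REST API; the host, token and project id are placeholders, not values from this diff:

require 'net/http'
require 'json'
require 'uri'

# Placeholder instance and project id; the endpoint and parameters mirror the
# POST /projects/:id/hooks calls exercised in the specs above.
uri = URI('https://gitlab.example.com/api/v4/projects/1/hooks')
req = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json',
                               'PRIVATE-TOKEN' => 'REDACTED')
req.body = {
  url: 'http://example.com',
  issues_events: true,
  confidential_issues_events: true, # flag added by this change
  wiki_page_events: true
}.to_json

res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(req) }
puts res.code # the specs expect 201 on successful creation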
expect(json_response['merge_requests_events']).to eq(false) expect(json_response['tag_push_events']).to eq(false) @@ -144,6 +147,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(response).to have_gitlab_http_status(200) expect(json_response['url']).to eq('http://example.org') expect(json_response['issues_events']).to eq(hook.issues_events) + expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events) expect(json_response['push_events']).to eq(false) expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb new file mode 100644 index 00000000000..987f6e26971 --- /dev/null +++ b/spec/requests/api/project_import_spec.rb @@ -0,0 +1,102 @@ +require 'spec_helper' + +describe API::ProjectImport do + let(:export_path) { "#{Dir.tmpdir}/project_export_spec" } + let(:user) { create(:user) } + let(:file) { File.join(Rails.root, 'spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') } + let(:namespace) { create(:group) } + before do + allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + + namespace.add_owner(user) + end + + after do + FileUtils.rm_rf(export_path, secure: true) + end + + describe 'POST /projects/import' do + it 'schedules an import using a namespace' do + stub_import(namespace) + + post api('/projects/import', user), path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id + + expect(response).to have_gitlab_http_status(201) + end + + it 'schedules an import using the namespace path' do + stub_import(namespace) + + post api('/projects/import', user), path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path + + expect(response).to have_gitlab_http_status(201) + end + + it 'schedules an import at the user namespace level' do + stub_import(user.namespace) + + post api('/projects/import', user), path: 'test-import2', file: fixture_file_upload(file) + + expect(response).to have_gitlab_http_status(201) + end + + it 'schedules an import at the user namespace level' do + expect_any_instance_of(Project).not_to receive(:import_schedule) + expect(::Projects::CreateService).not_to receive(:new) + + post api('/projects/import', user), namespace: 'nonexistent', path: 'test-import2', file: fixture_file_upload(file) + + expect(response).to have_gitlab_http_status(404) + expect(json_response['message']).to eq('404 Namespace Not Found') + end + + it 'does not schedule an import if the user has no permission to the namespace' do + expect_any_instance_of(Project).not_to receive(:import_schedule) + + post(api('/projects/import', create(:user)), + path: 'test-import3', + file: fixture_file_upload(file), + namespace: namespace.full_path) + + expect(response).to have_gitlab_http_status(404) + expect(json_response['message']).to eq('404 Namespace Not Found') + end + + it 'does not schedule an import if the user uploads no valid file' do + expect_any_instance_of(Project).not_to receive(:import_schedule) + + post api('/projects/import', user), path: 'test-import3', file: './random/test' + + expect(response).to have_gitlab_http_status(400) + expect(json_response['error']).to eq('file is invalid') + end + + def stub_import(namespace) + expect_any_instance_of(Project).to receive(:import_schedule) + expect(::Projects::CreateService).to receive(:new).with(user, 
hash_including(namespace_id: namespace.id)).and_call_original + end + end + + describe 'GET /projects/:id/import' do + it 'returns the import status' do + project = create(:project, import_status: 'started') + project.add_master(user) + + get api("/projects/#{project.id}/import", user) + + expect(response).to have_gitlab_http_status(200) + expect(json_response).to include('import_status' => 'started') + end + + it 'returns the import status and the error if failed' do + project = create(:project, import_status: 'failed', import_error: 'error') + project.add_master(user) + + get api("/projects/#{project.id}/import", user) + + expect(response).to have_gitlab_http_status(200) + expect(json_response).to include('import_status' => 'failed', + 'import_error' => 'error') + end + end +end diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index f11cd638d96..cee93f6ed14 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -7,7 +7,7 @@ describe API::Projects do let(:user3) { create(:user) } let(:admin) { create(:admin) } let(:project) { create(:project, namespace: user.namespace) } - let(:project2) { create(:project, path: 'project2', namespace: user.namespace) } + let(:project2) { create(:project, namespace: user.namespace) } let(:snippet) { create(:project_snippet, :public, author: user, project: project, title: 'example') } let(:project_member) { create(:project_member, :developer, user: user3, project: project) } let(:user4) { create(:user) } @@ -315,7 +315,7 @@ describe API::Projects do context 'and with all query parameters' do let!(:project5) { create(:project, :public, path: 'gitlab5', namespace: create(:namespace)) } - let!(:project6) { create(:project, :public, path: 'project6', namespace: user.namespace) } + let!(:project6) { create(:project, :public, namespace: user.namespace) } let!(:project7) { create(:project, :public, path: 'gitlab7', namespace: user.namespace) } let!(:project8) { create(:project, path: 'gitlab8', namespace: user.namespace) } let!(:project9) { create(:project, :public, path: 'gitlab9') } @@ -460,7 +460,7 @@ describe API::Projects do expect(response).to have_gitlab_http_status(201) project.each_pair do |k, v| - next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k) + next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled storage_version].include?(k) expect(json_response[k.to_s]).to eq(v) end @@ -622,12 +622,8 @@ describe API::Projects do end describe 'POST /projects/user/:id' do - before do - expect(project).to be_persisted - end - it 'creates new project without path but with name and return 201' do - expect { post api("/projects/user/#{user.id}", admin), name: 'Foo Project' }.to change {Project.count}.by(1) + expect { post api("/projects/user/#{user.id}", admin), name: 'Foo Project' }.to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(201) project = Project.last @@ -666,8 +662,9 @@ describe API::Projects do post api("/projects/user/#{user.id}", admin), project expect(response).to have_gitlab_http_status(201) + project.each_pair do |k, v| - next if %i[has_external_issue_tracker path].include?(k) + next if %i[has_external_issue_tracker path storage_version].include?(k) expect(json_response[k.to_s]).to eq(v) end diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index f10b6e43d09..72cafac3f90 100644 --- a/spec/requests/api/runner_spec.rb +++ 
b/spec/requests/api/runner_spec.rb @@ -122,6 +122,15 @@ describe API::Runner do end end end + + it "sets the runner's ip_address" do + post api('/runners'), + { token: registration_token }, + { 'REMOTE_ADDR' => '123.111.123.111' } + + expect(response).to have_gitlab_http_status 201 + expect(Ci::Runner.first.ip_address).to eq('123.111.123.111') + end end describe 'DELETE /api/v4/runners' do @@ -422,6 +431,15 @@ describe API::Runner do end end + it "sets the runner's ip_address" do + post api('/jobs/request'), + { token: runner.token }, + { 'User-Agent' => user_agent, 'REMOTE_ADDR' => '123.222.123.222' } + + expect(response).to have_gitlab_http_status 201 + expect(runner.reload.ip_address).to eq('123.222.123.222') + end + context 'when concurrently updating a job' do before do expect_any_instance_of(Ci::Build).to receive(:run!) @@ -682,7 +700,7 @@ describe API::Runner do context 'when tace is given' do it 'creates a trace artifact' do - allow_any_instance_of(BuildFinishedWorker).to receive(:perform).with(job.id) do + allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do CreateTraceArtifactWorker.new.perform(job.id) end diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb index a0026c6e11c..9052a18c60b 100644 --- a/spec/requests/api/search_spec.rb +++ b/spec/requests/api/search_spec.rb @@ -180,6 +180,18 @@ describe API::Search do it_behaves_like 'response is correct', schema: 'public_api/v4/milestones' end + + context 'for milestones scope with group path as id' do + before do + another_project = create(:project, :public) + create(:milestone, project: project, title: 'awesome milestone') + create(:milestone, project: another_project, title: 'awesome milestone other project') + + get api("/groups/#{CGI.escape(group.full_path)}/-/search", user), scope: 'milestones', search: 'awesome' + end + + it_behaves_like 'response is correct', schema: 'public_api/v4/milestones' + end end end @@ -283,7 +295,15 @@ describe API::Search do get api("/projects/#{repo_project.id}/-/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6' end - it_behaves_like 'response is correct', schema: 'public_api/v4/commits' + it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details' + end + + context 'for commits scope with project path as id' do + before do + get api("/projects/#{CGI.escape(repo_project.full_path)}/-/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6' + end + + it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details' end context 'for blobs scope' do diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb index 74198c8eb4f..b3e253befc6 100644 --- a/spec/requests/api/snippets_spec.rb +++ b/spec/requests/api/snippets_spec.rb @@ -32,6 +32,27 @@ describe API::Snippets do expect(json_response).to be_an Array expect(json_response.size).to eq(0) end + + it 'returns 404 for non-authenticated' do + create(:personal_snippet, :internal) + + get api("/snippets/") + + expect(response).to have_gitlab_http_status(401) + end + + it 'does not return snippets related to a project with disable feature visibility' do + project = create(:project) + create(:project_member, project: project, user: user) + public_snippet = create(:personal_snippet, :public, author: user, project: project) + project.project_feature.update_attribute(:snippets_access_level, 0) + + get api("/snippets/", user) + + json_response.each do |snippet| + expect(snippet["id"]).not_to 
eq(public_snippet.id) + end + end end describe 'GET /snippets/public' do diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb index fb3a33cadff..2ee8d150dc8 100644 --- a/spec/requests/api/todos_spec.rb +++ b/spec/requests/api/todos_spec.rb @@ -129,6 +129,12 @@ describe API::Todos do post api("/todos/#{pending_1.id}/mark_as_done", john_doe) end + + it 'returns 404 if the todo does not belong to the current user' do + post api("/todos/#{pending_1.id}/mark_as_done", author_1) + + expect(response.status).to eq(404) + end end end diff --git a/spec/requests/api/v3/issues_spec.rb b/spec/requests/api/v3/issues_spec.rb index 0dd6d673625..11b5469be7b 100644 --- a/spec/requests/api/v3/issues_spec.rb +++ b/spec/requests/api/v3/issues_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe API::V3::Issues, :mailer do +describe API::V3::Issues do set(:user) { create(:user) } set(:user2) { create(:user) } set(:non_member) { create(:user) } @@ -780,18 +780,6 @@ describe API::V3::Issues, :mailer do expect(json_response['error']).to eq('confidential is invalid') end - it "sends notifications for subscribers of newly added labels" do - label = project.labels.first - label.toggle_subscription(user2, project) - - perform_enqueued_jobs do - post v3_api("/projects/#{project.id}/issues", user), - title: 'new issue', labels: label.title - end - - should_email(user2) - end - it "returns a 400 bad request if title not given" do post v3_api("/projects/#{project.id}/issues", user), labels: 'label, label2' @@ -1045,18 +1033,6 @@ describe API::V3::Issues, :mailer do expect(json_response['labels']).to eq([label.title]) end - it "sends notifications for subscribers of newly added labels when issue is updated" do - label = create(:label, title: 'foo', color: '#FFAABB', project: project) - label.toggle_subscription(user2, project) - - perform_enqueued_jobs do - put v3_api("/projects/#{project.id}/issues/#{issue.id}", user), - title: 'updated title', labels: label.title - end - - should_email(user2) - end - it 'removes all labels' do put v3_api("/projects/#{project.id}/issues/#{issue.id}", user), labels: '' @@ -1191,7 +1167,7 @@ describe API::V3::Issues, :mailer do end describe '/projects/:id/issues/:issue_id/move' do - let!(:target_project) { create(:project, path: 'project2', creator_id: user.id, namespace: user.namespace ) } + let!(:target_project) { create(:project, creator_id: user.id, namespace: user.namespace ) } let!(:target_project2) { create(:project, creator_id: non_member.id, namespace: non_member.namespace ) } it 'moves an issue' do @@ -1242,7 +1218,7 @@ describe API::V3::Issues, :mailer do context 'when source project does not exist' do it 'returns 404 when trying to move an issue' do - post v3_api("/projects/123/issues/#{issue.id}/move", user), + post v3_api("/projects/0/issues/#{issue.id}/move", user), to_project_id: target_project.id expect(response).to have_gitlab_http_status(404) @@ -1253,7 +1229,7 @@ describe API::V3::Issues, :mailer do context 'when target project does not exist' do it 'returns 404 when trying to move an issue' do post v3_api("/projects/#{project.id}/issues/#{issue.id}/move", user), - to_project_id: 123 + to_project_id: 0 expect(response).to have_gitlab_http_status(404) end diff --git a/spec/requests/api/v3/project_hooks_spec.rb b/spec/requests/api/v3/project_hooks_spec.rb index 248ae97f875..8f6a2330d25 100644 --- a/spec/requests/api/v3/project_hooks_spec.rb +++ b/spec/requests/api/v3/project_hooks_spec.rb @@ -27,6 +27,7 @@ describe API::ProjectHooks, 
'ProjectHooks' do expect(json_response.count).to eq(1) expect(json_response.first['url']).to eq("http://example.com") expect(json_response.first['issues_events']).to eq(true) + expect(json_response.first['confidential_issues_events']).to eq(true) expect(json_response.first['push_events']).to eq(true) expect(json_response.first['merge_requests_events']).to eq(true) expect(json_response.first['tag_push_events']).to eq(true) @@ -54,6 +55,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(response).to have_gitlab_http_status(200) expect(json_response['url']).to eq(hook.url) expect(json_response['issues_events']).to eq(hook.issues_events) + expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events) expect(json_response['push_events']).to eq(hook.push_events) expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) @@ -87,12 +89,13 @@ describe API::ProjectHooks, 'ProjectHooks' do it "adds hook to project" do expect do post v3_api("/projects/#{project.id}/hooks", user), - url: "http://example.com", issues_events: true, wiki_page_events: true, build_events: true + url: "http://example.com", issues_events: true, confidential_issues_events: true, wiki_page_events: true, build_events: true end.to change {project.hooks.count}.by(1) expect(response).to have_gitlab_http_status(201) expect(json_response['url']).to eq('http://example.com') expect(json_response['issues_events']).to eq(true) + expect(json_response['confidential_issues_events']).to eq(true) expect(json_response['push_events']).to eq(true) expect(json_response['merge_requests_events']).to eq(false) expect(json_response['tag_push_events']).to eq(false) @@ -139,6 +142,7 @@ describe API::ProjectHooks, 'ProjectHooks' do expect(response).to have_gitlab_http_status(200) expect(json_response['url']).to eq('http://example.org') expect(json_response['issues_events']).to eq(hook.issues_events) + expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events) expect(json_response['push_events']).to eq(false) expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events) expect(json_response['tag_push_events']).to eq(hook.tag_push_events) diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb index 5d99d9495f3..4c25bd935c6 100644 --- a/spec/requests/api/v3/projects_spec.rb +++ b/spec/requests/api/v3/projects_spec.rb @@ -6,7 +6,7 @@ describe API::V3::Projects do let(:user3) { create(:user) } let(:admin) { create(:admin) } let(:project) { create(:project, creator_id: user.id, namespace: user.namespace) } - let(:project2) { create(:project, path: 'project2', creator_id: user.id, namespace: user.namespace) } + let(:project2) { create(:project, creator_id: user.id, namespace: user.namespace) } let(:snippet) { create(:project_snippet, :public, author: user, project: project, title: 'example') } let(:project_member) { create(:project_member, :developer, user: user3, project: project) } let(:user4) { create(:user) } @@ -401,7 +401,7 @@ describe API::V3::Projects do post v3_api('/projects', user), project project.each_pair do |k, v| - next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k) + next if %i[storage_version has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k) expect(json_response[k.to_s]).to eq(v) end @@ -545,7 +545,7 @@ describe API::V3::Projects do 
expect(response).to have_gitlab_http_status(201) project.each_pair do |k, v| - next if %i[has_external_issue_tracker path].include?(k) + next if %i[storage_version has_external_issue_tracker path].include?(k) expect(json_response[k.to_s]).to eq(v) end diff --git a/spec/requests/api/v3/todos_spec.rb b/spec/requests/api/v3/todos_spec.rb index 53fd962272a..ea648e3917f 100644 --- a/spec/requests/api/v3/todos_spec.rb +++ b/spec/requests/api/v3/todos_spec.rb @@ -38,6 +38,12 @@ describe API::V3::Todos do delete v3_api("/todos/#{pending_1.id}", john_doe) end + + it 'returns 404 if the todo does not belong to the current user' do + delete v3_api("/todos/#{pending_1.id}", author_1) + + expect(response.status).to eq(404) + end end end diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb index 2e2dccdafad..0467e0251b3 100644 --- a/spec/requests/git_http_spec.rb +++ b/spec/requests/git_http_spec.rb @@ -150,7 +150,7 @@ describe 'Git HTTP requests' do let(:path) { "/#{wiki.repository.full_path}.git" } context "when the project is public" do - let(:project) { create(:project, :repository, :public, :wiki_enabled) } + let(:project) { create(:project, :wiki_repo, :public, :wiki_enabled) } it_behaves_like 'pushes require Basic HTTP Authentication' @@ -163,7 +163,7 @@ describe 'Git HTTP requests' do download(path) do |response| json_body = ActiveSupport::JSON.decode(response.body) - expect(json_body['RepoPath']).to include(wiki.repository.full_path) + expect(json_body['RepoPath']).to include(wiki.repository.disk_path) end end end @@ -177,7 +177,7 @@ describe 'Git HTTP requests' do end context 'but the repo is disabled' do - let(:project) { create(:project, :repository, :public, :repository_disabled, :wiki_enabled) } + let(:project) { create(:project, :wiki_repo, :public, :repository_disabled, :wiki_enabled) } it_behaves_like 'pulls are allowed' it_behaves_like 'pushes are allowed' @@ -198,7 +198,7 @@ describe 'Git HTTP requests' do end context "when the project is private" do - let(:project) { create(:project, :repository, :private, :wiki_enabled) } + let(:project) { create(:project, :wiki_repo, :private, :wiki_enabled) } it_behaves_like 'pulls require Basic HTTP Authentication' it_behaves_like 'pushes require Basic HTTP Authentication' @@ -210,7 +210,7 @@ describe 'Git HTTP requests' do end context 'but the repo is disabled' do - let(:project) { create(:project, :repository, :private, :repository_disabled, :wiki_enabled) } + let(:project) { create(:project, :wiki_repo, :private, :repository_disabled, :wiki_enabled) } it 'allows clones' do download(path, user: user.username, password: user.password) do |response| @@ -506,8 +506,8 @@ describe 'Git HTTP requests' do context 'when LDAP is configured' do before do - allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true) - allow_any_instance_of(Gitlab::LDAP::Authentication) + allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) + allow_any_instance_of(Gitlab::Auth::LDAP::Authentication) .to receive(:login).and_return(nil) end @@ -597,7 +597,7 @@ describe 'Git HTTP requests' do context "when a gitlab ci token is provided" do let(:project) { create(:project, :repository) } let(:build) { create(:ci_build, :running) } - let(:other_project) { create(:project) } + let(:other_project) { create(:project, :repository) } before do build.update!(project: project) # can't associate it on factory create @@ -648,10 +648,10 @@ describe 'Git HTTP requests' do context 'when the repo does not exist' do let(:project) { 
create(:project) } - it 'rejects pulls with 403 Forbidden' do + it 'rejects pulls with 404 Not Found' do clone_get path, env - expect(response).to have_gitlab_http_status(:forbidden) + expect(response).to have_gitlab_http_status(:not_found) expect(response.body).to eq(git_access_error(:no_repo)) end end @@ -795,9 +795,9 @@ describe 'Git HTTP requests' do let(:path) { 'doesnt/exist.git' } before do - allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true) - allow(Gitlab::LDAP::Authentication).to receive(:login).and_return(nil) - allow(Gitlab::LDAP::Authentication).to receive(:login).with(user.username, user.password).and_return(user) + allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) + allow(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(nil) + allow(Gitlab::Auth::LDAP::Authentication).to receive(:login).with(user.username, user.password).and_return(user) end it_behaves_like 'pulls require Basic HTTP Authentication' diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index 930ef49b7f3..971b45c411d 100644 --- a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -1208,7 +1208,7 @@ describe 'Git LFS API and storage' do end def post_lfs_json(url, body = nil, headers = nil) - post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => 'application/vnd.git-lfs+json')) + post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE)) end def json_response diff --git a/spec/requests/lfs_locks_api_spec.rb b/spec/requests/lfs_locks_api_spec.rb new file mode 100644 index 00000000000..e44a11a7232 --- /dev/null +++ b/spec/requests/lfs_locks_api_spec.rb @@ -0,0 +1,159 @@ +require 'spec_helper' + +describe 'Git LFS File Locking API' do + include WorkhorseHelpers + + let(:project) { create(:project) } + let(:master) { create(:user) } + let(:developer) { create(:user) } + let(:guest) { create(:user) } + let(:path) { 'README.md' } + let(:headers) do + { + 'Authorization' => authorization + }.compact + end + + shared_examples 'unauthorized request' do + context 'when user is not authorized' do + let(:authorization) { authorize_user(guest) } + + it 'returns a forbidden 403 response' do + post_lfs_json url, body, headers + + expect(response).to have_gitlab_http_status(403) + end + end + end + + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + + project.add_developer(master) + project.add_developer(developer) + project.add_guest(guest) + end + + describe 'Create File Lock endpoint' do + let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" } + let(:authorization) { authorize_user(developer) } + let(:body) { { path: path } } + + include_examples 'unauthorized request' + + context 'with an existent lock' do + before do + lock_file('README.md', developer) + end + + it 'return an error message' do + post_lfs_json url, body, headers + + expect(response).to have_gitlab_http_status(409) + + expect(json_response.keys).to match_array(%w(lock message documentation_url)) + expect(json_response['message']).to match(/already locked/) + end + + it 'returns the existen lock' do + post_lfs_json url, body, headers + + expect(json_response['lock']['path']).to eq('README.md') + end + end + + context 'without an existent lock' do + it 'creates the lock' do + post_lfs_json url, body, headers + + expect(response).to have_gitlab_http_status(201) + + expect(json_response['lock'].keys).to match_array(%w(id path locked_at owner)) + end + end + end + + describe 
'Listing File Locks endpoint' do + let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" } + let(:authorization) { authorize_user(developer) } + + include_examples 'unauthorized request' + + it 'returns the list of locked files' do + lock_file('README.md', developer) + lock_file('README', developer) + + do_get url, nil, headers + + expect(response).to have_gitlab_http_status(200) + + expect(json_response['locks'].size).to eq(2) + expect(json_response['locks'].first.keys).to match_array(%w(id path locked_at owner)) + end + end + + describe 'List File Locks for verification endpoint' do + let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/verify" } + let(:authorization) { authorize_user(developer) } + + include_examples 'unauthorized request' + + it 'returns the list of locked files grouped by owner' do + lock_file('README.md', master) + lock_file('README', developer) + + post_lfs_json url, nil, headers + + expect(response).to have_gitlab_http_status(200) + + expect(json_response['ours'].size).to eq(1) + expect(json_response['ours'].first['path']).to eq('README') + expect(json_response['theirs'].size).to eq(1) + expect(json_response['theirs'].first['path']).to eq('README.md') + end + end + + describe 'Delete File Lock endpoint' do + let!(:lock) { lock_file('README.md', developer) } + let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/#{lock[:id]}/unlock" } + let(:authorization) { authorize_user(developer) } + + include_examples 'unauthorized request' + + context 'with an existent lock' do + it 'deletes the lock' do + post_lfs_json url, nil, headers + + expect(response).to have_gitlab_http_status(200) + end + + it 'returns the deleted lock' do + post_lfs_json url, nil, headers + + expect(json_response['lock'].keys).to match_array(%w(id path locked_at owner)) + end + end + end + + def lock_file(path, author) + result = Lfs::LockFileService.new(project, author, { path: path }).execute + + result[:lock] + end + + def authorize_user(user) + ActionController::HttpAuthentication::Basic.encode_credentials(user.username, user.password) + end + + def post_lfs_json(url, body = nil, headers = nil) + post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE)) + end + + def do_get(url, params = nil, headers = nil) + get(url, (params || {}), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE)) + end + + def json_response + @json_response ||= JSON.parse(response.body) + end +end diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb index 1a5ad9b04e4..6bed8e812c0 100644 --- a/spec/requests/openid_connect_spec.rb +++ b/spec/requests/openid_connect_spec.rb @@ -65,13 +65,23 @@ describe 'OpenID Connect requests' do ) end - let(:public_email) { build :email, email: 'public@example.com' } - let(:private_email) { build :email, email: 'private@example.com' } + let!(:public_email) { build :email, email: 'public@example.com' } + let!(:private_email) { build :email, email: 'private@example.com' } - it 'includes all user information' do + let!(:group1) { create :group } + let!(:group2) { create :group } + let!(:group3) { create :group, parent: group2 } + let!(:group4) { create :group, parent: group3 } + + before do + group1.add_user(user, GroupMember::OWNER) + group3.add_user(user, Gitlab::Access::DEVELOPER) + end + + it 'includes all user information and group memberships' do request_user_info - expect(json_response).to eq({ + expect(json_response).to match(a_hash_including({ 'sub' => hashed_subject, 'name' => 'Alice', 
'nickname' => 'alice', @@ -79,8 +89,13 @@ describe 'OpenID Connect requests' do 'email_verified' => true, 'website' => 'https://example.com', 'profile' => 'http://localhost/alice', - 'picture' => "http://localhost/uploads/-/system/user/avatar/#{user.id}/dk.png" - }) + 'picture' => "http://localhost/uploads/-/system/user/avatar/#{user.id}/dk.png", + 'groups' => anything + })) + + expected_groups = [group1.full_path, group3.full_path] + expected_groups << group4.full_path if Group.supports_nested_groups? + expect(json_response['groups']).to match_array(expected_groups) end end diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb index 98f70e2101b..eef860821e5 100644 --- a/spec/requests/projects/cycle_analytics_events_spec.rb +++ b/spec/requests/projects/cycle_analytics_events_spec.rb @@ -15,7 +15,7 @@ describe 'cycle analytics events' do end end - deploy_master + deploy_master(user, project) login_as(user) end @@ -119,7 +119,7 @@ describe 'cycle analytics events' do def create_cycle milestone = create(:milestone, project: project) issue.update(milestone: milestone) - mr = create_merge_request_closing_issue(issue, commit_message: "References #{issue.to_reference}") + mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) pipeline.run @@ -127,7 +127,7 @@ describe 'cycle analytics events' do create(:ci_build, pipeline: pipeline, status: :success, author: user) create(:ci_build, pipeline: pipeline, status: :success, author: user) - merge_merge_requests_closing_issue(issue) + merge_merge_requests_closing_issue(user, project, issue) ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash) end diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb index 0fec14d0cce..b18e922b063 100644 --- a/spec/requests/rack_attack_global_spec.rb +++ b/spec/requests/rack_attack_global_spec.rb @@ -22,6 +22,7 @@ describe 'Rack Attack global throttles' do let(:url_that_does_not_require_authentication) { '/users/sign_in' } let(:url_that_requires_authentication) { '/dashboard/snippets' } + let(:url_api_internal) { '/api/v4/internal/check' } let(:api_partial_url) { '/todos' } around do |example| @@ -172,6 +173,15 @@ describe 'Rack Attack global throttles' do get url_that_does_not_require_authentication expect(response).to have_http_status 200 end + + context 'when the request is to the api internal endpoints' do + it 'allows requests over the rate limit' do + (1 + requests_per_period).times do + get url_api_internal, secret_token: Gitlab::Shell.secret_token + expect(response).to have_http_status 200 + end + end + end end context 'when the throttle is disabled' do diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb index 91aefa84d0e..56d025f0176 100644 --- a/spec/routing/routing_spec.rb +++ b/spec/routing/routing_spec.rb @@ -37,6 +37,22 @@ describe UsersController, "routing" do it "to #calendar_activities" do expect(get("/users/User/calendar_activities")).to route_to('users#calendar_activities', username: 'User') end + + describe 'redirect alias routes' do + include RSpec::Rails::RequestExampleGroup + + it '/u/user1 redirects to /user1' do + expect(get("/u/user1")).to redirect_to('/user1') + end + + it '/u/user1/groups redirects to /user1/groups' do + 
expect(get("/u/user1/groups")).to redirect_to('/users/user1/groups') + end + + it '/u/user1/projects redirects to /user1/projects' do + expect(get("/u/user1/projects")).to redirect_to('/users/user1/projects') + end + end end # search GET /search(.:format) search#show diff --git a/spec/serializers/cluster_application_entity_spec.rb b/spec/serializers/cluster_application_entity_spec.rb index b5a55b4ef6e..852b6af9f7f 100644 --- a/spec/serializers/cluster_application_entity_spec.rb +++ b/spec/serializers/cluster_application_entity_spec.rb @@ -26,5 +26,19 @@ describe ClusterApplicationEntity do expect(subject[:status_reason]).to eq(application.status_reason) end end + + context 'for ingress application' do + let(:application) do + build( + :clusters_applications_ingress, + :installed, + external_ip: '111.222.111.222' + ) + end + + it 'includes external_ip' do + expect(subject[:external_ip]).to eq('111.222.111.222') + end + end end end diff --git a/spec/serializers/diff_file_entity_spec.rb b/spec/serializers/diff_file_entity_spec.rb new file mode 100644 index 00000000000..45d7c703df3 --- /dev/null +++ b/spec/serializers/diff_file_entity_spec.rb @@ -0,0 +1,24 @@ +require 'spec_helper' + +describe DiffFileEntity do + include RepoHelpers + + let(:project) { create(:project, :repository) } + let(:repository) { project.repository } + let(:commit) { project.commit(sample_commit.id) } + let(:diff_refs) { commit.diff_refs } + let(:diff) { commit.raw_diffs.first } + let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs: diff_refs, repository: repository) } + let(:entity) { described_class.new(diff_file) } + + subject { entity.as_json } + + it 'exposes correct attributes' do + expect(subject).to include( + :submodule, :submodule_link, :file_path, + :deleted_file, :old_path, :new_path, :mode_changed, + :a_mode, :b_mode, :text, :old_path_html, + :new_path_html + ) + end +end diff --git a/spec/serializers/discussion_entity_spec.rb b/spec/serializers/discussion_entity_spec.rb new file mode 100644 index 00000000000..7ee8e38af1c --- /dev/null +++ b/spec/serializers/discussion_entity_spec.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe DiscussionEntity do + include RepoHelpers + + let(:user) { create(:user) } + let(:note) { create(:discussion_note_on_merge_request) } + let(:discussion) { note.discussion } + let(:request) { double('request') } + let(:controller) { double('controller') } + let(:entity) { described_class.new(discussion, request: request, context: controller) } + + subject { entity.as_json } + + before do + allow(controller).to receive(:render_to_string) + allow(request).to receive(:current_user).and_return(user) + allow(request).to receive(:noteable).and_return(note.noteable) + end + + it 'exposes correct attributes' do + expect(subject).to include( + :id, :expanded, :notes, :individual_note, + :resolvable, :resolved, :resolve_path, + :resolve_with_issue_path, :diff_discussion + ) + end + + context 'when diff file is present' do + let(:note) { create(:diff_note_on_merge_request) } + + it 'exposes diff file attributes' do + expect(subject).to include(:diff_file, :truncated_diff_lines, :image_diff_html) + end + end +end diff --git a/spec/serializers/lfs_file_lock_entity_spec.rb b/spec/serializers/lfs_file_lock_entity_spec.rb new file mode 100644 index 00000000000..5919f473a90 --- /dev/null +++ b/spec/serializers/lfs_file_lock_entity_spec.rb @@ -0,0 +1,19 @@ +require 'spec_helper' + +describe LfsFileLockEntity do + let(:user) { create(:user) } + let(:resource) { create(:lfs_file_lock, user: 
user) } + + let(:request) { double('request', current_user: user) } + + subject { described_class.new(resource, request: request).as_json } + + it 'exposes basic attrs of the lock' do + expect(subject).to include(:id, :path, :locked_at) + end + + it 'exposes the owner info' do + expect(subject).to include(:owner) + expect(subject[:owner][:name]).to eq(user.name) + end +end diff --git a/spec/serializers/note_entity_spec.rb b/spec/serializers/note_entity_spec.rb index 3459cc72063..51a8587ace9 100644 --- a/spec/serializers/note_entity_spec.rb +++ b/spec/serializers/note_entity_spec.rb @@ -48,4 +48,15 @@ describe NoteEntity do expect(subject).to include(:system_note_icon_name) end end + + context 'when note is part of resolvable discussion' do + before do + allow(note).to receive(:part_of_discussion?).and_return(true) + allow(note).to receive(:resolvable?).and_return(true) + end + + it 'exposes paths to resolve note' do + expect(subject).to include(:resolve_path, :resolve_with_issue_path) + end + end end diff --git a/spec/services/chat_names/find_user_service_spec.rb b/spec/services/chat_names/find_user_service_spec.rb index 79aaac3aeb6..5734b10109a 100644 --- a/spec/services/chat_names/find_user_service_spec.rb +++ b/spec/services/chat_names/find_user_service_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe ChatNames::FindUserService do +describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state do describe '#execute' do let(:service) { create(:service) } @@ -13,21 +13,30 @@ describe ChatNames::FindUserService do context 'when existing user is requested' do let(:params) { { team_id: chat_name.team_id, user_id: chat_name.chat_id } } - it 'returns the existing user' do - is_expected.to eq(user) + it 'returns the existing chat_name' do + is_expected.to eq(chat_name) end - it 'updates when last time chat name was used' do + it 'updates the last used timestamp if one is not already set' do expect(chat_name.last_used_at).to be_nil subject - initial_last_used = chat_name.reload.last_used_at - expect(initial_last_used).to be_present + expect(chat_name.reload.last_used_at).to be_present + end + + it 'only updates an existing timestamp once within a certain time frame' do + service = described_class.new(service, params) + + expect(chat_name.last_used_at).to be_nil + + service.execute + + time = chat_name.reload.last_used_at - Timecop.travel(2.days.from_now) { described_class.new(service, params).execute } + service.execute - expect(chat_name.reload.last_used_at).to be > initial_last_used + expect(chat_name.reload.last_used_at).to eq(time) end end diff --git a/spec/services/ci/create_trace_artifact_service_spec.rb b/spec/services/ci/create_trace_artifact_service_spec.rb index 847a88920fe..8c5e8e438c7 100644 --- a/spec/services/ci/create_trace_artifact_service_spec.rb +++ b/spec/services/ci/create_trace_artifact_service_spec.rb @@ -4,40 +4,60 @@ describe Ci::CreateTraceArtifactService do describe '#execute' do subject { described_class.new(nil, nil).execute(job) } - let(:job) { create(:ci_build) } - context 'when the job does not have trace artifact' do context 'when the job has a trace file' do - before do - allow_any_instance_of(Gitlab::Ci::Trace) - .to receive(:default_path) { expand_fixture_path('trace/sample_trace') } + let!(:job) { create(:ci_build, :trace_live) } + let!(:legacy_path) { job.trace.read { |stream| return stream.path } } + let!(:legacy_checksum) { Digest::SHA256.file(legacy_path).hexdigest } + let(:new_path) { job.job_artifacts_trace.file.path } + let(:new_checksum) 
{ Digest::SHA256.file(new_path).hexdigest } - allow_any_instance_of(JobArtifactUploader).to receive(:move_to_cache) { false } - allow_any_instance_of(JobArtifactUploader).to receive(:move_to_store) { false } - end + it { expect(File.exist?(legacy_path)).to be_truthy } it 'creates trace artifact' do expect { subject }.to change { Ci::JobArtifact.count }.by(1) - expect(job.job_artifacts_trace.read_attribute(:file)).to eq('sample_trace') + expect(File.exist?(legacy_path)).to be_falsy + expect(File.exist?(new_path)).to be_truthy + expect(new_checksum).to eq(legacy_checksum) + expect(job.job_artifacts_trace.file.exists?).to be_truthy + expect(job.job_artifacts_trace.file.filename).to eq('job.log') end - context 'when the job has already had trace artifact' do + context 'when failed to create trace artifact record' do before do - create(:ci_job_artifact, :trace, job: job) + # When ActiveRecord error happens + allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false) + allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages) + .and_return("Error") + + subject rescue nil + + job.reload end - it 'does not create trace artifact' do - expect { subject }.not_to change { Ci::JobArtifact.count } + it 'keeps legacy trace and removes trace artifact' do + expect(File.exist?(legacy_path)).to be_truthy + expect(job.job_artifacts_trace).to be_nil end end end context 'when the job does not have a trace file' do + let!(:job) { create(:ci_build) } + it 'does not create trace artifact' do expect { subject }.not_to change { Ci::JobArtifact.count } end end end + + context 'when the job has already had trace artifact' do + let!(:job) { create(:ci_build, :trace_artifact) } + + it 'does not create trace artifact' do + expect { subject }.not_to change { Ci::JobArtifact.count } + end + end end end diff --git a/spec/services/clusters/applications/check_ingress_ip_address_service_spec.rb b/spec/services/clusters/applications/check_ingress_ip_address_service_spec.rb new file mode 100644 index 00000000000..bf038595a4d --- /dev/null +++ b/spec/services/clusters/applications/check_ingress_ip_address_service_spec.rb @@ -0,0 +1,73 @@ +require 'spec_helper' + +describe Clusters::Applications::CheckIngressIpAddressService do + let(:application) { create(:clusters_applications_ingress, :installed) } + let(:service) { described_class.new(application) } + let(:kubeclient) { double(::Kubeclient::Client, get_service: kube_service) } + let(:ingress) { [{ ip: '111.222.111.222' }] } + let(:exclusive_lease) { instance_double(Gitlab::ExclusiveLease, try_obtain: true) } + + let(:kube_service) do + ::Kubeclient::Resource.new( + { + status: { + loadBalancer: { + ingress: ingress + } + } + } + ) + end + + subject { service.execute } + + before do + allow(application.cluster).to receive(:kubeclient).and_return(kubeclient) + allow(Gitlab::ExclusiveLease) + .to receive(:new) + .with("check_ingress_ip_address_service:#{application.id}", timeout: 15.seconds.to_i) + .and_return(exclusive_lease) + end + + describe '#execute' do + context 'when the ingress ip address is available' do + it 'updates the external_ip for the app' do + subject + + expect(application.external_ip).to eq('111.222.111.222') + end + end + + context 'when the ingress ip address is not available' do + let(:ingress) { nil } + + it 'does not error' do + subject + end + end + + context 'when the exclusive lease cannot be obtained' do + before do + allow(exclusive_lease) + .to receive(:try_obtain) + .and_return(false) + end + + it 
'does not call kubeclient' do + subject + + expect(kubeclient).not_to have_received(:get_service) + end + end + + context 'when there is already an external_ip' do + let(:application) { create(:clusters_applications_ingress, :installed, external_ip: '001.111.002.111') } + + it 'does not call kubeclient' do + subject + + expect(kubeclient).not_to have_received(:get_service) + end + end + end +end diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb index ac4b9c02ba7..e8216abb08b 100644 --- a/spec/services/groups/destroy_service_spec.rb +++ b/spec/services/groups/destroy_service_spec.rb @@ -6,7 +6,7 @@ describe Groups::DestroyService do let!(:user) { create(:user) } let!(:group) { create(:group) } let!(:nested_group) { create(:group, parent: group) } - let!(:project) { create(:project, namespace: group) } + let!(:project) { create(:project, :legacy_storage, namespace: group) } let!(:notification_setting) { create(:notification_setting, source: group)} let(:gitlab_shell) { Gitlab::Shell.new } let(:remove_path) { group.path + "+#{group.id}+deleted" } @@ -141,7 +141,7 @@ describe Groups::DestroyService do end context 'legacy storage' do - let!(:project) { create(:project, :empty_repo, namespace: group) } + let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: group) } it 'removes repository' do expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey @@ -149,7 +149,7 @@ describe Groups::DestroyService do end context 'hashed storage' do - let!(:project) { create(:project, :hashed, :empty_repo, namespace: group) } + let!(:project) { create(:project, :empty_repo, namespace: group) } it 'removes repository' do expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey diff --git a/spec/services/issues/fetch_referenced_merge_requests_service_spec.rb b/spec/services/issues/fetch_referenced_merge_requests_service_spec.rb new file mode 100644 index 00000000000..4e58179f45f --- /dev/null +++ b/spec/services/issues/fetch_referenced_merge_requests_service_spec.rb @@ -0,0 +1,35 @@ +require 'spec_helper.rb' + +describe Issues::FetchReferencedMergeRequestsService do + let(:project) { create(:project) } + let(:issue) { create(:issue, project: project) } + let(:other_project) { create(:project) } + + let(:mr) { create(:merge_request, source_project: project, target_project: project, id: 2)} + let(:other_mr) { create(:merge_request, source_project: other_project, target_project: other_project, id: 1)} + + let(:user) { create(:user) } + let(:service) { described_class.new(project, user) } + + context 'with mentioned merge requests' do + it 'returns a list of sorted merge requests' do + allow(issue).to receive(:referenced_merge_requests).with(user).and_return([other_mr, mr]) + + mrs, closed_by_mrs = service.execute(issue) + + expect(mrs).to match_array([mr, other_mr]) + expect(closed_by_mrs).to match_array([]) + end + end + + context 'with closed-by merge requests' do + it 'returns a list of sorted merge requests' do + allow(issue).to receive(:closed_by_merge_requests).with(user).and_return([other_mr, mr]) + + mrs, closed_by_mrs = service.execute(issue) + + expect(mrs).to match_array([]) + expect(closed_by_mrs).to match_array([mr, other_mr]) + end + end +end diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb index 322c91065e7..c148a98569b 100644 --- a/spec/services/issues/move_service_spec.rb +++ 
b/spec/services/issues/move_service_spec.rb @@ -232,6 +232,28 @@ describe Issues::MoveService do end end + context 'issue with assignee' do + let(:assignee) { create(:user) } + + before do + old_issue.assignees = [assignee] + end + + it 'preserves assignee with access to the new issue' do + new_project.add_reporter(assignee) + + new_issue = move_service.execute(old_issue, new_project) + + expect(new_issue.assignees).to eq([assignee]) + end + + it 'ignores assignee without access to the new issue' do + new_issue = move_service.execute(old_issue, new_project) + + expect(new_issue.assignees).to be_empty + end + end + context 'notes with references' do before do create(:merge_request, source_project: old_project) diff --git a/spec/services/labels/find_or_create_service_spec.rb b/spec/services/labels/find_or_create_service_spec.rb index 78aa5d442e7..68d5660445a 100644 --- a/spec/services/labels/find_or_create_service_spec.rb +++ b/spec/services/labels/find_or_create_service_spec.rb @@ -15,47 +15,79 @@ describe Labels::FindOrCreateService do context 'when acting on behalf of a specific user' do let(:user) { create(:user) } - subject(:service) { described_class.new(user, project, params) } - before do - project.add_developer(user) - end - context 'when label does not exist at group level' do - it 'creates a new label at project level' do - expect { service.execute }.to change(project.labels, :count).by(1) + context 'when finding labels on project level' do + subject(:service) { described_class.new(user, project, params) } + + before do + project.add_developer(user) end - end - context 'when label exists at group level' do - it 'returns the group label' do - group_label = create(:group_label, group: group, title: 'Security') + context 'when label does not exist at group level' do + it 'creates a new label at project level' do + expect { service.execute }.to change(project.labels, :count).by(1) + end + end - expect(service.execute).to eq group_label + context 'when label exists at group level' do + it 'returns the group label' do + group_label = create(:group_label, group: group, title: 'Security') + + expect(service.execute).to eq group_label + end + end + + context 'when label exists at project level' do + it 'returns the project label' do + project_label = create(:label, project: project, title: 'Security') + + expect(service.execute).to eq project_label + end end end - context 'when label does not exist at group level' do - it 'creates a new label at project leve' do - expect { service.execute }.to change(project.labels, :count).by(1) + context 'when finding labels on group level' do + subject(:service) { described_class.new(user, group, params) } + + before do + group.add_developer(user) + end + + context 'when label does not exist at group level' do + it 'creates a new label at group level' do + expect { service.execute }.to change(group.labels, :count).by(1) + end + end + + context 'when label exists at group level' do + it 'returns the group label' do + group_label = create(:group_label, group: group, title: 'Security') + + expect(service.execute).to eq group_label + end end end + end + + context 'when authorization is not required' do + context 'when finding labels on project level' do + subject(:service) { described_class.new(nil, project, params) } - context 'when label exists at project level' do it 'returns the project label' do project_label = create(:label, project: project, title: 'Security') - expect(service.execute).to eq project_label + expect(service.execute(skip_authorization: 
true)).to eq project_label end end - end - context 'when authorization is not required' do - subject(:service) { described_class.new(nil, project, params) } + context 'when finding labels on group level' do + subject(:service) { described_class.new(nil, group, params) } - it 'returns the project label' do - project_label = create(:label, project: project, title: 'Security') + it 'returns the group label' do + group_label = create(:group_label, group: group, title: 'Security') - expect(service.execute(skip_authorization: true)).to eq project_label + expect(service.execute(skip_authorization: true)).to eq group_label + end end end end diff --git a/spec/services/lfs/lock_file_service_spec.rb b/spec/services/lfs/lock_file_service_spec.rb new file mode 100644 index 00000000000..3e58eea2501 --- /dev/null +++ b/spec/services/lfs/lock_file_service_spec.rb @@ -0,0 +1,62 @@ +require 'spec_helper' + +describe Lfs::LockFileService do + let(:project) { create(:project) } + let(:current_user) { create(:user) } + + subject { described_class.new(project, current_user, params) } + + describe '#execute' do + let(:params) { { path: 'README.md' } } + + context 'when not authorized' do + it "doesn't succeed" do + result = subject.execute + + expect(result[:status]).to eq(:error) + expect(result[:http_status]).to eq(403) + expect(result[:message]).to eq('You have no permissions') + end + end + + context 'when authorized' do + before do + project.add_developer(current_user) + end + + context 'with an existent lock' do + let!(:lock) { create(:lfs_file_lock, project: project) } + + it "doesn't succeed" do + expect(subject.execute[:status]).to eq(:error) + end + + it "doesn't create the Lock" do + expect do + subject.execute + end.not_to change { LfsFileLock.count } + end + end + + context 'without an existent lock' do + it "succeeds" do + expect(subject.execute[:status]).to eq(:success) + end + + it "creates the Lock" do + expect do + subject.execute + end.to change { LfsFileLock.count }.by(1) + end + end + + context 'when an error is raised' do + it "doesn't succeed" do + allow_any_instance_of(described_class).to receive(:create_lock!).and_raise(StandardError) + + expect(subject.execute[:status]).to eq(:error) + end + end + end + end +end diff --git a/spec/services/lfs/locks_finder_service_spec.rb b/spec/services/lfs/locks_finder_service_spec.rb new file mode 100644 index 00000000000..e409b77babf --- /dev/null +++ b/spec/services/lfs/locks_finder_service_spec.rb @@ -0,0 +1,101 @@ +require 'spec_helper' + +describe Lfs::LocksFinderService do + let(:project) { create(:project) } + let(:user) { create(:user) } + let(:params) { {} } + + subject { described_class.new(project, user, params) } + + shared_examples 'no results' do + it 'returns an empty list' do + result = subject.execute + + expect(result[:status]).to eq(:success) + expect(result[:locks]).to be_blank + end + end + + describe '#execute' do + let!(:lock_1) { create(:lfs_file_lock, project: project) } + let!(:lock_2) { create(:lfs_file_lock, project: project, path: 'README') } + + context 'find by id' do + context 'with results' do + let(:params) do + { id: lock_1.id } + end + + it 'returns the record' do + result = subject.execute + + expect(result[:status]).to eq(:success) + expect(result[:locks].size).to eq(1) + expect(result[:locks].first).to eq(lock_1) + end + end + + context 'without results' do + let(:params) do + { id: 123 } + end + + include_examples 'no results' + end + end + + context 'find by path' do + context 'with results' do + let(:params) 
do + { path: lock_1.path } + end + + it 'returns the record' do + result = subject.execute + + expect(result[:status]).to eq(:success) + expect(result[:locks].size).to eq(1) + expect(result[:locks].first).to eq(lock_1) + end + end + + context 'without results' do + let(:params) do + { path: 'not-found' } + end + + include_examples 'no results' + end + end + + context 'find all' do + context 'with results' do + it 'returns all the records' do + result = subject.execute + + expect(result[:status]).to eq(:success) + expect(result[:locks].size).to eq(2) + end + end + + context 'without results' do + before do + LfsFileLock.delete_all + end + + include_examples 'no results' + end + end + + context 'when an error is raised' do + it "doesn't succeed" do + allow_any_instance_of(described_class).to receive(:find_locks).and_raise(StandardError) + + result = subject.execute + + expect(result[:status]).to eq(:error) + expect(result[:locks]).to be_blank + end + end + end +end diff --git a/spec/services/lfs/unlock_file_service_spec.rb b/spec/services/lfs/unlock_file_service_spec.rb new file mode 100644 index 00000000000..4bea112b9c6 --- /dev/null +++ b/spec/services/lfs/unlock_file_service_spec.rb @@ -0,0 +1,105 @@ +require 'spec_helper' + +describe Lfs::UnlockFileService do + let(:project) { create(:project) } + let(:current_user) { create(:user) } + let(:lock_author) { create(:user) } + let!(:lock) { create(:lfs_file_lock, user: lock_author, project: project) } + let(:params) { {} } + + subject { described_class.new(project, current_user, params) } + + describe '#execute' do + context 'when not authorized' do + it "doesn't succeed" do + result = subject.execute + + expect(result[:status]).to eq(:error) + expect(result[:http_status]).to eq(403) + expect(result[:message]).to eq('You have no permissions') + end + end + + context 'when authorized' do + before do + project.add_developer(current_user) + end + + context 'when lock does not exists' do + let(:params) { { id: 123 } } + it "doesn't succeed" do + result = subject.execute + + expect(result[:status]).to eq(:error) + expect(result[:http_status]).to eq(404) + end + end + + context 'when unlocked by the author' do + let(:current_user) { lock_author } + let(:params) { { id: lock.id } } + + it "succeeds" do + result = subject.execute + + expect(result[:status]).to eq(:success) + expect(result[:lock]).to be_present + end + end + + context 'when unlocked by a different user' do + let(:current_user) { create(:user) } + let(:params) { { id: lock.id } } + + it "doesn't succeed" do + result = subject.execute + + expect(result[:status]).to eq(:error) + expect(result[:message]).to match(/is locked by GitLab User #{lock_author.id}/) + expect(result[:http_status]).to eq(403) + end + end + + context 'when forced' do + let(:developer) { create(:user) } + let(:master) { create(:user) } + + before do + project.add_developer(developer) + project.add_master(master) + end + + context 'by a regular user' do + let(:current_user) { developer } + let(:params) do + { id: lock.id, + force: true } + end + + it "doesn't succeed" do + result = subject.execute + + expect(result[:status]).to eq(:error) + expect(result[:message]).to match(/You must have master access/) + expect(result[:http_status]).to eq(403) + end + end + + context 'by a master user' do + let(:current_user) { master } + let(:params) do + { id: lock.id, + force: true } + end + + it "succeeds" do + result = subject.execute + + expect(result[:status]).to eq(:success) + expect(result[:lock]).to be_present + end + end 
+ end + end + end +end diff --git a/spec/services/members/approve_access_request_service_spec.rb b/spec/services/members/approve_access_request_service_spec.rb index b3018169a1c..7076571b753 100644 --- a/spec/services/members/approve_access_request_service_spec.rb +++ b/spec/services/members/approve_access_request_service_spec.rb @@ -1,70 +1,56 @@ require 'spec_helper' describe Members::ApproveAccessRequestService do - let(:user) { create(:user) } - let(:access_requester) { create(:user) } let(:project) { create(:project, :public, :access_requestable) } let(:group) { create(:group, :public, :access_requestable) } + let(:current_user) { create(:user) } + let(:access_requester_user) { create(:user) } + let(:access_requester) { source.requesters.find_by!(user_id: access_requester_user.id) } let(:opts) { {} } shared_examples 'a service raising ActiveRecord::RecordNotFound' do it 'raises ActiveRecord::RecordNotFound' do - expect { described_class.new(source, user, params).execute(opts) }.to raise_error(ActiveRecord::RecordNotFound) + expect { described_class.new(current_user).execute(access_requester, opts) }.to raise_error(ActiveRecord::RecordNotFound) end end shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do it 'raises Gitlab::Access::AccessDeniedError' do - expect { described_class.new(source, user, params).execute(opts) }.to raise_error(Gitlab::Access::AccessDeniedError) + expect { described_class.new(current_user).execute(access_requester, opts) }.to raise_error(Gitlab::Access::AccessDeniedError) end end shared_examples 'a service approving an access request' do it 'succeeds' do - expect { described_class.new(source, user, params).execute(opts) }.to change { source.requesters.count }.by(-1) + expect { described_class.new(current_user).execute(access_requester, opts) }.to change { source.requesters.count }.by(-1) end it 'returns a <Source>Member' do - member = described_class.new(source, user, params).execute(opts) + member = described_class.new(current_user).execute(access_requester, opts) expect(member).to be_a "#{source.class}Member".constantize expect(member.requested_at).to be_nil end context 'with a custom access level' do - let(:params2) { params.merge(user_id: access_requester.id, access_level: Gitlab::Access::MASTER) } - it 'returns a ProjectMember with the custom access level' do - member = described_class.new(source, user, params2).execute(opts) + member = described_class.new(current_user, access_level: Gitlab::Access::MASTER).execute(access_requester, opts) - expect(member.access_level).to eq Gitlab::Access::MASTER + expect(member.access_level).to eq(Gitlab::Access::MASTER) end end end - context 'when no access requester are found' do - let(:params) { { user_id: 42 } } - - it_behaves_like 'a service raising ActiveRecord::RecordNotFound' do - let(:source) { project } - end - - it_behaves_like 'a service raising ActiveRecord::RecordNotFound' do - let(:source) { group } - end - end - context 'when an access requester is found' do before do - project.request_access(access_requester) - group.request_access(access_requester) + project.request_access(access_requester_user) + group.request_access(access_requester_user) end - let(:params) { { user_id: access_requester.id } } context 'when current user is nil' do let(:user) { nil } - context 'and :force option is not given' do + context 'and :ldap option is not given' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { project } end @@ -74,8 +60,8 @@ describe 
Members::ApproveAccessRequestService do end end - context 'and :force option is false' do - let(:opts) { { force: false } } + context 'and :skip_authorization option is false' do + let(:opts) { { skip_authorization: false } } it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { project } @@ -86,8 +72,8 @@ describe Members::ApproveAccessRequestService do end end - context 'and :force option is true' do - let(:opts) { { force: true } } + context 'and :skip_authorization option is true' do + let(:opts) { { skip_authorization: true } } it_behaves_like 'a service approving an access request' do let(:source) { project } @@ -97,18 +83,6 @@ describe Members::ApproveAccessRequestService do let(:source) { group } end end - - context 'and :force param is true' do - let(:params) { { user_id: access_requester.id, force: true } } - - it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do - let(:source) { project } - end - - it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do - let(:source) { group } - end - end end context 'when current user cannot approve access request to the project' do @@ -123,8 +97,8 @@ describe Members::ApproveAccessRequestService do context 'when current user can approve access request to the project' do before do - project.add_master(user) - group.add_owner(user) + project.add_master(current_user) + group.add_owner(current_user) end it_behaves_like 'a service approving an access request' do @@ -134,14 +108,6 @@ describe Members::ApproveAccessRequestService do it_behaves_like 'a service approving an access request' do let(:source) { group } end - - context 'when given a :id' do - let(:params) { { id: project.requesters.find_by!(user_id: access_requester.id).id } } - - it_behaves_like 'a service approving an access request' do - let(:source) { project } - end - end end end end diff --git a/spec/services/members/authorized_destroy_service_spec.rb b/spec/services/members/authorized_destroy_service_spec.rb deleted file mode 100644 index 757c45708b9..00000000000 --- a/spec/services/members/authorized_destroy_service_spec.rb +++ /dev/null @@ -1,66 +0,0 @@ -require 'spec_helper' - -describe Members::AuthorizedDestroyService do - let(:member_user) { create(:user) } - let(:project) { create(:project, :public) } - let(:group) { create(:group, :public) } - let(:group_project) { create(:project, :public, group: group) } - - def number_of_assigned_issuables(user) - Issue.assigned_to(user).count + MergeRequest.assigned_to(user).count - end - - context 'Invited users' do - # Regression spec for issue: https://gitlab.com/gitlab-org/gitlab-ce/issues/32504 - it 'destroys invited project member' do - project.add_developer(member_user) - - member = create :project_member, :invited, project: project - - expect { described_class.new(member, member_user).execute } - .to change { Member.count }.from(3).to(2) - end - - it 'destroys invited group member' do - group.add_developer(member_user) - - member = create :group_member, :invited, group: group - - expect { described_class.new(member, member_user).execute } - .to change { Member.count }.from(2).to(1) - end - end - - context 'Group member' do - it "unassigns issues and merge requests" do - group.add_developer(member_user) - - issue = create :issue, project: group_project, assignees: [member_user] - create :issue, assignees: [member_user] - merge_request = create :merge_request, target_project: group_project, source_project: group_project, assignee: member_user - create 
:merge_request, target_project: project, source_project: project, assignee: member_user - - member = group.members.find_by(user_id: member_user.id) - - expect { described_class.new(member, member_user).execute } - .to change { number_of_assigned_issuables(member_user) }.from(4).to(2) - - expect(issue.reload.assignee_ids).to be_empty - expect(merge_request.reload.assignee_id).to be_nil - end - end - - context 'Project member' do - it "unassigns issues and merge requests" do - project.add_developer(member_user) - - create :issue, project: project, assignees: [member_user] - create :merge_request, target_project: project, source_project: project, assignee: member_user - - member = project.members.find_by(user_id: member_user.id) - - expect { described_class.new(member, member_user).execute } - .to change { number_of_assigned_issuables(member_user) }.from(2).to(0) - end - end -end diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb index 6bd4718e780..1831c62d788 100644 --- a/spec/services/members/create_service_spec.rb +++ b/spec/services/members/create_service_spec.rb @@ -11,7 +11,7 @@ describe Members::CreateService do it 'adds user to members' do params = { user_ids: project_user.id.to_s, access_level: Gitlab::Access::GUEST } - result = described_class.new(project, user, params).execute + result = described_class.new(user, params).execute(project) expect(result[:status]).to eq(:success) expect(project.users).to include project_user @@ -19,7 +19,7 @@ describe Members::CreateService do it 'adds no user to members' do params = { user_ids: '', access_level: Gitlab::Access::GUEST } - result = described_class.new(project, user, params).execute + result = described_class.new(user, params).execute(project) expect(result[:status]).to eq(:error) expect(result[:message]).to be_present @@ -30,7 +30,7 @@ describe Members::CreateService do user_ids = 1.upto(101).to_a.join(',') params = { user_ids: user_ids, access_level: Gitlab::Access::GUEST } - result = described_class.new(project, user, params).execute + result = described_class.new(user, params).execute(project) expect(result[:status]).to eq(:error) expect(result[:message]).to be_present diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb index 91152df3ad9..10c264a90c5 100644 --- a/spec/services/members/destroy_service_spec.rb +++ b/spec/services/members/destroy_service_spec.rb @@ -1,112 +1,202 @@ require 'spec_helper' describe Members::DestroyService do - let(:user) { create(:user) } + let(:current_user) { create(:user) } let(:member_user) { create(:user) } - let(:project) { create(:project, :public) } let(:group) { create(:group, :public) } + let(:group_project) { create(:project, :public, group: group) } + let(:opts) { {} } shared_examples 'a service raising ActiveRecord::RecordNotFound' do it 'raises ActiveRecord::RecordNotFound' do - expect { described_class.new(source, user, params).execute }.to raise_error(ActiveRecord::RecordNotFound) + expect { described_class.new(current_user).execute(member) }.to raise_error(ActiveRecord::RecordNotFound) end end shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do it 'raises Gitlab::Access::AccessDeniedError' do - expect { described_class.new(source, user, params).execute }.to raise_error(Gitlab::Access::AccessDeniedError) + expect { described_class.new(current_user).execute(member) }.to raise_error(Gitlab::Access::AccessDeniedError) end end + def 
number_of_assigned_issuables(user) + Issue.assigned_to(user).count + MergeRequest.assigned_to(user).count + end + shared_examples 'a service destroying a member' do it 'destroys the member' do - expect { described_class.new(source, user, params).execute }.to change { source.members.count }.by(-1) + expect { described_class.new(current_user).execute(member, opts) }.to change { member.source.members_and_requesters.count }.by(-1) end - context 'when the given member is an access requester' do - before do - source.members.find_by(user_id: member_user).destroy - source.update_attributes(request_access_enabled: true) - source.request_access(member_user) + it 'unassigns issues and merge requests' do + if member.invite? + expect { described_class.new(current_user).execute(member, opts) } + .not_to change { number_of_assigned_issuables(member_user) } + else + create :issue, assignees: [member_user] + issue = create :issue, project: group_project, assignees: [member_user] + merge_request = create :merge_request, target_project: group_project, source_project: group_project, assignee: member_user + + expect { described_class.new(current_user).execute(member, opts) } + .to change { number_of_assigned_issuables(member_user) }.from(3).to(1) + + expect(issue.reload.assignee_ids).to be_empty + expect(merge_request.reload.assignee_id).to be_nil end - let(:access_requester) { source.requesters.find_by(user_id: member_user) } + end - it_behaves_like 'a service raising ActiveRecord::RecordNotFound' + it 'destroys member notification_settings' do + if member_user.notification_settings.any? + expect { described_class.new(current_user).execute(member, opts) } + .to change { member_user.notification_settings.count }.by(-1) + else + expect { described_class.new(current_user).execute(member, opts) } + .not_to change { member_user.notification_settings.count } + end + end + end - %i[requesters all].each do |scope| - context "and #{scope} scope is passed" do - it 'destroys the access requester' do - expect { described_class.new(source, user, params).execute(scope) }.to change { source.requesters.count }.by(-1) - end + shared_examples 'a service destroying an access requester' do + it_behaves_like 'a service destroying a member' - it 'calls Member#after_decline_request' do - expect_any_instance_of(NotificationService).to receive(:decline_access_request).with(access_requester) + it 'calls Member#after_decline_request' do + expect_any_instance_of(NotificationService).to receive(:decline_access_request).with(member) - described_class.new(source, user, params).execute(scope) - end + described_class.new(current_user).execute(member) + end - context 'when current user is the member' do - it 'does not call Member#after_decline_request' do - expect_any_instance_of(NotificationService).not_to receive(:decline_access_request).with(access_requester) + context 'when current user is the member' do + it 'does not call Member#after_decline_request' do + expect_any_instance_of(NotificationService).not_to receive(:decline_access_request).with(member) - described_class.new(source, member_user, params).execute(scope) - end - end - end + described_class.new(member_user).execute(member) end end end - context 'when no member are found' do - let(:params) { { user_id: 42 } } + context 'with a member' do + before do + group_project.add_developer(member_user) + group.add_developer(member_user) + end + + context 'when current user cannot destroy the given member' do + it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do + 
let(:member) { group_project.members.find_by(user_id: member_user.id) } + end + + it_behaves_like 'a service destroying a member' do + let(:opts) { { skip_authorization: true } } + let(:member) { group_project.members.find_by(user_id: member_user.id) } + end + + it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do + let(:member) { group.members.find_by(user_id: member_user.id) } + end - it_behaves_like 'a service raising ActiveRecord::RecordNotFound' do - let(:source) { project } + it_behaves_like 'a service destroying a member' do + let(:opts) { { skip_authorization: true } } + let(:member) { group.members.find_by(user_id: member_user.id) } + end end - it_behaves_like 'a service raising ActiveRecord::RecordNotFound' do - let(:source) { group } + context 'when current user can destroy the given member' do + before do + group_project.add_master(current_user) + group.add_owner(current_user) + end + + it_behaves_like 'a service destroying a member' do + let(:member) { group_project.members.find_by(user_id: member_user.id) } + end + + it_behaves_like 'a service destroying a member' do + let(:member) { group.members.find_by(user_id: member_user.id) } + end end end - context 'when a member is found' do + context 'with an access requester' do before do - project.add_developer(member_user) - group.add_developer(member_user) + group_project.update_attributes(request_access_enabled: true) + group.update_attributes(request_access_enabled: true) + group_project.request_access(member_user) + group.request_access(member_user) end - let(:params) { { user_id: member_user.id } } - context 'when current user cannot destroy the given member' do + context 'when current user cannot destroy the given access requester' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do - let(:source) { project } + let(:member) { group_project.requesters.find_by(user_id: member_user.id) } + end + + it_behaves_like 'a service destroying a member' do + let(:opts) { { skip_authorization: true } } + let(:member) { group_project.requesters.find_by(user_id: member_user.id) } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do - let(:source) { group } + let(:member) { group.requesters.find_by(user_id: member_user.id) } + end + + it_behaves_like 'a service destroying a member' do + let(:opts) { { skip_authorization: true } } + let(:member) { group.requesters.find_by(user_id: member_user.id) } end end - context 'when current user can destroy the given member' do + context 'when current user can destroy the given access requester' do before do - project.add_master(user) - group.add_owner(user) + group_project.add_master(current_user) + group.add_owner(current_user) + end + + it_behaves_like 'a service destroying an access requester' do + let(:member) { group_project.requesters.find_by(user_id: member_user.id) } + end + + it_behaves_like 'a service destroying an access requester' do + let(:member) { group.requesters.find_by(user_id: member_user.id) } + end + end + end + + context 'with an invited user' do + let(:project_invited_member) { create(:project_member, :invited, project: group_project) } + let(:group_invited_member) { create(:group_member, :invited, group: group) } + + context 'when current user cannot destroy the given invited user' do + it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do + let(:member) { project_invited_member } end it_behaves_like 'a service destroying a member' do - let(:source) { project } + let(:opts) { { 
skip_authorization: true } } + let(:member) { project_invited_member } + end + + it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do + let(:member) { group_invited_member } end it_behaves_like 'a service destroying a member' do - let(:source) { group } + let(:opts) { { skip_authorization: true } } + let(:member) { group_invited_member } end + end - context 'when given a :id' do - let(:params) { { id: project.members.find_by!(user_id: user.id).id } } + context 'when current user can destroy the given invited user' do + before do + group_project.add_master(current_user) + group.add_owner(current_user) + end - it 'destroys the member' do - expect { described_class.new(project, user, params).execute } - .to change { project.members.count }.by(-1) - end + # Regression spec for issue: https://gitlab.com/gitlab-org/gitlab-ce/issues/32504 + it_behaves_like 'a service destroying a member' do + let(:member) { project_invited_member } + end + + it_behaves_like 'a service destroying a member' do + let(:member) { group_invited_member } end end end diff --git a/spec/services/members/request_access_service_spec.rb b/spec/services/members/request_access_service_spec.rb index 0a704bba521..e93ba5a85c0 100644 --- a/spec/services/members/request_access_service_spec.rb +++ b/spec/services/members/request_access_service_spec.rb @@ -5,17 +5,17 @@ describe Members::RequestAccessService do shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do it 'raises Gitlab::Access::AccessDeniedError' do - expect { described_class.new(source, user).execute }.to raise_error(Gitlab::Access::AccessDeniedError) + expect { described_class.new(user).execute(source) }.to raise_error(Gitlab::Access::AccessDeniedError) end end shared_examples 'a service creating a access request' do it 'succeeds' do - expect { described_class.new(source, user).execute }.to change { source.requesters.count }.by(1) + expect { described_class.new(user).execute(source) }.to change { source.requesters.count }.by(1) end it 'returns a <Source>Member' do - member = described_class.new(source, user).execute + member = described_class.new(user).execute(source) expect(member).to be_a "#{source.class}Member".constantize expect(member.requested_at).to be_present diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb new file mode 100644 index 00000000000..a451272dd1f --- /dev/null +++ b/spec/services/members/update_service_spec.rb @@ -0,0 +1,59 @@ +require 'spec_helper' + +describe Members::UpdateService do + let(:project) { create(:project, :public) } + let(:group) { create(:group, :public) } + let(:current_user) { create(:user) } + let(:member_user) { create(:user) } + let(:permission) { :update } + let(:member) { source.members_and_requesters.find_by!(user_id: member_user.id) } + let(:params) do + { access_level: Gitlab::Access::MASTER } + end + + shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do + it 'raises Gitlab::Access::AccessDeniedError' do + expect { described_class.new(current_user, params).execute(member, permission: permission) } + .to raise_error(Gitlab::Access::AccessDeniedError) + end + end + + shared_examples 'a service updating a member' do + it 'updates the member' do + updated_member = described_class.new(current_user, params).execute(member, permission: permission) + + expect(updated_member).to be_valid + expect(updated_member.access_level).to eq(Gitlab::Access::MASTER) + end + end + + before do + project.add_developer(member_user) + 
group.add_developer(member_user) + end + + context 'when current user cannot update the given member' do + it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do + let(:source) { project } + end + + it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do + let(:source) { group } + end + end + + context 'when current user can update the given member' do + before do + project.add_master(current_user) + group.add_owner(current_user) + end + + it_behaves_like 'a service updating a member' do + let(:source) { project } + end + + it_behaves_like 'a service updating a member' do + let(:source) { group } + end + end +end diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb index e56d335a7d6..6aed481939e 100644 --- a/spec/services/merge_requests/build_service_spec.rb +++ b/spec/services/merge_requests/build_service_spec.rb @@ -1,6 +1,7 @@ require 'spec_helper' describe MergeRequests::BuildService do + using RSpec::Parameterized::TableSyntax include RepoHelpers let(:project) { create(:project, :repository) } @@ -14,8 +15,8 @@ describe MergeRequests::BuildService do let(:target_branch) { 'master' } let(:merge_request) { service.execute } let(:compare) { double(:compare, commits: commits) } - let(:commit_1) { double(:commit_1, safe_message: "Initial commit\n\nCreate the app") } - let(:commit_2) { double(:commit_2, safe_message: 'This is a bad commit message!') } + let(:commit_1) { double(:commit_1, sha: 'f00ba7', safe_message: "Initial commit\n\nCreate the app") } + let(:commit_2) { double(:commit_2, sha: 'f00ba7', safe_message: 'This is a bad commit message!') } let(:commits) { nil } let(:service) do @@ -111,6 +112,7 @@ describe MergeRequests::BuildService do context 'one commit in the diff' do let(:commits) { Commit.decorate([commit_1], project) } + let(:commit_description) { commit_1.safe_message.split(/\n+/, 2).last } before do stub_compare @@ -125,7 +127,7 @@ describe MergeRequests::BuildService do end it 'uses the description of the commit as the description of the merge request' do - expect(merge_request.description).to eq(commit_1.safe_message.split(/\n+/, 2).last) + expect(merge_request.description).to eq(commit_description) end context 'merge request already has a description set' do @@ -148,68 +150,32 @@ describe MergeRequests::BuildService do end end - context 'branch starts with issue IID followed by a hyphen' do - let(:source_branch) { "#{issue.iid}-fix-issue" } - - it 'appends "Closes #$issue-iid" to the description' do - expect(merge_request.description).to eq("#{commit_1.safe_message.split(/\n+/, 2).last}\n\nCloses ##{issue.iid}") + context 'when the source branch matches an issue' do + where(:issue_tracker, :source_branch, :closing_message) do + :jira | 'FOO-123-fix-issue' | 'Closes FOO-123' + :jira | 'fix-issue' | nil + :custom_issue_tracker | '123-fix-issue' | 'Closes #123' + :custom_issue_tracker | 'fix-issue' | nil + :internal | '123-fix-issue' | 'Closes #123' + :internal | 'fix-issue' | nil end - context 'merge request already has a description set' do - let(:description) { 'Merge request description' } - - it 'appends "Closes #$issue-iid" to the description' do - expect(merge_request.description).to eq("#{description}\n\nCloses ##{issue.iid}") + with_them do + before do + if issue_tracker == :internal + issue.update!(iid: 123) + else + create(:"#{issue_tracker}_service", project: project) + end end - end - context 'commit has no description' do - let(:commits) { 
Commit.decorate([commit_2], project) } + it 'appends the closing description' do + expected_description = [commit_description, closing_message].compact.join("\n\n") - it 'sets the description to "Closes #$issue-iid"' do - expect(merge_request.description).to eq("Closes ##{issue.iid}") + expect(merge_request.description).to eq(expected_description) end end end - - context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do - let(:source_branch) { '12345-fix-issue' } - - before do - allow(project).to receive(:external_issue_tracker).and_return(false) - allow(project).to receive(:issues_enabled?).and_return(false) - end - - it 'uses the title of the commit as the title of the merge request' do - expect(merge_request.title).to eq(commit_1.safe_message.split("\n").first) - end - - it 'uses the description of the commit as the description of the merge request' do - commit_description = commit_1.safe_message.split(/\n+/, 2).last - - expect(merge_request.description).to eq("#{commit_description}") - end - end - - context 'branch starts with JIRA-formatted external issue IID followed by a hyphen' do - let(:source_branch) { 'EXMPL-12345-fix-issue' } - - before do - allow(project).to receive(:external_issue_tracker).and_return(true) - allow(project).to receive(:issues_enabled?).and_return(false) - allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern) - end - - it 'uses the title of the commit as the title of the merge request' do - expect(merge_request.title).to eq(commit_1.safe_message.split("\n").first) - end - - it 'uses the description of the commit as the description of the merge request and appends the closes text' do - commit_description = commit_1.safe_message.split(/\n+/, 2).last - - expect(merge_request.description).to eq("#{commit_description}\n\nCloses EXMPL-12345") - end - end end context 'more than one commit in the diff' do @@ -239,80 +205,62 @@ describe MergeRequests::BuildService do end end - context 'branch starts with GitLab issue IID followed by a hyphen' do - let(:source_branch) { "#{issue.iid}-fix-issue" } - - it 'sets the title to: Resolves "$issue-title"' do - expect(merge_request.title).to eq("Resolve \"#{issue.title}\"") + context 'when the source branch matches an issue' do + where(:issue_tracker, :source_branch, :title, :closing_message) do + :jira | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123' + :jira | 'fix-issue' | 'Fix issue' | nil + :custom_issue_tracker | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123' + :custom_issue_tracker | 'fix-issue' | 'Fix issue' | nil + :internal | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123' + :internal | 'fix-issue' | 'Fix issue' | nil + :internal | '124-fix-issue' | '124 fix issue' | nil end - context 'when issue is not accessible to user' do + with_them do before do - project.team.truncate - end - - it 'uses branch title as the merge request title' do - expect(merge_request.title).to eq("#{issue.iid} fix issue") + if issue_tracker == :internal + issue.update!(iid: 123) + else + create(:"#{issue_tracker}_service", project: project) + end end - end - - context 'issue does not exist' do - let(:source_branch) { "#{issue.iid.succ}-fix-issue" } - it 'uses the title of the branch as the merge request title' do - expect(merge_request.title).to eq("#{issue.iid.succ} fix issue") + it 'sets the correct title' do + expect(merge_request.title).to eq(title) end - end - - context 'issue is confidential' do - 
let(:issue_confidential) { true } - it 'uses the title of the branch as the merge request title' do - expect(merge_request.title).to eq("#{issue.iid} fix issue") + it 'sets the closing description' do + expect(merge_request.description).to eq(closing_message) end end end - context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do - let(:source_branch) { '12345-fix-issue' } + context 'when the issue is not accessible to user' do + let(:source_branch) { "#{issue.iid}-fix-issue" } before do - allow(project).to receive(:external_issue_tracker).and_return(false) - allow(project).to receive(:issues_enabled?).and_return(false) + project.team.truncate end - it 'sets the title to the humanized branch title' do - expect(merge_request.title).to eq('12345 fix issue') + it 'uses branch title as the merge request title' do + expect(merge_request.title).to eq("#{issue.iid} fix issue") end - end - - context 'branch starts with JIRA-formatted external issue IID' do - let(:source_branch) { 'EXMPL-12345' } - before do - allow(project).to receive(:external_issue_tracker).and_return(true) - allow(project).to receive(:issues_enabled?).and_return(false) - allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern) + it 'does not set a description' do + expect(merge_request.description).to be_nil end + end - it 'sets the title to the humanized branch title' do - expect(merge_request.title).to eq('Resolve EXMPL-12345') - end + context 'when the issue is confidential' do + let(:source_branch) { "#{issue.iid}-fix-issue" } + let(:issue_confidential) { true } - it 'appends the closes text' do - expect(merge_request.description).to eq('Closes EXMPL-12345') + it 'uses the title of the branch as the merge request title' do + expect(merge_request.title).to eq("#{issue.iid} fix issue") end - context 'followed by hyphenated text' do - let(:source_branch) { 'EXMPL-12345-fix-issue' } - - it 'sets the title to the humanized branch title' do - expect(merge_request.title).to eq('Resolve EXMPL-12345 "Fix issue"') - end - - it 'appends the closes text' do - expect(merge_request.description).to eq('Closes EXMPL-12345') - end + it 'does not set a description' do + expect(merge_request.description).to be_nil end end end diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb index 75553afc033..38d84cf0ceb 100644 --- a/spec/services/merge_requests/create_from_issue_service_spec.rb +++ b/spec/services/merge_requests/create_from_issue_service_spec.rb @@ -24,7 +24,7 @@ describe MergeRequests::CreateFromIssueService do end it 'delegates issue search to IssuesFinder' do - expect_any_instance_of(IssuesFinder).to receive(:execute).once.and_call_original + expect_any_instance_of(IssuesFinder).to receive(:find_by).once.and_call_original described_class.new(project, user, issue_iid: -1).execute end diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index 35eb84e5e88..62fdf870090 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -1307,6 +1307,33 @@ describe NotificationService, :mailer do end describe 'GroupMember' do + let(:added_user) { create(:user) } + + describe '#new_access_request' do + let(:master) { create(:user) } + let(:owner) { create(:user) } + let(:developer) { create(:user) } + let!(:group) do + create(:group, :public, :access_requestable) do |group| + 
group.add_owner(owner) + group.add_master(master) + group.add_developer(developer) + end + end + + before do + reset_delivered_emails! + end + + it 'sends notification to group owners_and_masters' do + group.request_access(added_user) + + should_email(owner) + should_email(master) + should_not_email(developer) + end + end + describe '#decline_group_invite' do let(:creator) { create(:user) } let(:group) { create(:group) } @@ -1328,18 +1355,9 @@ describe NotificationService, :mailer do describe '#new_group_member' do let(:group) { create(:group) } - let(:added_user) { create(:user) } - - def create_member! - GroupMember.create( - group: group, - user: added_user, - access_level: Gitlab::Access::GUEST - ) - end it 'sends a notification' do - create_member! + group.add_guest(added_user) should_only_email(added_user) end @@ -1349,7 +1367,7 @@ describe NotificationService, :mailer do end it 'does not send a notification' do - create_member! + group.add_guest(added_user) should_not_email_anyone end end @@ -1357,8 +1375,42 @@ describe NotificationService, :mailer do end describe 'ProjectMember' do + let(:project) { create(:project) } + set(:added_user) { create(:user) } + + describe '#new_access_request' do + context 'for a project in a user namespace' do + let(:project) do + create(:project, :public, :access_requestable) do |project| + project.add_master(project.owner) + end + end + + it 'sends notification to project owners_and_masters' do + project.request_access(added_user) + + should_only_email(project.owner) + end + end + + context 'for a project in a group' do + let(:group_owner) { create(:user) } + let(:group) { create(:group).tap { |g| g.add_owner(group_owner) } } + let!(:project) { create(:project, :public, :access_requestable, namespace: group) } + + before do + reset_delivered_emails! + end + + it 'sends notification to group owners_and_masters' do + project.request_access(added_user) + + should_only_email(group_owner) + end + end + end + describe '#decline_group_invite' do - let(:project) { create(:project) } let(:member) { create(:user) } before do @@ -1375,19 +1427,12 @@ describe NotificationService, :mailer do end describe '#new_project_member' do - let(:project) { create(:project) } - let(:added_user) { create(:user) } - - def create_member! - create(:project_member, user: added_user, project: project) - end - it do create_member! should_only_email(added_user) end - describe 'when notifications are disabled' do + context 'when notifications are disabled' do before do create_global_setting_for(added_user, :disabled) end @@ -1398,6 +1443,10 @@ describe NotificationService, :mailer do end end end + + def create_member! + create(:project_member, user: added_user, project: project) + end end context 'guest user in private project' do @@ -1629,6 +1678,78 @@ describe NotificationService, :mailer do end end + describe 'Pages domains' do + set(:project) { create(:project) } + set(:domain) { create(:pages_domain, project: project) } + set(:u_blocked) { create(:user, :blocked) } + set(:u_silence) { create_user_with_notification(:disabled, 'silent', project) } + set(:u_owner) { project.owner } + set(:u_master1) { create(:user) } + set(:u_master2) { create(:user) } + set(:u_developer) { create(:user) } + + before do + project.add_master(u_blocked) + project.add_master(u_silence) + project.add_master(u_master1) + project.add_master(u_master2) + project.add_developer(u_developer) + + reset_delivered_emails! 
+ end + + %i[ + pages_domain_enabled + pages_domain_disabled + pages_domain_verification_succeeded + pages_domain_verification_failed + ].each do |sym| + describe "##{sym}" do + subject(:notify!) { notification.send(sym, domain) } + + it 'emails current watching masters' do + expect(Notify).to receive(:"#{sym}_email").at_least(:once).and_call_original + + notify! + + should_only_email(u_master1, u_master2, u_owner) + end + + it 'emails nobody if the project is missing' do + domain.project = nil + + notify! + + should_not_email_anyone + end + end + end + + describe '#pages_domain_verification_failed' do + it 'emails current watching masters' do + notification.pages_domain_verification_failed(domain) + + should_only_email(u_master1, u_master2, u_owner) + end + end + + describe '#pages_domain_enabled' do + it 'emails current watching masters' do + notification.pages_domain_enabled(domain) + + should_only_email(u_master1, u_master2, u_owner) + end + end + + describe '#pages_domain_disabled' do + it 'emails current watching masters' do + notification.pages_domain_disabled(domain) + + should_only_email(u_master1, u_master2, u_owner) + end + end + end + def build_team(project) @u_watcher = create_global_setting_for(create(:user), :watch) @u_participating = create_global_setting_for(create(:user), :participating) diff --git a/spec/services/projects/create_from_template_service_spec.rb b/spec/services/projects/create_from_template_service_spec.rb index 9919ec254c6..609d678caea 100644 --- a/spec/services/projects/create_from_template_service_spec.rb +++ b/spec/services/projects/create_from_template_service_spec.rb @@ -4,8 +4,10 @@ describe Projects::CreateFromTemplateService do let(:user) { create(:user) } let(:project_params) do { - path: user.to_param, - template_name: 'rails' + path: user.to_param, + template_name: 'rails', + description: 'project description', + visibility_level: Gitlab::VisibilityLevel::PRIVATE } end @@ -22,5 +24,7 @@ describe Projects::CreateFromTemplateService do expect(project).to be_saved expect(project.scheduled?).to be(true) + expect(project.description).to match('project description') + expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) end end diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb index 15699574b3a..fb6d7171ac3 100644 --- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb +++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Projects::HashedStorage::MigrateAttachmentsService do subject(:service) { described_class.new(project) } - let(:project) { create(:project) } + let(:project) { create(:project, :legacy_storage) } let(:legacy_storage) { Storage::LegacyProject.new(project) } let(:hashed_storage) { Storage::HashedProject.new(project) } diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb index 7b536cc05cb..747bd4529a0 100644 --- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb +++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Projects::HashedStorage::MigrateRepositoryService do let(:gitlab_shell) { Gitlab::Shell.new } - let(:project) { create(:project, :repository, :wiki_repo) } + let(:project) { create(:project, 
:legacy_storage, :repository, :wiki_repo) } let(:service) { described_class.new(project) } let(:legacy_storage) { Storage::LegacyProject.new(project) } let(:hashed_storage) { Storage::HashedProject.new(project) } diff --git a/spec/services/projects/hashed_storage_migration_service_spec.rb b/spec/services/projects/hashed_storage_migration_service_spec.rb index 466f0b5d7c2..e8e18bb3ac0 100644 --- a/spec/services/projects/hashed_storage_migration_service_spec.rb +++ b/spec/services/projects/hashed_storage_migration_service_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Projects::HashedStorageMigrationService do - let(:project) { create(:project, :empty_repo, :wiki_repo) } + let(:project) { create(:project, :empty_repo, :wiki_repo, :legacy_storage) } subject(:service) { described_class.new(project) } describe '#execute' do diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb index ef68742a463..ae0e22e3dc0 100644 --- a/spec/services/projects/transfer_service_spec.rb +++ b/spec/services/projects/transfer_service_spec.rb @@ -4,7 +4,7 @@ describe Projects::TransferService do let(:gitlab_shell) { Gitlab::Shell.new } let(:user) { create(:user) } let(:group) { create(:group) } - let(:project) { create(:project, :repository, namespace: user.namespace) } + let(:project) { create(:project, :repository, :legacy_storage, namespace: user.namespace) } context 'namespace -> namespace' do before do @@ -214,7 +214,7 @@ describe Projects::TransferService do end context 'when hashed storage in use' do - let(:hashed_project) { create(:project, :repository, :hashed, namespace: user.namespace) } + let(:hashed_project) { create(:project, :repository, namespace: user.namespace) } before do group.add_owner(user) diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb index fc6aa713d6f..ad5a289290c 100644 --- a/spec/services/projects/update_service_spec.rb +++ b/spec/services/projects/update_service_spec.rb @@ -123,6 +123,40 @@ describe Projects::UpdateService do end end + context 'when we update project but not enabling a wiki' do + it 'does not try to create an empty wiki' do + FileUtils.rm_rf(project.wiki.repository.path) + + result = update_project(project, user, { name: 'test1' }) + + expect(result).to eq({ status: :success }) + expect(project.wiki_repository_exists?).to be false + end + end + + context 'when enabling a wiki' do + it 'creates a wiki' do + project.project_feature.update(wiki_access_level: ProjectFeature::DISABLED) + FileUtils.rm_rf(project.wiki.repository.path) + + result = update_project(project, user, project_feature_attributes: { wiki_access_level: ProjectFeature::ENABLED }) + + expect(result).to eq({ status: :success }) + expect(project.wiki_repository_exists?).to be true + expect(project.wiki_enabled?).to be true + end + + it 'logs an error and creates a metric when wiki can not be created' do + project.project_feature.update(wiki_access_level: ProjectFeature::DISABLED) + + expect_any_instance_of(ProjectWiki).to receive(:wiki).and_raise(ProjectWiki::CouldNotCreateWikiError) + expect_any_instance_of(described_class).to receive(:log_error).with("Could not create wiki for #{project.full_name}") + expect(Gitlab::Metrics).to receive(:counter) + + update_project(project, user, project_feature_attributes: { wiki_access_level: ProjectFeature::ENABLED }) + end + end + context 'when updating a project that contains container images' do before do stub_container_registry_config(enabled: 
true) @@ -150,6 +184,8 @@ describe Projects::UpdateService do let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } context 'with legacy storage' do + let(:project) { create(:project, :legacy_storage, :repository, creator: user, namespace: user.namespace) } + before do gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing") end diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb index ae160d104f1..f793f55e51b 100644 --- a/spec/services/quick_actions/interpret_service_spec.rb +++ b/spec/services/quick_actions/interpret_service_spec.rb @@ -522,6 +522,22 @@ describe QuickActions::InterpretService do let(:issuable) { merge_request } end + context 'only group milestones available' do + let(:group) { create(:group) } + let(:project) { create(:project, :public, namespace: group) } + let(:milestone) { create(:milestone, group: group, title: '10.0') } + + it_behaves_like 'milestone command' do + let(:content) { "/milestone %#{milestone.title}" } + let(:issuable) { issue } + end + + it_behaves_like 'milestone command' do + let(:content) { "/milestone %#{milestone.title}" } + let(:issuable) { merge_request } + end + end + it_behaves_like 'remove_milestone command' do let(:content) { '/remove_milestone' } let(:issuable) { issue } diff --git a/spec/services/search/snippet_service_spec.rb b/spec/services/search/snippet_service_spec.rb index bc7885b03d9..8ad162ad66e 100644 --- a/spec/services/search/snippet_service_spec.rb +++ b/spec/services/search/snippet_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Search::SnippetService do let(:author) { create(:author) } - let(:project) { create(:project) } + let(:project) { create(:project, :public) } let!(:public_snippet) { create(:snippet, :public, content: 'password: XXX') } let!(:internal_snippet) { create(:snippet, :internal, content: 'password: XXX') } diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb index 5e6c24f5730..562b89e6767 100644 --- a/spec/services/todo_service_spec.rb +++ b/spec/services/todo_service_spec.rb @@ -943,7 +943,8 @@ describe TodoService do described_class.new.mark_todos_as_done_by_ids(todo, john_doe) - expect_any_instance_of(TodosFinder).not_to receive(:execute) + # Make sure no TodosFinder is inialized to perform counting + expect(TodosFinder).not_to receive(:new) expect(john_doe.todos_done_count).to eq(1) expect(john_doe.todos_pending_count).to eq(1) diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb index bb3d73edf8e..11c75ddfcf8 100644 --- a/spec/services/users/destroy_service_spec.rb +++ b/spec/services/users/destroy_service_spec.rb @@ -173,7 +173,7 @@ describe Users::DestroyService do end context 'legacy storage' do - let!(:project) { create(:project, :empty_repo, namespace: user.namespace) } + let!(:project) { create(:project, :empty_repo, :legacy_storage, namespace: user.namespace) } it 'removes repository' do expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey @@ -181,7 +181,7 @@ describe Users::DestroyService do end context 'hashed storage' do - let!(:project) { create(:project, :empty_repo, :hashed, namespace: user.namespace) } + let!(:project) { create(:project, :empty_repo, namespace: user.namespace) } it 'removes repository' do expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to 
be_falsey diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb new file mode 100644 index 00000000000..576db1dde2d --- /dev/null +++ b/spec/services/verify_pages_domain_service_spec.rb @@ -0,0 +1,270 @@ +require 'spec_helper' + +describe VerifyPagesDomainService do + using RSpec::Parameterized::TableSyntax + include EmailHelpers + + let(:error_status) { { status: :error, message: "Couldn't verify #{domain.domain}" } } + + subject(:service) { described_class.new(domain) } + + describe '#execute' do + context 'verification code recognition (verified domain)' do + where(:domain_sym, :code_sym) do + :domain | :verification_code + :domain | :keyed_verification_code + + :verification_domain | :verification_code + :verification_domain | :keyed_verification_code + end + + with_them do + set(:domain) { create(:pages_domain) } + + let(:domain_name) { domain.send(domain_sym) } + let(:verification_code) { domain.send(code_sym) } + + it 'verifies and enables the domain' do + stub_resolver(domain_name => ['something else', verification_code]) + + expect(service.execute).to eq(status: :success) + expect(domain).to be_verified + expect(domain).to be_enabled + end + + it 'verifies and enables when the code is contained partway through a TXT record' do + stub_resolver(domain_name => "something #{verification_code} else") + + expect(service.execute).to eq(status: :success) + expect(domain).to be_verified + expect(domain).to be_enabled + end + + it 'does not verify when the code is not present' do + stub_resolver(domain_name => 'something else') + + expect(service.execute).to eq(error_status) + + expect(domain).not_to be_verified + expect(domain).to be_enabled + end + end + + context 'verified domain' do + set(:domain) { create(:pages_domain) } + + it 'unverifies (but does not disable) when the right code is not present' do + stub_resolver(domain.domain => 'something else') + + expect(service.execute).to eq(error_status) + expect(domain).not_to be_verified + expect(domain).to be_enabled + end + + it 'unverifies (but does not disable) when no records are present' do + stub_resolver + + expect(service.execute).to eq(error_status) + expect(domain).not_to be_verified + expect(domain).to be_enabled + end + end + + context 'expired domain' do + set(:domain) { create(:pages_domain, :expired) } + + it 'verifies and enables when the right code is present' do + stub_resolver(domain.domain => domain.keyed_verification_code) + + expect(service.execute).to eq(status: :success) + + expect(domain).to be_verified + expect(domain).to be_enabled + end + + it 'disables when the right code is not present' do + error_status[:message] += '. It is now disabled.' 
+ + stub_resolver + + expect(service.execute).to eq(error_status) + + expect(domain).not_to be_verified + expect(domain).not_to be_enabled + end + end + end + + context 'timeout behaviour' do + let(:domain) { create(:pages_domain) } + + it 'sets a timeout on the DNS query' do + expect(stub_resolver).to receive(:timeouts=).with(described_class::RESOLVER_TIMEOUT_SECONDS) + + service.execute + end + end + + context 'email notifications' do + let(:notification_service) { instance_double('NotificationService') } + + where(:factory, :verification_succeeds, :expected_notification) do + nil | true | nil + nil | false | :verification_failed + :reverify | true | nil + :reverify | false | :verification_failed + :unverified | true | :verification_succeeded + :unverified | false | nil + :expired | true | nil + :expired | false | :disabled + :disabled | true | :enabled + :disabled | false | nil + end + + with_them do + let(:domain) { create(:pages_domain, *[factory].compact) } + + before do + allow(service).to receive(:notification_service) { notification_service } + + if verification_succeeds + stub_resolver(domain.domain => domain.verification_code) + else + stub_resolver + end + end + + it 'sends a notification if appropriate' do + if expected_notification + expect(notification_service).to receive(:"pages_domain_#{expected_notification}").with(domain) + end + + service.execute + end + end + + context 'pages verification disabled' do + let(:domain) { create(:pages_domain, :disabled) } + + before do + stub_application_setting(pages_domain_verification_enabled: false) + allow(service).to receive(:notification_service) { notification_service } + end + + it 'skips email notifications' do + expect(notification_service).not_to receive(:pages_domain_enabled) + + service.execute + end + end + end + + context 'pages configuration updates' do + context 'enabling a disabled domain' do + let(:domain) { create(:pages_domain, :disabled) } + + it 'schedules an update' do + stub_resolver(domain.domain => domain.verification_code) + + expect(domain).to receive(:update_daemon) + + service.execute + end + end + + context 'verifying an enabled domain' do + let(:domain) { create(:pages_domain) } + + it 'schedules an update' do + stub_resolver(domain.domain => domain.verification_code) + + expect(domain).not_to receive(:update_daemon) + + service.execute + end + end + + context 'disabling an expired domain' do + let(:domain) { create(:pages_domain, :expired) } + + it 'schedules an update' do + stub_resolver + + expect(domain).to receive(:update_daemon) + + service.execute + end + end + + context 'failing to verify a disabled domain' do + let(:domain) { create(:pages_domain, :disabled) } + + it 'does not schedule an update' do + stub_resolver + + expect(domain).not_to receive(:update_daemon) + + service.execute + end + end + end + + context 'no verification code' do + let(:domain) { create(:pages_domain) } + + it 'returns an error' do + domain.verification_code = '' + + disallow_resolver! + + expect(service.execute).to eq(status: :error, message: "No verification code set for #{domain.domain}") + end + end + + context 'pages domain verification is disabled' do + let(:domain) { create(:pages_domain, :disabled) } + + before do + stub_application_setting(pages_domain_verification_enabled: false) + end + + it 'extends domain validity by unconditionally reverifying' do + disallow_resolver! 
+ + service.execute + + expect(domain).to be_verified + expect(domain).to be_enabled + end + + it 'does not shorten any grace period' do + grace = Time.now + 1.year + domain.update!(enabled_until: grace) + disallow_resolver! + + service.execute + + expect(domain.enabled_until).to be_like_time(grace) + end + end + end + + def disallow_resolver! + expect(Resolv::DNS).not_to receive(:open) + end + + def stub_resolver(stubbed_lookups = {}) + resolver = instance_double('Resolv::DNS') + allow(resolver).to receive(:timeouts=) + + expect(Resolv::DNS).to receive(:open).and_yield(resolver) + + allow(resolver).to receive(:getresources) { [] } + stubbed_lookups.each do |domain, records| + records = Array(records).map { |txt| Resolv::DNS::Resource::IN::TXT.new(txt) } + allow(resolver).to receive(:getresources).with(domain, Resolv::DNS::Resource::IN::TXT) { records } + end + + resolver + end +end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 85de0a14631..9f6f0204a16 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -154,6 +154,22 @@ RSpec.configure do |config| Sidekiq.redis(&:flushall) end + # The :each scope runs "inside" the example, so this hook ensures the DB is in the + # correct state before any examples' before hooks are called. This prevents a + # problem where `ScheduleIssuesClosedAtTypeChange` (or any migration that depends + # on background migrations being run inline during test setup) can be broken by + # altering Sidekiq behavior in an unrelated spec like so: + # + # around do |example| + # Sidekiq::Testing.fake! do + # example.run + # end + # end + config.before(:context, :migration) do + schema_migrate_down! + end + + # Each example may call `migrate!`, so we must ensure we are migrated down every time config.before(:each, :migration) do schema_migrate_down! end @@ -169,6 +185,18 @@ RSpec.configure do |config| config.around(:each, :postgresql) do |example| example.run if Gitlab::Database.postgresql? end + + config.around(:each, :mysql) do |example| + example.run if Gitlab::Database.mysql? + end + + # This makes sure the `ApplicationController#can?` method is stubbed with the + # original implementation for all view specs. + config.before(:each, type: :view) do + allow(view).to receive(:can?) do |*args| + Ability.allowed?(*args) + end + end end # add simpler way to match asset paths containing digest strings diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb index 5189c57b7db..8603b7f3e2c 100644 --- a/spec/support/capybara.rb +++ b/spec/support/capybara.rb @@ -78,8 +78,10 @@ RSpec.configure do |config| end config.after(:example, :js) do |example| - # prevent localstorage from introducing side effects based on test order - execute_script("localStorage.clear();") + # prevent localStorage from introducing side effects based on test order + unless ['', 'about:blank', 'data:,'].include? Capybara.current_session.driver.browser.current_url + execute_script("localStorage.clear();") + end # capybara/rspec already calls Capybara.reset_sessions! 
in an `after` hook, # but `block_and_wait_for_requests_complete` is called before it so by diff --git a/spec/support/cluster_application_spec.rb b/spec/support/cluster_application_spec.rb deleted file mode 100644 index ab77910a050..00000000000 --- a/spec/support/cluster_application_spec.rb +++ /dev/null @@ -1,105 +0,0 @@ -shared_examples 'cluster application specs' do - let(:factory_name) { described_class.to_s.downcase.gsub("::", "_") } - - describe '#name' do - it 'is .application_name' do - expect(subject.name).to eq(described_class.application_name) - end - - it 'is recorded in Clusters::Cluster::APPLICATIONS' do - expect(Clusters::Cluster::APPLICATIONS[subject.name]).to eq(described_class) - end - end - - describe '#status' do - let(:cluster) { create(:cluster, :provided_by_gcp) } - - subject { described_class.new(cluster: cluster) } - - it 'defaults to :not_installable' do - expect(subject.status_name).to be(:not_installable) - end - - context 'when application helm is scheduled' do - before do - create(factory_name, :scheduled, cluster: cluster) - end - - it 'defaults to :not_installable' do - expect(subject.status_name).to be(:not_installable) - end - end - - context 'when application helm is installed' do - before do - create(:clusters_applications_helm, :installed, cluster: cluster) - end - - it 'defaults to :installable' do - expect(subject.status_name).to be(:installable) - end - end - end - - describe '#install_command' do - it 'has all the needed information' do - expect(subject.install_command).to have_attributes(name: subject.name, install_helm: false) - end - end - - describe 'status state machine' do - describe '#make_installing' do - subject { create(factory_name, :scheduled) } - - it 'is installing' do - subject.make_installing! - - expect(subject).to be_installing - end - end - - describe '#make_installed' do - subject { create(factory_name, :installing) } - - it 'is installed' do - subject.make_installed - - expect(subject).to be_installed - end - end - - describe '#make_errored' do - subject { create(factory_name, :installing) } - let(:reason) { 'some errors' } - - it 'is errored' do - subject.make_errored(reason) - - expect(subject).to be_errored - expect(subject.status_reason).to eq(reason) - end - end - - describe '#make_scheduled' do - subject { create(factory_name, :installable) } - - it 'is scheduled' do - subject.make_scheduled - - expect(subject).to be_scheduled - end - - describe 'when was errored' do - subject { create(factory_name, :errored) } - - it 'clears #status_reason' do - expect(subject.status_reason).not_to be_nil - - subject.make_scheduled! 
- - expect(subject.status_reason).to be_nil - end - end - end - end -end diff --git a/spec/support/controllers/githubish_import_controller_shared_examples.rb b/spec/support/controllers/githubish_import_controller_shared_examples.rb index a0839eefe6c..3321f920666 100644 --- a/spec/support/controllers/githubish_import_controller_shared_examples.rb +++ b/spec/support/controllers/githubish_import_controller_shared_examples.rb @@ -92,6 +92,7 @@ end shared_examples 'a GitHub-ish import controller: POST create' do let(:user) { create(:user) } + let(:project) { create(:project) } let(:provider_username) { user.username } let(:provider_user) { OpenStruct.new(login: provider_username) } let(:provider_repo) do @@ -107,14 +108,34 @@ shared_examples 'a GitHub-ish import controller: POST create' do assign_session_token(provider) end + it 'returns 200 response when the project is imported successfully' do + allow(Gitlab::LegacyGithubImport::ProjectCreator) + .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider) + .and_return(double(execute: project)) + + post :create, format: :json + + expect(response).to have_gitlab_http_status(200) + end + + it 'returns 422 response when the project could not be imported' do + allow(Gitlab::LegacyGithubImport::ProjectCreator) + .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider) + .and_return(double(execute: build(:project))) + + post :create, format: :json + + expect(response).to have_gitlab_http_status(422) + end + context "when the repository owner is the provider user" do context "when the provider user and GitLab user's usernames match" do it "takes the current user's namespace" do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -124,9 +145,9 @@ shared_examples 'a GitHub-ish import controller: POST create' do it "takes the current user's namespace" do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end end @@ -151,9 +172,9 @@ shared_examples 'a GitHub-ish import controller: POST create' do it "takes the existing namespace" do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, provider_repo.name, existing_namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end @@ -163,9 +184,9 @@ shared_examples 'a GitHub-ish import controller: POST create' do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end end @@ -174,17 +195,17 @@ shared_examples 'a GitHub-ish import controller: POST create' do context "when current user can create namespaces" do it "creates the namespace" do expect(Gitlab::LegacyGithubImport::ProjectCreator) - .to 
receive(:new).and_return(double(execute: true)) + .to receive(:new).and_return(double(execute: project)) - expect { post :create, target_namespace: provider_repo.name, format: :js }.to change(Namespace, :count).by(1) + expect { post :create, target_namespace: provider_repo.name, format: :json }.to change(Namespace, :count).by(1) end it "takes the new namespace" do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, provider_repo.name, an_instance_of(Group), user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, target_namespace: provider_repo.name, format: :js + post :create, target_namespace: provider_repo.name, format: :json end end @@ -195,17 +216,17 @@ shared_examples 'a GitHub-ish import controller: POST create' do it "doesn't create the namespace" do expect(Gitlab::LegacyGithubImport::ProjectCreator) - .to receive(:new).and_return(double(execute: true)) + .to receive(:new).and_return(double(execute: project)) - expect { post :create, format: :js }.not_to change(Namespace, :count) + expect { post :create, format: :json }.not_to change(Namespace, :count) end it "takes the current user's namespace" do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, format: :js + post :create, format: :json end end end @@ -221,21 +242,21 @@ shared_examples 'a GitHub-ish import controller: POST create' do it 'takes the selected namespace and name' do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, test_namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: test_namespace.name, new_name: test_name, format: :js } + post :create, { target_namespace: test_namespace.name, new_name: test_name, format: :json } end it 'takes the selected name and default namespace' do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { new_name: test_name, format: :js } + post :create, { new_name: test_name, format: :json } end end - context 'user has chosen an existing nested namespace and name for the project' do + context 'user has chosen an existing nested namespace and name for the project', :postgresql do let(:parent_namespace) { create(:group, name: 'foo', owner: user) } let(:nested_namespace) { create(:group, name: 'bar', parent: parent_namespace) } let(:test_name) { 'test_name' } @@ -247,63 +268,124 @@ shared_examples 'a GitHub-ish import controller: POST create' do it 'takes the selected namespace and name' do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, nested_namespace, user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js } + post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :json } end end - context 'user has chosen a non-existent nested namespaces and name for the project' do + context 'user has chosen a 
non-existent nested namespaces and name for the project', :postgresql do let(:test_name) { 'test_name' } it 'takes the selected namespace and name' do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } end it 'creates the namespaces' do allow(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } } + expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } } .to change { Namespace.count }.by(2) end it 'new namespace has the right parent' do allow(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } + post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo') end end - context 'user has chosen existent and non-existent nested namespaces and name for the project' do + context 'user has chosen existent and non-existent nested namespaces and name for the project', :postgresql do let(:test_name) { 'test_name' } let!(:parent_namespace) { create(:group, name: 'foo', owner: user) } + before do + parent_namespace.add_owner(user) + end + it 'takes the selected namespace and name' do expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } + post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json } end it 'creates the namespaces' do allow(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider) - .and_return(double(execute: true)) + .and_return(double(execute: project)) - expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } } + expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json } } .to change { Namespace.count }.by(2) end + + it 'does not create a new namespace under the user namespace' do + expect(Gitlab::LegacyGithubImport::ProjectCreator) + .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider) + .and_return(double(execute: build_stubbed(:project))) + + expect { post :create, { target_namespace: "#{user.namespace_path}/test_group", new_name: test_name, format: :js } } + .not_to change { Namespace.count } + end + end + + context 'user cannot create a subgroup inside a group is not a member of' do + let(:test_name) { 'test_name' } + let!(:parent_namespace) { create(:group, name: 'foo') } + + it 'does not 
take the selected namespace and name' do + expect(Gitlab::LegacyGithubImport::ProjectCreator) + .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider) + .and_return(double(execute: build_stubbed(:project))) + + post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } + end + + it 'does not create the namespaces' do + allow(Gitlab::LegacyGithubImport::ProjectCreator) + .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider) + .and_return(double(execute: build_stubbed(:project))) + + expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } } + .not_to change { Namespace.count } + end + end + + context 'user can use a group without having permissions to create a group' do + let(:test_name) { 'test_name' } + let!(:group) { create(:group, name: 'foo') } + + it 'takes the selected namespace and name' do + group.add_owner(user) + user.update!(can_create_group: false) + + expect(Gitlab::LegacyGithubImport::ProjectCreator) + .to receive(:new).with(provider_repo, test_name, group, user, access_params, type: provider) + .and_return(double(execute: build_stubbed(:project))) + + post :create, { target_namespace: 'foo', new_name: test_name, format: :js } + end + end + + context 'when user can not create projects in the chosen namespace' do + it 'returns 422 response' do + other_namespace = create(:group, name: 'other_namespace') + + post :create, { target_namespace: other_namespace.name, format: :json } + + expect(response).to have_gitlab_http_status(422) + end end end end diff --git a/spec/support/cycle_analytics_helpers.rb b/spec/support/cycle_analytics_helpers.rb index d5ef80cfab2..73cc64c0b74 100644 --- a/spec/support/cycle_analytics_helpers.rb +++ b/spec/support/cycle_analytics_helpers.rb @@ -26,7 +26,19 @@ module CycleAnalyticsHelpers ref: 'refs/heads/master').execute end - def create_merge_request_closing_issue(issue, message: nil, source_branch: nil, commit_message: 'commit message') + def create_cycle(user, project, issue, mr, milestone, pipeline) + issue.update(milestone: milestone) + pipeline.run + + ci_build = create(:ci_build, pipeline: pipeline, status: :success, author: user) + + merge_merge_requests_closing_issue(user, project, issue) + ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash) + + ci_build + end + + def create_merge_request_closing_issue(user, project, issue, message: nil, source_branch: nil, commit_message: 'commit message') if !source_branch || project.repository.commit(source_branch).blank? 
source_branch = generate(:branch) project.repository.add_branch(user, source_branch, 'master') @@ -52,19 +64,19 @@ module CycleAnalyticsHelpers mr end - def merge_merge_requests_closing_issue(issue) + def merge_merge_requests_closing_issue(user, project, issue) merge_requests = issue.closed_by_merge_requests(user) merge_requests.each { |merge_request| MergeRequests::MergeService.new(project, user).execute(merge_request) } end - def deploy_master(environment: 'production') + def deploy_master(user, project, environment: 'production') dummy_job = case environment when 'production' - dummy_production_job + dummy_production_job(user, project) when 'staging' - dummy_staging_job + dummy_staging_job(user, project) else raise ArgumentError end @@ -72,25 +84,24 @@ module CycleAnalyticsHelpers CreateDeploymentService.new(dummy_job).execute end - def dummy_production_job - @dummy_job ||= new_dummy_job('production') + def dummy_production_job(user, project) + new_dummy_job(user, project, 'production') end - def dummy_staging_job - @dummy_job ||= new_dummy_job('staging') + def dummy_staging_job(user, project) + new_dummy_job(user, project, 'staging') end - def dummy_pipeline - @dummy_pipeline ||= - Ci::Pipeline.new( - sha: project.repository.commit('master').sha, - ref: 'master', - source: :push, - project: project, - protected: false) + def dummy_pipeline(project) + Ci::Pipeline.new( + sha: project.repository.commit('master').sha, + ref: 'master', + source: :push, + project: project, + protected: false) end - def new_dummy_job(environment) + def new_dummy_job(user, project, environment) project.environments.find_or_create_by(name: environment) Ci::Build.new( @@ -101,7 +112,7 @@ module CycleAnalyticsHelpers tag: false, name: 'dummy', stage: 'dummy', - pipeline: dummy_pipeline, + pipeline: dummy_pipeline(project), protected: false) end diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb index 1809ae1d141..5edc5de2a09 100644 --- a/spec/support/db_cleaner.rb +++ b/spec/support/db_cleaner.rb @@ -28,11 +28,11 @@ RSpec.configure do |config| end config.before(:each, :js) do - DatabaseCleaner.strategy = :deletion + DatabaseCleaner.strategy = :deletion, { cache_tables: false } end config.before(:each, :delete) do - DatabaseCleaner.strategy = :deletion + DatabaseCleaner.strategy = :deletion, { cache_tables: false } end config.before(:each, :migration) do diff --git a/spec/support/factory_girl.rb b/spec/support/factory_bot.rb index c7890e49c66..c7890e49c66 100644 --- a/spec/support/factory_girl.rb +++ b/spec/support/factory_bot.rb diff --git a/spec/support/features/variable_list_shared_examples.rb b/spec/support/features/variable_list_shared_examples.rb index 83bf06b6727..f7f851eb1eb 100644 --- a/spec/support/features/variable_list_shared_examples.rb +++ b/spec/support/features/variable_list_shared_examples.rb @@ -41,13 +41,13 @@ shared_examples 'variable list' do end end - it 'adds new unprotected variable' do + it 'adds new protected variable' do page.within('.js-ci-variable-list-section .js-row:last-child') do find('.js-ci-variable-input-key').set('key') find('.js-ci-variable-input-value').set('key value') find('.ci-variable-protected-item .js-project-feature-toggle').click - expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('false') + expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true') end click_button('Save variables') @@ -59,7 +59,7 @@ shared_examples 'variable list' do page.within('.js-ci-variable-list-section .js-row:nth-child(1)') 
do expect(find('.js-ci-variable-input-key').value).to eq('key') expect(find('.js-ci-variable-input-value', visible: false).value).to eq('key value') - expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('false') + expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true') end end @@ -143,7 +143,6 @@ shared_examples 'variable list' do page.within('.js-ci-variable-list-section .js-row:last-child') do find('.js-ci-variable-input-key').set('unprotected_key') find('.js-ci-variable-input-value').set('unprotected_value') - find('.ci-variable-protected-item .js-project-feature-toggle').click expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('false') end @@ -178,6 +177,7 @@ shared_examples 'variable list' do page.within('.js-ci-variable-list-section .js-row:last-child') do find('.js-ci-variable-input-key').set('protected_key') find('.js-ci-variable-input-value').set('protected_value') + find('.ci-variable-protected-item .js-project-feature-toggle').click expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true') end @@ -261,9 +261,11 @@ shared_examples 'variable list' do click_button('Save variables') wait_for_requests + expect(all('.js-ci-variable-list-section .js-ci-variable-error-box ul li').count).to eq(1) + # We check the first row because it re-sorts to alphabetical order on refresh page.within('.js-ci-variable-list-section') do - expect(find('.js-ci-variable-error-box')).to have_content('Validation failed Variables Duplicate variables: samekey') + expect(find('.js-ci-variable-error-box')).to have_content(/Validation failed Variables have duplicate values \(.+\)/) end end end diff --git a/spec/support/fixture_helpers.rb b/spec/support/fixture_helpers.rb index 128aaaf25fe..8854382dc6b 100644 --- a/spec/support/fixture_helpers.rb +++ b/spec/support/fixture_helpers.rb @@ -1,12 +1,12 @@ module FixtureHelpers - def fixture_file(filename) + def fixture_file(filename, dir: '') return '' if filename.blank? - File.read(expand_fixture_path(filename)) + File.read(expand_fixture_path(filename, dir: dir)) end - def expand_fixture_path(filename) - File.expand_path(Rails.root.join('spec/fixtures/', filename)) + def expand_fixture_path(filename, dir: '') + File.expand_path(Rails.root.join(dir, 'spec', 'fixtures', filename)) end end diff --git a/spec/support/gitlab-git-test.git/packed-refs b/spec/support/gitlab-git-test.git/packed-refs index 507e4ce785a..ea50e4ad3f6 100644 --- a/spec/support/gitlab-git-test.git/packed-refs +++ b/spec/support/gitlab-git-test.git/packed-refs @@ -1,4 +1,4 @@ -# pack-refs with: peeled fully-peeled +# pack-refs with: peeled fully-peeled sorted 0b4bc9a49b562e85de7cc9e834518ea6828729b9 refs/heads/feature 12d65c8dd2b2676fa3ac47d955accc085a37a9c1 refs/heads/fix 6473c90867124755509e100d0d35ebdc85a0b6ae refs/heads/fix-blob-path diff --git a/spec/support/ldap_helpers.rb b/spec/support/ldap_helpers.rb index 28d39a32f02..081ce0ad7b7 100644 --- a/spec/support/ldap_helpers.rb +++ b/spec/support/ldap_helpers.rb @@ -1,13 +1,13 @@ module LdapHelpers def ldap_adapter(provider = 'ldapmain', ldap = double(:ldap)) - ::Gitlab::LDAP::Adapter.new(provider, ldap) + ::Gitlab::Auth::LDAP::Adapter.new(provider, ldap) end def user_dn(uid) "uid=#{uid},ou=users,dc=example,dc=com" end - # Accepts a hash of Gitlab::LDAP::Config keys and values. + # Accepts a hash of Gitlab::Auth::LDAP::Config keys and values. 
# # Example: # stub_ldap_config( @@ -15,21 +15,21 @@ module LdapHelpers # admin_group: 'my-admin-group' # ) def stub_ldap_config(messages) - allow_any_instance_of(::Gitlab::LDAP::Config).to receive_messages(messages) + allow_any_instance_of(::Gitlab::Auth::LDAP::Config).to receive_messages(messages) end # Stub an LDAP person search and provide the return entry. Specify `nil` for # `entry` to simulate when an LDAP person is not found # # Example: - # adapter = ::Gitlab::LDAP::Adapter.new('ldapmain', double(:ldap)) + # adapter = ::Gitlab::Auth::LDAP::Adapter.new('ldapmain', double(:ldap)) # ldap_user_entry = ldap_user_entry('john_doe') # # stub_ldap_person_find_by_uid('john_doe', ldap_user_entry, adapter) def stub_ldap_person_find_by_uid(uid, entry, provider = 'ldapmain') - return_value = ::Gitlab::LDAP::Person.new(entry, provider) if entry.present? + return_value = ::Gitlab::Auth::LDAP::Person.new(entry, provider) if entry.present? - allow(::Gitlab::LDAP::Person) + allow(::Gitlab::Auth::LDAP::Person) .to receive(:find_by_uid).with(uid, any_args).and_return(return_value) end diff --git a/spec/support/login_helpers.rb b/spec/support/login_helpers.rb index b52b6a28c54..d08183846a0 100644 --- a/spec/support/login_helpers.rb +++ b/spec/support/login_helpers.rb @@ -138,7 +138,7 @@ module LoginHelpers Rails.application.routes.draw do post '/users/auth/saml' => 'omniauth_callbacks#saml' end - allow(Gitlab::OAuth::Provider).to receive_messages(providers: [:saml], config_for: mock_saml_config) + allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [:saml], config_for: mock_saml_config) stub_omniauth_setting(messages) allow_any_instance_of(Object).to receive(:user_saml_omniauth_authorize_path).and_return('/users/auth/saml') allow_any_instance_of(Object).to receive(:omniauth_authorize_path).with(:user, "saml").and_return('/users/auth/saml') @@ -149,10 +149,10 @@ module LoginHelpers end def stub_basic_saml_config - allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', args: {} } }) + allow(Gitlab::Auth::Saml::Config).to receive_messages({ options: { name: 'saml', args: {} } }) end def stub_saml_group_config(groups) - allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', groups_attribute: 'groups', external_groups: groups, args: {} } }) + allow(Gitlab::Auth::Saml::Config).to receive_messages({ options: { name: 'saml', groups_attribute: 'groups', external_groups: groups, args: {} } }) end end diff --git a/spec/support/migrations_helpers.rb b/spec/support/migrations_helpers.rb index 6522d74ba89..6bf976a2cf9 100644 --- a/spec/support/migrations_helpers.rb +++ b/spec/support/migrations_helpers.rb @@ -15,18 +15,27 @@ module MigrationsHelpers ActiveRecord::Migrator.migrations(migrations_paths) end - def reset_column_in_migration_models + def clear_schema_cache! ActiveRecord::Base.connection_pool.connections.each do |conn| conn.schema_cache.clear! end + end - described_class.constants.sort.each do |name| - const = described_class.const_get(name) + def reset_column_in_all_models + clear_schema_cache! - if const.is_a?(Class) && const < ActiveRecord::Base - const.reset_column_information - end - end + # Reset column information for the most offending classes **after** we + # migrated the schema up, otherwise, column information could be + # outdated. We have a separate method for this so we can override it in EE. + ActiveRecord::Base.descendants.each(&method(:reset_column_information)) + + # Without that, we get errors because of missing attributes, e.g. 
+ # super: no superclass method `elasticsearch_indexing' for #<ApplicationSetting:0x00007f85628508d8> + ApplicationSetting.define_attribute_methods + end + + def reset_column_information(klass) + klass.reset_column_information end def previous_migration @@ -36,7 +45,13 @@ module MigrationsHelpers end def migration_schema_version - self.class.metadata[:schema] || previous_migration.version + metadata_schema = self.class.metadata[:schema] + + if metadata_schema == :latest + migrations.last.version + else + metadata_schema || previous_migration.version + end end def schema_migrate_down! @@ -45,15 +60,17 @@ module MigrationsHelpers migration_schema_version) end - reset_column_in_migration_models + reset_column_in_all_models end def schema_migrate_up! + reset_column_in_all_models + disable_migrations_output do ActiveRecord::Migrator.migrate(migrations_paths) end - reset_column_in_migration_models + reset_column_in_all_models end def disable_migrations_output diff --git a/spec/support/migrations_helpers/track_untracked_uploads_helpers.rb b/spec/support/migrations_helpers/track_untracked_uploads_helpers.rb new file mode 100644 index 00000000000..016bcfa9b1b --- /dev/null +++ b/spec/support/migrations_helpers/track_untracked_uploads_helpers.rb @@ -0,0 +1,128 @@ +module MigrationsHelpers + module TrackUntrackedUploadsHelpers + PUBLIC_DIR = File.join(Rails.root, 'tmp', 'tests', 'public') + UPLOADS_DIR = File.join(PUBLIC_DIR, 'uploads') + SYSTEM_DIR = File.join(UPLOADS_DIR, '-', 'system') + UPLOAD_FILENAME = 'image.png'.freeze + FIXTURE_FILE_PATH = File.join(Rails.root, 'spec', 'fixtures', 'dk.png') + FIXTURE_CHECKSUM = 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75'.freeze + + def create_or_update_appearance(logo: false, header_logo: false) + appearance = appearances.first_or_create(title: 'foo', description: 'bar', logo: (UPLOAD_FILENAME if logo), header_logo: (UPLOAD_FILENAME if header_logo)) + + add_upload(appearance, 'Appearance', 'logo', 'AttachmentUploader') if logo + add_upload(appearance, 'Appearance', 'header_logo', 'AttachmentUploader') if header_logo + + appearance + end + + def create_group(avatar: false) + index = unique_index(:group) + group = namespaces.create(name: "group#{index}", path: "group#{index}", avatar: (UPLOAD_FILENAME if avatar)) + + add_upload(group, 'Group', 'avatar', 'AvatarUploader') if avatar + + group + end + + def create_note(attachment: false) + note = notes.create(attachment: (UPLOAD_FILENAME if attachment)) + + add_upload(note, 'Note', 'attachment', 'AttachmentUploader') if attachment + + note + end + + def create_project(avatar: false) + group = create_group + project = projects.create(namespace_id: group.id, path: "project#{unique_index(:project)}", avatar: (UPLOAD_FILENAME if avatar)) + routes.create(path: "#{group.path}/#{project.path}", source_id: project.id, source_type: 'Project') # so Project.find_by_full_path works + + add_upload(project, 'Project', 'avatar', 'AvatarUploader') if avatar + + project + end + + def create_user(avatar: false) + user = users.create(email: "foo#{unique_index(:user)}@bar.com", avatar: (UPLOAD_FILENAME if avatar), projects_limit: 100) + + add_upload(user, 'User', 'avatar', 'AvatarUploader') if avatar + + user + end + + def unique_index(name = :unnamed) + @unique_index ||= {} + @unique_index[name] ||= 0 + @unique_index[name] += 1 + end + + def add_upload(model, model_type, attachment_type, uploader) + file_path = upload_file_path(model, model_type, attachment_type) + path_relative_to_public = 
file_path.sub("#{PUBLIC_DIR}/", '') + create_file(file_path) + + uploads.create!( + size: 1062, + path: path_relative_to_public, + model_id: model.id, + model_type: model_type == 'Group' ? 'Namespace' : model_type, + uploader: uploader, + checksum: FIXTURE_CHECKSUM + ) + end + + def add_markdown_attachment(project, hashed_storage: false) + project_dir = hashed_storage ? hashed_project_uploads_dir(project) : legacy_project_uploads_dir(project) + attachment_dir = File.join(project_dir, SecureRandom.hex) + attachment_file_path = File.join(attachment_dir, UPLOAD_FILENAME) + project_attachment_path_relative_to_project = attachment_file_path.sub("#{project_dir}/", '') + create_file(attachment_file_path) + + uploads.create!( + size: 1062, + path: project_attachment_path_relative_to_project, + model_id: project.id, + model_type: 'Project', + uploader: 'FileUploader', + checksum: FIXTURE_CHECKSUM + ) + end + + def legacy_project_uploads_dir(project) + namespace = namespaces.find_by(id: project.namespace_id) + File.join(UPLOADS_DIR, namespace.path, project.path) + end + + def hashed_project_uploads_dir(project) + File.join(UPLOADS_DIR, '@hashed', 'aa', 'aaaaaaaaaaaa') + end + + def upload_file_path(model, model_type, attachment_type) + dir = File.join(upload_dir(model_type.downcase, attachment_type.to_s), model.id.to_s) + File.join(dir, UPLOAD_FILENAME) + end + + def upload_dir(model_type, attachment_type) + File.join(SYSTEM_DIR, model_type, attachment_type) + end + + def create_file(path) + File.delete(path) if File.exist?(path) + FileUtils.mkdir_p(File.dirname(path)) + FileUtils.cp(FIXTURE_FILE_PATH, path) + end + + def get_uploads(model, model_type) + uploads.where(model_type: model_type, model_id: model.id) + end + + def get_full_path(project) + routes.find_by(source_id: project.id, source_type: 'Project').path + end + + def ensure_temporary_tracking_table_exists + Gitlab::BackgroundMigration::PrepareUntrackedUploads.new.send(:ensure_temporary_tracking_table_exists) + end + end +end diff --git a/spec/support/prometheus/additional_metrics_shared_examples.rb b/spec/support/prometheus/additional_metrics_shared_examples.rb index dbbd4ad4d40..c7c3346d39e 100644 --- a/spec/support/prometheus/additional_metrics_shared_examples.rb +++ b/spec/support/prometheus/additional_metrics_shared_examples.rb @@ -12,11 +12,12 @@ RSpec.shared_examples 'additional metrics query' do let(:client) { double('prometheus_client') } let(:query_result) { described_class.new(client).query(*query_params) } - let(:environment) { create(:environment, slug: 'environment-slug') } + let(:project) { create(:project) } + let(:environment) { create(:environment, slug: 'environment-slug', project: project) } before do allow(client).to receive(:label_values).and_return(metric_names) - allow(metric_group_class).to receive(:all).and_return([simple_metric_group(metrics: [simple_metric])]) + allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group(metrics: [simple_metric])]) end context 'metrics query context' do @@ -24,13 +25,14 @@ RSpec.shared_examples 'additional metrics query' do shared_examples 'query context containing environment slug and filter' do it 'contains ci_environment_slug' do - expect(subject).to receive(:query_metrics).with(hash_including(ci_environment_slug: environment.slug)) + expect(subject).to receive(:query_metrics).with(project, hash_including(ci_environment_slug: environment.slug)) subject.query(*query_params) end it 'contains environment filter' do expect(subject).to 
receive(:query_metrics).with( + project, hash_including( environment_filter: "container_name!=\"POD\",environment=\"#{environment.slug}\"" ) @@ -48,7 +50,7 @@ RSpec.shared_examples 'additional metrics query' do it_behaves_like 'query context containing environment slug and filter' it 'query context contains kube_namespace' do - expect(subject).to receive(:query_metrics).with(hash_including(kube_namespace: kube_namespace)) + expect(subject).to receive(:query_metrics).with(project, hash_including(kube_namespace: kube_namespace)) subject.query(*query_params) end @@ -72,7 +74,7 @@ RSpec.shared_examples 'additional metrics query' do it_behaves_like 'query context containing environment slug and filter' it 'query context contains empty kube_namespace' do - expect(subject).to receive(:query_metrics).with(hash_including(kube_namespace: '')) + expect(subject).to receive(:query_metrics).with(project, hash_including(kube_namespace: '')) subject.query(*query_params) end @@ -81,7 +83,7 @@ RSpec.shared_examples 'additional metrics query' do context 'with one group where two metrics is found' do before do - allow(metric_group_class).to receive(:all).and_return([simple_metric_group]) + allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group]) end context 'some queries return results' do @@ -117,7 +119,7 @@ RSpec.shared_examples 'additional metrics query' do let(:metrics) { [simple_metric(queries: [simple_query])] } before do - allow(metric_group_class).to receive(:all).and_return( + allow(metric_group_class).to receive(:common_metrics).and_return( [ simple_metric_group(name: 'group_a', metrics: [simple_metric(queries: [simple_query])]), simple_metric_group(name: 'group_b', metrics: [simple_metric(title: 'title_b', queries: [simple_query('b')])]) diff --git a/spec/support/reactive_caching_helpers.rb b/spec/support/reactive_caching_helpers.rb index 34124f02133..e22dd974c6a 100644 --- a/spec/support/reactive_caching_helpers.rb +++ b/spec/support/reactive_caching_helpers.rb @@ -13,6 +13,12 @@ module ReactiveCachingHelpers write_reactive_cache(subject, data, *qualifiers) if data end + def synchronous_reactive_cache(subject) + allow(service).to receive(:with_reactive_cache) do |*args, &block| + block.call(service.calculate_reactive_cache(*args)) + end + end + def read_reactive_cache(subject, *qualifiers) Rails.cache.read(reactive_cache_key(subject, *qualifiers)) end diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb index 7ce80c82439..ea7dbade171 100644 --- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb +++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb @@ -89,6 +89,19 @@ shared_examples 'handle uploads' do end end + context "when neither the uploader nor the model exists" do + before do + allow_any_instance_of(Upload).to receive(:build_uploader).and_return(nil) + allow(controller).to receive(:find_model).and_return(nil) + end + + it "responds with status 404" do + show_upload + + expect(response).to have_gitlab_http_status(404) + end + end + context "when the file doesn't exist" do before do allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false) diff --git a/spec/support/shared_examples/models/cluster_application_core_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_core_shared_examples.rb new file mode 100644 index 00000000000..87d12a784ba --- /dev/null +++ 
b/spec/support/shared_examples/models/cluster_application_core_shared_examples.rb @@ -0,0 +1,70 @@ +shared_examples 'cluster application core specs' do |application_name| + it { is_expected.to belong_to(:cluster) } + it { is_expected.to validate_presence_of(:cluster) } + + describe '#name' do + it 'is .application_name' do + expect(subject.name).to eq(described_class.application_name) + end + + it 'is recorded in Clusters::Cluster::APPLICATIONS' do + expect(Clusters::Cluster::APPLICATIONS[subject.name]).to eq(described_class) + end + end + + describe 'status state machine' do + describe '#make_installing' do + subject { create(application_name, :scheduled) } + + it 'is installing' do + subject.make_installing! + + expect(subject).to be_installing + end + end + + describe '#make_installed' do + subject { create(application_name, :installing) } + + it 'is installed' do + subject.make_installed + + expect(subject).to be_installed + end + end + + describe '#make_errored' do + subject { create(application_name, :installing) } + let(:reason) { 'some errors' } + + it 'is errored' do + subject.make_errored(reason) + + expect(subject).to be_errored + expect(subject.status_reason).to eq(reason) + end + end + + describe '#make_scheduled' do + subject { create(application_name, :installable) } + + it 'is scheduled' do + subject.make_scheduled + + expect(subject).to be_scheduled + end + + describe 'when was errored' do + subject { create(application_name, :errored) } + + it 'clears #status_reason' do + expect(subject.status_reason).not_to be_nil + + subject.make_scheduled! + + expect(subject.status_reason).to be_nil + end + end + end + end +end diff --git a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb new file mode 100644 index 00000000000..765dd32f4ba --- /dev/null +++ b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb @@ -0,0 +1,31 @@ +shared_examples 'cluster application status specs' do |application_name| + describe '#status' do + let(:cluster) { create(:cluster, :provided_by_gcp) } + + subject { described_class.new(cluster: cluster) } + + it 'sets a default status' do + expect(subject.status_name).to be(:not_installable) + end + + context 'when application helm is scheduled' do + before do + create(:clusters_applications_helm, :scheduled, cluster: cluster) + end + + it 'defaults to :not_installable' do + expect(subject.status_name).to be(:not_installable) + end + end + + context 'when application is scheduled' do + before do + create(:clusters_applications_helm, :installed, cluster: cluster) + end + + it 'sets a default status' do + expect(subject.status_name).to be(:installable) + end + end + end +end diff --git a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb index 4e18804b937..9fc2fbef449 100644 --- a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb @@ -17,12 +17,88 @@ shared_examples 'custom attributes endpoints' do |attributable_name| end end - it 'filters by custom attributes' do - get api("/#{attributable_name}", admin), custom_attributes: { foo: 'foo', bar: 'bar' } + context 'with an authorized user' do + it 'filters by custom attributes' do + get api("/#{attributable_name}", admin), custom_attributes: { foo: 
'foo', bar: 'bar' } - expect(response).to have_gitlab_http_status(200) - expect(json_response.size).to be 1 - expect(json_response.first['id']).to eq attributable.id + expect(response).to have_gitlab_http_status(200) + expect(json_response.size).to be 1 + expect(json_response.first['id']).to eq attributable.id + end + end + end + + describe "GET /#{attributable_name} with custom attributes" do + before do + other_attributable + end + + context 'with an unauthorized user' do + it 'does not include custom attributes' do + get api("/#{attributable_name}", user), with_custom_attributes: true + + expect(response).to have_gitlab_http_status(200) + expect(json_response.size).to be 2 + expect(json_response.first).not_to include 'custom_attributes' + end + end + + context 'with an authorized user' do + it 'does not include custom attributes by default' do + get api("/#{attributable_name}", admin) + + expect(response).to have_gitlab_http_status(200) + expect(json_response.size).to be 2 + expect(json_response.first).not_to include 'custom_attributes' + expect(json_response.second).not_to include 'custom_attributes' + end + + it 'includes custom attributes if requested' do + get api("/#{attributable_name}", admin), with_custom_attributes: true + + expect(response).to have_gitlab_http_status(200) + expect(json_response.size).to be 2 + + attributable_response = json_response.find { |r| r['id'] == attributable.id } + other_attributable_response = json_response.find { |r| r['id'] == other_attributable.id } + + expect(attributable_response['custom_attributes']).to contain_exactly( + { 'key' => 'foo', 'value' => 'foo' }, + { 'key' => 'bar', 'value' => 'bar' } + ) + + expect(other_attributable_response['custom_attributes']).to eq [] + end + end + end + + describe "GET /#{attributable_name}/:id with custom attributes" do + context 'with an unauthorized user' do + it 'does not include custom attributes' do + get api("/#{attributable_name}/#{attributable.id}", user), with_custom_attributes: true + + expect(response).to have_gitlab_http_status(200) + expect(json_response).not_to include 'custom_attributes' + end + end + + context 'with an authorized user' do + it 'does not include custom attributes by default' do + get api("/#{attributable_name}/#{attributable.id}", admin) + + expect(response).to have_gitlab_http_status(200) + expect(json_response).not_to include 'custom_attributes' + end + + it 'includes custom attributes if requested' do + get api("/#{attributable_name}/#{attributable.id}", admin), with_custom_attributes: true + + expect(response).to have_gitlab_http_status(200) + expect(json_response['custom_attributes']).to contain_exactly( + { 'key' => 'foo', 'value' => 'foo' }, + { 'key' => 'bar', 'value' => 'bar' } + ) + end end end @@ -33,14 +109,16 @@ shared_examples 'custom attributes endpoints' do |attributable_name| it_behaves_like 'an unauthorized API user' end - it 'returns all custom attributes' do - get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin) + context 'with an authorized user' do + it 'returns all custom attributes' do + get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin) - expect(response).to have_gitlab_http_status(200) - expect(json_response).to contain_exactly( - { 'key' => 'foo', 'value' => 'foo' }, - { 'key' => 'bar', 'value' => 'bar' } - ) + expect(response).to have_gitlab_http_status(200) + expect(json_response).to contain_exactly( + { 'key' => 'foo', 'value' => 'foo' }, + { 'key' => 'bar', 'value' => 'bar' } + ) + end end end @@ 
-51,11 +129,13 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
       it_behaves_like 'an unauthorized API user'
     end
 
-    it 'returns a single custom attribute' do
-      get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
+    context 'with an authorized user' do
+      it 'returns a single custom attribute' do
+        get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
 
-      expect(response).to have_gitlab_http_status(200)
-      expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' })
+        expect(response).to have_gitlab_http_status(200)
+        expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' })
+      end
     end
   end
 
@@ -66,24 +146,26 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
       it_behaves_like 'an unauthorized API user'
     end
 
-    it 'creates a new custom attribute' do
-      expect do
-        put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), value: 'new'
-      end.to change { attributable.custom_attributes.count }.by(1)
+    context 'with an authorized user' do
+      it 'creates a new custom attribute' do
+        expect do
+          put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), value: 'new'
+        end.to change { attributable.custom_attributes.count }.by(1)
 
-      expect(response).to have_gitlab_http_status(200)
-      expect(json_response).to eq({ 'key' => 'new', 'value' => 'new' })
-      expect(attributable.custom_attributes.find_by(key: 'new').value).to eq 'new'
-    end
+        expect(response).to have_gitlab_http_status(200)
+        expect(json_response).to eq({ 'key' => 'new', 'value' => 'new' })
+        expect(attributable.custom_attributes.find_by(key: 'new').value).to eq 'new'
+      end
 
-    it 'updates an existing custom attribute' do
-      expect do
-        put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), value: 'new'
-      end.not_to change { attributable.custom_attributes.count }
+      it 'updates an existing custom attribute' do
+        expect do
+          put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), value: 'new'
+        end.not_to change { attributable.custom_attributes.count }
 
-      expect(response).to have_gitlab_http_status(200)
-      expect(json_response).to eq({ 'key' => 'foo', 'value' => 'new' })
-      expect(custom_attribute1.reload.value).to eq 'new'
+        expect(response).to have_gitlab_http_status(200)
+        expect(json_response).to eq({ 'key' => 'foo', 'value' => 'new' })
+        expect(custom_attribute1.reload.value).to eq 'new'
+      end
     end
   end
 
@@ -94,13 +176,15 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
       it_behaves_like 'an unauthorized API user'
     end
 
-    it 'deletes an existing custom attribute' do
-      expect do
-        delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
-      end.to change { attributable.custom_attributes.count }.by(-1)
+    context 'with an authorized user' do
+      it 'deletes an existing custom attribute' do
+        expect do
+          delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
+        end.to change { attributable.custom_attributes.count }.by(-1)
 
-      expect(response).to have_gitlab_http_status(204)
-      expect(attributable.custom_attributes.find_by(key: 'foo')).to be_nil
+        expect(response).to have_gitlab_http_status(204)
+        expect(attributable.custom_attributes.find_by(key: 'foo')).to be_nil
+      end
     end
   end
 end
diff --git a/spec/support/slack_mattermost_notifications_shared_examples.rb b/spec/support/slack_mattermost_notifications_shared_examples.rb
index e827a8da0b7..5e1ce19eafb 100644
---
a/spec/support/slack_mattermost_notifications_shared_examples.rb +++ b/spec/support/slack_mattermost_notifications_shared_examples.rb @@ -337,6 +337,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do before do chat_service.notify_only_default_branch = true + WebMock.stub_request(:post, webhook_url) end it 'does not call the Slack/Mattermost API for pipeline events' do @@ -345,6 +346,23 @@ RSpec.shared_examples 'slack or mattermost notifications' do expect(result).to be_falsy end + + it 'does not notify push events if they are not for the default branch' do + ref = "#{Gitlab::Git::BRANCH_REF_PREFIX}test" + push_sample_data = Gitlab::DataBuilder::Push.build(project, user, nil, nil, ref, []) + + chat_service.execute(push_sample_data) + + expect(WebMock).not_to have_requested(:post, webhook_url) + end + + it 'notifies about push events for the default branch' do + push_sample_data = Gitlab::DataBuilder::Push.build_sample(project, user) + + chat_service.execute(push_sample_data) + + expect(WebMock).to have_requested(:post, webhook_url).once + end end context 'when disabled' do diff --git a/spec/support/snippet_visibility.rb b/spec/support/snippet_visibility.rb new file mode 100644 index 00000000000..3a7c69b7877 --- /dev/null +++ b/spec/support/snippet_visibility.rb @@ -0,0 +1,322 @@ +RSpec.shared_examples 'snippet visibility' do + let!(:author) { create(:user) } + let!(:member) { create(:user) } + let!(:external) { create(:user, :external) } + + let!(:snippet_type_visibilities) do + { + public: Snippet::PUBLIC, + internal: Snippet::INTERNAL, + private: Snippet::PRIVATE + } + end + + context "For project snippets" do + let!(:users) do + { + unauthenticated: nil, + external: external, + non_member: create(:user), + member: member, + author: author + } + end + + let!(:project_type_visibilities) do + { + public: Gitlab::VisibilityLevel::PUBLIC, + internal: Gitlab::VisibilityLevel::INTERNAL, + private: Gitlab::VisibilityLevel::PRIVATE + } + end + + let(:project_feature_visibilities) do + { + enabled: ProjectFeature::ENABLED, + private: ProjectFeature::PRIVATE, + disabled: ProjectFeature::DISABLED + } + end + + where(:project_type, :feature_visibility, :user_type, :snippet_type, :outcome) do + [ + # Public projects + [:public, :enabled, :unauthenticated, :public, true], + [:public, :enabled, :unauthenticated, :internal, false], + [:public, :enabled, :unauthenticated, :private, false], + + [:public, :enabled, :external, :public, true], + [:public, :enabled, :external, :internal, false], + [:public, :enabled, :external, :private, false], + + [:public, :enabled, :non_member, :public, true], + [:public, :enabled, :non_member, :internal, true], + [:public, :enabled, :non_member, :private, false], + + [:public, :enabled, :member, :public, true], + [:public, :enabled, :member, :internal, true], + [:public, :enabled, :member, :private, true], + + [:public, :enabled, :author, :public, true], + [:public, :enabled, :author, :internal, true], + [:public, :enabled, :author, :private, true], + + [:public, :private, :unauthenticated, :public, false], + [:public, :private, :unauthenticated, :internal, false], + [:public, :private, :unauthenticated, :private, false], + + [:public, :private, :external, :public, false], + [:public, :private, :external, :internal, false], + [:public, :private, :external, :private, false], + + [:public, :private, :non_member, :public, false], + [:public, :private, :non_member, :internal, false], + [:public, :private, :non_member, :private, false], + + [:public, :private, 
:member, :public, true], + [:public, :private, :member, :internal, true], + [:public, :private, :member, :private, true], + + [:public, :private, :author, :public, true], + [:public, :private, :author, :internal, true], + [:public, :private, :author, :private, true], + + [:public, :disabled, :unauthenticated, :public, false], + [:public, :disabled, :unauthenticated, :internal, false], + [:public, :disabled, :unauthenticated, :private, false], + + [:public, :disabled, :external, :public, false], + [:public, :disabled, :external, :internal, false], + [:public, :disabled, :external, :private, false], + + [:public, :disabled, :non_member, :public, false], + [:public, :disabled, :non_member, :internal, false], + [:public, :disabled, :non_member, :private, false], + + [:public, :disabled, :member, :public, false], + [:public, :disabled, :member, :internal, false], + [:public, :disabled, :member, :private, false], + + [:public, :disabled, :author, :public, false], + [:public, :disabled, :author, :internal, false], + [:public, :disabled, :author, :private, false], + + # Internal projects + [:internal, :enabled, :unauthenticated, :public, false], + [:internal, :enabled, :unauthenticated, :internal, false], + [:internal, :enabled, :unauthenticated, :private, false], + + [:internal, :enabled, :external, :public, false], + [:internal, :enabled, :external, :internal, false], + [:internal, :enabled, :external, :private, false], + + [:internal, :enabled, :non_member, :public, true], + [:internal, :enabled, :non_member, :internal, true], + [:internal, :enabled, :non_member, :private, false], + + [:internal, :enabled, :member, :public, true], + [:internal, :enabled, :member, :internal, true], + [:internal, :enabled, :member, :private, true], + + [:internal, :enabled, :author, :public, true], + [:internal, :enabled, :author, :internal, true], + [:internal, :enabled, :author, :private, true], + + [:internal, :private, :unauthenticated, :public, false], + [:internal, :private, :unauthenticated, :internal, false], + [:internal, :private, :unauthenticated, :private, false], + + [:internal, :private, :external, :public, false], + [:internal, :private, :external, :internal, false], + [:internal, :private, :external, :private, false], + + [:internal, :private, :non_member, :public, false], + [:internal, :private, :non_member, :internal, false], + [:internal, :private, :non_member, :private, false], + + [:internal, :private, :member, :public, true], + [:internal, :private, :member, :internal, true], + [:internal, :private, :member, :private, true], + + [:internal, :private, :author, :public, true], + [:internal, :private, :author, :internal, true], + [:internal, :private, :author, :private, true], + + [:internal, :disabled, :unauthenticated, :public, false], + [:internal, :disabled, :unauthenticated, :internal, false], + [:internal, :disabled, :unauthenticated, :private, false], + + [:internal, :disabled, :external, :public, false], + [:internal, :disabled, :external, :internal, false], + [:internal, :disabled, :external, :private, false], + + [:internal, :disabled, :non_member, :public, false], + [:internal, :disabled, :non_member, :internal, false], + [:internal, :disabled, :non_member, :private, false], + + [:internal, :disabled, :member, :public, false], + [:internal, :disabled, :member, :internal, false], + [:internal, :disabled, :member, :private, false], + + [:internal, :disabled, :author, :public, false], + [:internal, :disabled, :author, :internal, false], + [:internal, :disabled, :author, :private, 
false], + + # Private projects + [:private, :enabled, :unauthenticated, :public, false], + [:private, :enabled, :unauthenticated, :internal, false], + [:private, :enabled, :unauthenticated, :private, false], + + [:private, :enabled, :external, :public, true], + [:private, :enabled, :external, :internal, true], + [:private, :enabled, :external, :private, true], + + [:private, :enabled, :non_member, :public, false], + [:private, :enabled, :non_member, :internal, false], + [:private, :enabled, :non_member, :private, false], + + [:private, :enabled, :member, :public, true], + [:private, :enabled, :member, :internal, true], + [:private, :enabled, :member, :private, true], + + [:private, :enabled, :author, :public, true], + [:private, :enabled, :author, :internal, true], + [:private, :enabled, :author, :private, true], + + [:private, :private, :unauthenticated, :public, false], + [:private, :private, :unauthenticated, :internal, false], + [:private, :private, :unauthenticated, :private, false], + + [:private, :private, :external, :public, true], + [:private, :private, :external, :internal, true], + [:private, :private, :external, :private, true], + + [:private, :private, :non_member, :public, false], + [:private, :private, :non_member, :internal, false], + [:private, :private, :non_member, :private, false], + + [:private, :private, :member, :public, true], + [:private, :private, :member, :internal, true], + [:private, :private, :member, :private, true], + + [:private, :private, :author, :public, true], + [:private, :private, :author, :internal, true], + [:private, :private, :author, :private, true], + + [:private, :disabled, :unauthenticated, :public, false], + [:private, :disabled, :unauthenticated, :internal, false], + [:private, :disabled, :unauthenticated, :private, false], + + [:private, :disabled, :external, :public, false], + [:private, :disabled, :external, :internal, false], + [:private, :disabled, :external, :private, false], + + [:private, :disabled, :non_member, :public, false], + [:private, :disabled, :non_member, :internal, false], + [:private, :disabled, :non_member, :private, false], + + [:private, :disabled, :member, :public, false], + [:private, :disabled, :member, :internal, false], + [:private, :disabled, :member, :private, false], + + [:private, :disabled, :author, :public, false], + [:private, :disabled, :author, :internal, false], + [:private, :disabled, :author, :private, false] + ] + end + + with_them do + let!(:project) { create(:project, visibility_level: project_type_visibilities[project_type]) } + let!(:project_feature) { project.project_feature.update_column(:snippets_access_level, project_feature_visibilities[feature_visibility]) } + let!(:user) { users[user_type] } + let!(:snippet) { create(:project_snippet, visibility_level: snippet_type_visibilities[snippet_type], project: project, author: author) } + let!(:members) do + project.add_developer(author) + project.add_developer(member) + project.add_developer(external) if project.private? 
+ end + + context "For #{params[:project_type]} project and #{params[:user_type]} users" do + it 'should agree with the read_project_snippet policy' do + expect(can?(user, :read_project_snippet, snippet)).to eq(outcome) + end + + it 'should return proper outcome' do + results = described_class.new(user, project: project).execute + expect(results.include?(snippet)).to eq(outcome) + end + end + + context "Without a given project and #{params[:user_type]} users" do + it 'should return proper outcome' do + results = described_class.new(user).execute + expect(results.include?(snippet)).to eq(outcome) + end + + it 'returns no snippets when the user cannot read cross project' do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + + snippets = described_class.new(user).execute + + expect(snippets).to be_empty + end + end + end + end + + context 'For personal snippets' do + let!(:users) do + { + unauthenticated: nil, + external: external, + non_member: create(:user), + author: author + } + end + + where(:snippet_visibility, :user_type, :outcome) do + [ + [:public, :unauthenticated, true], + [:public, :external, true], + [:public, :non_member, true], + [:public, :author, true], + + [:internal, :unauthenticated, false], + [:internal, :external, false], + [:internal, :non_member, true], + [:internal, :author, true], + + [:private, :unauthenticated, false], + [:private, :external, false], + [:private, :non_member, false], + [:private, :author, true] + ] + end + + with_them do + let!(:user) { users[user_type] } + let!(:snippet) { create(:personal_snippet, visibility_level: snippet_type_visibilities[snippet_visibility], author: author) } + + context "For personal and #{params[:snippet_visibility]} snippets with #{params[:user_type]} user" do + it 'should agree with read_personal_snippet policy' do + expect(can?(user, :read_personal_snippet, snippet)).to eq(outcome) + end + + it 'should return proper outcome' do + results = described_class.new(user).execute + expect(results.include?(snippet)).to eq(outcome) + end + + it 'should return personal snippets when the user cannot read cross project' do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false } + + results = described_class.new(user).execute + + expect(results.include?(snippet)).to eq(outcome) + end + end + end + end +end diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb index c275522159c..01321989f01 100644 --- a/spec/support/test_env.rb +++ b/spec/support/test_env.rb @@ -1,5 +1,5 @@ require 'rspec/mocks' -require 'toml' +require 'toml-rb' module TestEnv extend self diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb deleted file mode 100644 index 5752078d2a0..00000000000 --- a/spec/support/track_untracked_uploads_helpers.rb +++ /dev/null @@ -1,20 +0,0 @@ -module TrackUntrackedUploadsHelpers - def uploaded_file - fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg') - fixture_file_upload(fixture_path) - end - - def ensure_temporary_tracking_table_exists - Gitlab::BackgroundMigration::PrepareUntrackedUploads.new.send(:ensure_temporary_tracking_table_exists) - end - - def drop_temp_table_if_exists - ActiveRecord::Base.connection.drop_table(:untracked_files_for_uploads) if ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads) - end - - def create_or_update_appearance(attrs) - a = 
Appearance.first_or_initialize(title: 'foo', description: 'bar') - a.update!(attrs) - a - end -end diff --git a/spec/tasks/gitlab/check_rake_spec.rb b/spec/tasks/gitlab/check_rake_spec.rb index 538ff952bf4..4eda618b6d6 100644 --- a/spec/tasks/gitlab/check_rake_spec.rb +++ b/spec/tasks/gitlab/check_rake_spec.rb @@ -11,8 +11,8 @@ describe 'gitlab:ldap:check rake task' do context 'when LDAP is not enabled' do it 'does not attempt to bind or search for users' do - expect(Gitlab::LDAP::Config).not_to receive(:providers) - expect(Gitlab::LDAP::Adapter).not_to receive(:open) + expect(Gitlab::Auth::LDAP::Config).not_to receive(:providers) + expect(Gitlab::Auth::LDAP::Adapter).not_to receive(:open) run_rake_task('gitlab:ldap:check') end @@ -23,12 +23,12 @@ describe 'gitlab:ldap:check rake task' do let(:adapter) { ldap_adapter('ldapmain', ldap) } before do - allow(Gitlab::LDAP::Config) + allow(Gitlab::Auth::LDAP::Config) .to receive_messages( enabled?: true, providers: ['ldapmain'] ) - allow(Gitlab::LDAP::Adapter).to receive(:open).and_yield(adapter) + allow(Gitlab::Auth::LDAP::Adapter).to receive(:open).and_yield(adapter) allow(adapter).to receive(:users).and_return([]) end diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb index b37d6ac831f..1f4053ff9ad 100644 --- a/spec/tasks/gitlab/gitaly_rake_spec.rb +++ b/spec/tasks/gitlab/gitaly_rake_spec.rb @@ -132,7 +132,7 @@ describe 'gitlab:gitaly namespace rake task' do expect { run_rake_task('gitlab:gitaly:storage_config')} .to output(expected_output).to_stdout - parsed_output = TOML.parse(expected_output) + parsed_output = TomlRB.parse(expected_output) config.each do |name, params| expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params['path'] }) end diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb index 6a92e7fae51..b42ce982b27 100644 --- a/spec/uploaders/file_uploader_spec.rb +++ b/spec/uploaders/file_uploader_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe FileUploader do let(:group) { create(:group, name: 'awesome') } - let(:project) { create(:project, namespace: group, name: 'project') } + let(:project) { create(:project, :legacy_storage, namespace: group, name: 'project') } let(:uploader) { described_class.new(project) } let(:upload) { double(model: project, path: 'secret/foo.jpg') } @@ -16,12 +16,12 @@ describe FileUploader do shared_examples 'uses hashed storage' do context 'when rolled out attachments' do + let(:project) { build_stubbed(:project, namespace: group, name: 'project') } + before do allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed') end - let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') } - it_behaves_like 'builds correct paths', store_dir: %r{ca/fe/fe/ed/\h+}, absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg} diff --git a/spec/validators/variable_duplicates_validator_spec.rb b/spec/validators/variable_duplicates_validator_spec.rb new file mode 100644 index 00000000000..0b71a67f94d --- /dev/null +++ b/spec/validators/variable_duplicates_validator_spec.rb @@ -0,0 +1,67 @@ +require 'spec_helper' + +describe VariableDuplicatesValidator do + let(:validator) { described_class.new(attributes: [:variables], **options) } + + describe '#validate_each' do + let(:project) { build(:project) } + + subject { validator.validate_each(project, :variables, project.variables) } + + context 'with no scope' do + let(:options) { {} } + let(:variables) { build_list(:ci_variable, 
2, project: project) } + + before do + project.variables << variables + end + + it 'does not have any errors' do + subject + + expect(project.errors.empty?).to be true + end + + context 'with duplicates' do + before do + project.variables.build(key: variables.first.key, value: 'dummy_value') + end + + it 'has a duplicate key error' do + subject + + expect(project.errors).to have_key(:variables) + end + end + end + + context 'with a scope attribute' do + let(:options) { { scope: :environment_scope } } + let(:first_variable) { build(:ci_variable, key: 'test_key', environment_scope: '*', project: project) } + let(:second_variable) { build(:ci_variable, key: 'test_key', environment_scope: 'prod', project: project) } + + before do + project.variables << first_variable + project.variables << second_variable + end + + it 'does not have any errors' do + subject + + expect(project.errors.empty?).to be true + end + + context 'with duplicates' do + before do + project.variables.build(key: second_variable.key, value: 'dummy_value', environment_scope: second_variable.environment_scope) + end + + it 'has a duplicate key error' do + subject + + expect(project.errors).to have_key(:variables) + end + end + end + end +end diff --git a/spec/views/shared/projects/_project.html.haml_spec.rb b/spec/views/shared/projects/_project.html.haml_spec.rb index f0a4f153699..3b14045e61f 100644 --- a/spec/views/shared/projects/_project.html.haml_spec.rb +++ b/spec/views/shared/projects/_project.html.haml_spec.rb @@ -5,6 +5,7 @@ describe 'shared/projects/_project.html.haml' do before do allow(view).to receive(:current_application_settings).and_return(Gitlab::CurrentSettings.current_application_settings) + allow(view).to receive(:can?) { true } end it 'should render creator avatar if project has a creator' do diff --git a/spec/workers/authorized_projects_worker_spec.rb b/spec/workers/authorized_projects_worker_spec.rb index 0d6eb536c33..d095138f6b7 100644 --- a/spec/workers/authorized_projects_worker_spec.rb +++ b/spec/workers/authorized_projects_worker_spec.rb @@ -1,79 +1,6 @@ require 'spec_helper' describe AuthorizedProjectsWorker do - let(:project) { create(:project) } - - def build_args_list(*ids, multiply: 1) - args_list = ids.map { |id| [id] } - args_list * multiply - end - - describe '.bulk_perform_and_wait' do - it 'schedules the ids and waits for the jobs to complete' do - args_list = build_args_list(project.owner.id) - - project.owner.project_authorizations.delete_all - described_class.bulk_perform_and_wait(args_list) - - expect(project.owner.project_authorizations.count).to eq(1) - end - - it 'inlines workloads <= 3 jobs' do - args_list = build_args_list(project.owner.id, multiply: 3) - expect(described_class).to receive(:bulk_perform_inline).with(args_list) - - described_class.bulk_perform_and_wait(args_list) - end - - it 'runs > 3 jobs using sidekiq' do - project.owner.project_authorizations.delete_all - - expect(described_class).to receive(:bulk_perform_async).and_call_original - - args_list = build_args_list(project.owner.id, multiply: 4) - described_class.bulk_perform_and_wait(args_list) - - expect(project.owner.project_authorizations.count).to eq(1) - end - end - - describe '.bulk_perform_inline' do - it 'refreshes the authorizations inline' do - project.owner.project_authorizations.delete_all - - expect_any_instance_of(described_class).to receive(:perform).and_call_original - - described_class.bulk_perform_inline(build_args_list(project.owner.id)) - - expect(project.owner.project_authorizations.count).to 
eq(1) - end - - it 'enqueues jobs if an error is raised' do - invalid_id = -1 - args_list = build_args_list(project.owner.id, invalid_id) - - allow_any_instance_of(described_class).to receive(:perform).with(project.owner.id) - allow_any_instance_of(described_class).to receive(:perform).with(invalid_id).and_raise(ArgumentError) - expect(described_class).to receive(:bulk_perform_async).with(build_args_list(invalid_id)) - - described_class.bulk_perform_inline(args_list) - end - end - - describe '.bulk_perform_async' do - it "uses it's respective sidekiq queue" do - args_list = build_args_list(project.owner.id) - push_bulk_args = { - 'class' => described_class, - 'args' => args_list - } - - expect(Sidekiq::Client).to receive(:push_bulk).with(push_bulk_args).once - - described_class.bulk_perform_async(args_list) - end - end - describe '#perform' do let(:user) { create(:user) } @@ -85,12 +12,6 @@ describe AuthorizedProjectsWorker do job.perform(user.id) end - it 'notifies the JobWaiter when done if the key is provided' do - expect(Gitlab::JobWaiter).to receive(:notify).with('notify-key', job.jid) - - job.perform(user.id, 'notify-key') - end - context "when the user is not found" do it "does nothing" do expect_any_instance_of(User).not_to receive(:refresh_authorized_projects) diff --git a/spec/workers/check_gcp_project_billing_worker_spec.rb b/spec/workers/check_gcp_project_billing_worker_spec.rb index 7b7a7c1bc44..526ecf75921 100644 --- a/spec/workers/check_gcp_project_billing_worker_spec.rb +++ b/spec/workers/check_gcp_project_billing_worker_spec.rb @@ -6,6 +6,11 @@ describe CheckGcpProjectBillingWorker do subject { described_class.new.perform('token_key') } + before do + allow(described_class).to receive(:get_billing_state) + allow_any_instance_of(described_class).to receive(:update_billing_change_counter) + end + context 'when there is a token in redis' do before do allow(described_class).to receive(:get_session_token).and_return(token) @@ -23,11 +28,8 @@ describe CheckGcpProjectBillingWorker do end it 'stores billing status in redis' do - redis_double = double - expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double]) - expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis_double) - expect(redis_double).to receive(:set).with(described_class.redis_shared_state_key_for(token), anything, anything) + expect(described_class).to receive(:set_billing_state).with(token, true) subject end @@ -48,7 +50,7 @@ describe CheckGcpProjectBillingWorker do context 'when there is no token in redis' do before do - allow_any_instance_of(described_class).to receive(:get_session_token).and_return(nil) + allow(described_class).to receive(:get_session_token).and_return(nil) end it 'does not call the service' do @@ -58,4 +60,57 @@ describe CheckGcpProjectBillingWorker do end end end + + describe 'billing change counter' do + subject { described_class.new.perform('token_key') } + + before do + allow(described_class).to receive(:get_session_token).and_return('bogustoken') + allow_any_instance_of(described_class).to receive(:try_obtain_lease_for).and_return('randomuuid') + allow(described_class).to receive(:set_billing_state) + end + + context 'when previous state was false' do + before do + expect(described_class).to receive(:get_billing_state).and_return(false) + end + + context 'when the current state is false' do + before do + expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([]) + end + + it 'increments the billing 
change counter' do + expect_any_instance_of(described_class).to receive_message_chain(:billing_changed_counter, :increment) + + subject + end + end + + context 'when the current state is true' do + before do + expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double]) + end + + it 'increments the billing change counter' do + expect_any_instance_of(described_class).to receive_message_chain(:billing_changed_counter, :increment) + + subject + end + end + end + + context 'when previous state was true' do + before do + expect(described_class).to receive(:get_billing_state).and_return(true) + expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double]) + end + + it 'increment the billing change counter' do + expect_any_instance_of(described_class).to receive_message_chain(:billing_changed_counter, :increment) + + subject + end + end + end end diff --git a/spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb b/spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb new file mode 100644 index 00000000000..2e2e9afd25a --- /dev/null +++ b/spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb @@ -0,0 +1,30 @@ +require 'spec_helper' + +describe ClusterWaitForIngressIpAddressWorker do + describe '#perform' do + let(:service) { instance_double(Clusters::Applications::CheckIngressIpAddressService, execute: true) } + let(:application) { instance_double(Clusters::Applications::Ingress) } + let(:worker) { described_class.new } + + before do + allow(worker) + .to receive(:find_application) + .with('ingress', 117) + .and_yield(application) + + allow(Clusters::Applications::CheckIngressIpAddressService) + .to receive(:new) + .with(application) + .and_return(service) + + allow(described_class) + .to receive(:perform_in) + end + + it 'finds the application and calls CheckIngressIpAddressService#execute' do + worker.perform('ingress', 117) + + expect(service).to have_received(:execute) + end + end +end diff --git a/spec/workers/concerns/waitable_worker_spec.rb b/spec/workers/concerns/waitable_worker_spec.rb new file mode 100644 index 00000000000..4af0de86ac9 --- /dev/null +++ b/spec/workers/concerns/waitable_worker_spec.rb @@ -0,0 +1,92 @@ +require 'spec_helper' + +describe WaitableWorker do + let(:worker) do + Class.new do + def self.name + 'Gitlab::Foo::Bar::DummyWorker' + end + + class << self + cattr_accessor(:counter) { 0 } + end + + include ApplicationWorker + prepend WaitableWorker + + def perform(i = 0) + self.class.counter += i + end + end + end + + subject(:job) { worker.new } + + describe '.bulk_perform_and_wait' do + it 'schedules the jobs and waits for them to complete' do + worker.bulk_perform_and_wait([[1], [2]]) + + expect(worker.counter).to eq(3) + end + + it 'inlines workloads <= 3 jobs' do + args_list = [[1], [2], [3]] + expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original + + worker.bulk_perform_and_wait(args_list) + + expect(worker.counter).to eq(6) + end + + it 'runs > 3 jobs using sidekiq' do + expect(worker).to receive(:bulk_perform_async) + + worker.bulk_perform_and_wait([[1], [2], [3], [4]]) + end + end + + describe '.bulk_perform_inline' do + it 'runs the jobs inline' do + expect(worker).not_to receive(:bulk_perform_async) + + worker.bulk_perform_inline([[1], [2]]) + + expect(worker.counter).to eq(3) + end + + it 'enqueues jobs if an error is raised' do + expect(worker).to receive(:bulk_perform_async).with([['foo']]) + + 
worker.bulk_perform_inline([[1], ['foo']]) + end + end + + describe '#perform' do + shared_examples 'perform' do + it 'notifies the JobWaiter when done if the key is provided' do + key = Gitlab::JobWaiter.new.key + expect(Gitlab::JobWaiter).to receive(:notify).with(key, job.jid) + + job.perform(*args, key) + end + + it 'does not notify the JobWaiter when done if no key is provided' do + expect(Gitlab::JobWaiter).not_to receive(:notify) + + job.perform(*args) + end + end + + context 'when the worker takes arguments' do + let(:args) { [1] } + + it_behaves_like 'perform' + end + + context 'when the worker takes no arguments' do + let(:args) { [] } + + it_behaves_like 'perform' + end + end +end diff --git a/spec/workers/namespaceless_project_destroy_worker_spec.rb b/spec/workers/namespaceless_project_destroy_worker_spec.rb index ed8cedc0079..479d9396eca 100644 --- a/spec/workers/namespaceless_project_destroy_worker_spec.rb +++ b/spec/workers/namespaceless_project_destroy_worker_spec.rb @@ -22,7 +22,9 @@ describe NamespacelessProjectDestroyWorker do end end - context 'project has no namespace' do + # Only possible with schema 20180222043024 and lower. + # Project#namespace_id has not null constraint since then + context 'project has no namespace', :migration, schema: 20180222043024 do let!(:project) do project = build(:project, namespace_id: nil) project.save(validate: false) diff --git a/spec/workers/pages_domain_verification_cron_worker_spec.rb b/spec/workers/pages_domain_verification_cron_worker_spec.rb new file mode 100644 index 00000000000..8f780428c82 --- /dev/null +++ b/spec/workers/pages_domain_verification_cron_worker_spec.rb @@ -0,0 +1,21 @@ +require 'spec_helper' + +describe PagesDomainVerificationCronWorker do + subject(:worker) { described_class.new } + + describe '#perform' do + it 'enqueues a PagesDomainVerificationWorker for domains needing verification' do + verified = create(:pages_domain) + reverify = create(:pages_domain, :reverify) + disabled = create(:pages_domain, :disabled) + + [reverify, disabled].each do |domain| + expect(PagesDomainVerificationWorker).to receive(:perform_async).with(domain.id) + end + + expect(PagesDomainVerificationWorker).not_to receive(:perform_async).with(verified.id) + + worker.perform + end + end +end diff --git a/spec/workers/pages_domain_verification_worker_spec.rb b/spec/workers/pages_domain_verification_worker_spec.rb new file mode 100644 index 00000000000..372fc95ab4a --- /dev/null +++ b/spec/workers/pages_domain_verification_worker_spec.rb @@ -0,0 +1,27 @@ +require 'spec_helper' + +describe PagesDomainVerificationWorker do + subject(:worker) { described_class.new } + + let(:domain) { create(:pages_domain) } + + describe '#perform' do + it 'does nothing for a non-existent domain' do + domain.destroy + + expect(VerifyPagesDomainService).not_to receive(:new) + + expect { worker.perform(domain.id) }.not_to raise_error + end + + it 'delegates to VerifyPagesDomainService' do + service = double(:service) + expected_domain = satisfy { |obj| obj == domain } + + expect(VerifyPagesDomainService).to receive(:new).with(expected_domain) { service } + expect(service).to receive(:execute) + + worker.perform(domain.id) + end + end +end diff --git a/spec/workers/plugin_worker_spec.rb b/spec/workers/plugin_worker_spec.rb new file mode 100644 index 00000000000..9238a8199bc --- /dev/null +++ b/spec/workers/plugin_worker_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe PluginWorker do + include RepoHelpers + + let(:filename) { 'my_plugin.rb' } + 
let(:data) { { 'event_name' => 'project_create' } }
+
+  subject { described_class.new }
+
+  describe '#perform' do
+    it 'executes Gitlab::Plugin with expected values' do
+      allow(Gitlab::Plugin).to receive(:execute).with(filename, data).and_return([true, ''])
+
+      expect(subject.perform(filename, data)).to be_truthy
+    end
+
+    it 'logs message in case of plugin execution failure' do
+      allow(Gitlab::Plugin).to receive(:execute).with(filename, data).and_return([false, 'permission denied'])
+
+      expect(Gitlab::PluginLogger).to receive(:error)
+      expect(subject.perform(filename, data)).to be_truthy
+    end
+  end
+end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 24f8ca67594..76ef57b6b1e 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -20,6 +20,32 @@ describe ProcessCommitWorker do
     worker.perform(project.id, -1, commit.to_hash)
   end
 
+  context 'when commit is a merge request merge commit' do
+    let(:merge_request) do
+      create(:merge_request,
+             description: "Closes #{issue.to_reference}",
+             source_branch: 'feature-merged',
+             target_branch: 'master',
+             source_project: project)
+    end
+
+    let(:commit) do
+      project.repository.create_branch('feature-merged', 'feature')
+
+      sha = project.repository.merge(user,
+                                     merge_request.diff_head_sha,
+                                     merge_request,
+                                     "Closes #{issue.to_reference}")
+      project.repository.commit(sha)
+    end
+
+    it 'does not close any issues from the commit message' do
+      expect(worker).not_to receive(:close_issues)
+
+      worker.perform(project.id, user.id, commit.to_hash)
+    end
+  end
+
   it 'processes the commit message' do
     expect(worker).to receive(:process_commit_message).and_call_original
 
@@ -48,11 +74,9 @@ describe ProcessCommitWorker do
   describe '#process_commit_message' do
     context 'when pushing to the default branch' do
       it 'closes issues that should be closed per the commit message' do
-        allow(commit).to receive(:safe_message)
-          .and_return("Closes #{issue.to_reference}")
+        allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
 
-        expect(worker).to receive(:close_issues)
-          .with(project, user, user, commit, [issue])
+        expect(worker).to receive(:close_issues).with(project, user, user, commit, [issue])
 
         worker.process_commit_message(project, commit, user, user, true)
       end
@@ -60,8 +84,7 @@ describe ProcessCommitWorker do
     context 'when pushing to a non-default branch' do
       it 'does not close any issues' do
-        allow(commit).to receive(:safe_message)
-          .and_return("Closes #{issue.to_reference}")
+        allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
 
         expect(worker).not_to receive(:close_issues)
 
@@ -102,8 +125,7 @@ describe ProcessCommitWorker do
   describe '#update_issue_metrics' do
     it 'updates any existing issue metrics' do
-      allow(commit).to receive(:safe_message)
-        .and_return("Closes #{issue.to_reference}")
+      allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
 
       worker.update_issue_metrics(commit, user)
 
@@ -113,10 +135,10 @@ describe ProcessCommitWorker do
     end
 
     it "doesn't execute any queries with false conditions" do
-      allow(commit).to receive(:safe_message)
-        .and_return("Lorem Ipsum")
+      allow(commit).to receive(:safe_message).and_return("Lorem Ipsum")
 
-      expect { worker.update_issue_metrics(commit, user) }.not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
+      expect { worker.update_issue_metrics(commit, user) }
+        .not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
     end
   end
 
@@ -128,8 +150,9 @@
describe ProcessCommitWorker do end it 'parses date strings into Time instances' do - commit = worker - .build_commit(project, id: '123', authored_date: Time.now.to_s) + commit = worker.build_commit(project, + id: '123', + authored_date: Time.now.to_s) expect(commit.authored_date).to be_an_instance_of(Time) end diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb index 4912baa348c..6c66658d8c3 100644 --- a/spec/workers/repository_fork_worker_spec.rb +++ b/spec/workers/repository_fork_worker_spec.rb @@ -68,7 +68,7 @@ describe RepositoryForkWorker do end it "handles bad fork" do - error_message = "Unable to fork project #{fork_project.id} for repository #{project.full_path} -> #{fork_project.full_path}" + error_message = "Unable to fork project #{fork_project.id} for repository #{project.disk_path} -> #{fork_project.disk_path}" expect_fork_repository.and_return(false) diff --git a/spec/workers/storage_migrator_worker_spec.rb b/spec/workers/storage_migrator_worker_spec.rb index 8619ff2f7da..ff625164142 100644 --- a/spec/workers/storage_migrator_worker_spec.rb +++ b/spec/workers/storage_migrator_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe StorageMigratorWorker do subject(:worker) { described_class.new } - let(:projects) { create_list(:project, 2) } + let(:projects) { create_list(:project, 2, :legacy_storage) } describe '#perform' do let(:ids) { projects.map(&:id) } diff --git a/spec/workers/stuck_import_jobs_worker_spec.rb b/spec/workers/stuck_import_jobs_worker_spec.rb index a82eb54ffe4..069514552b1 100644 --- a/spec/workers/stuck_import_jobs_worker_spec.rb +++ b/spec/workers/stuck_import_jobs_worker_spec.rb @@ -2,35 +2,59 @@ require 'spec_helper' describe StuckImportJobsWorker do let(:worker) { described_class.new } - let(:exclusive_lease_uuid) { SecureRandom.uuid } - before do - allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(exclusive_lease_uuid) - end + shared_examples 'project import job detection' do + context 'when the job has completed' do + context 'when the import status was already updated' do + before do + allow(Gitlab::SidekiqStatus).to receive(:completed_jids) do + project.import_start + project.import_finish - describe 'with started import_status' do - let(:project) { create(:project, :import_started, import_jid: '123') } + [project.import_jid] + end + end + + it 'does not mark the project as failed' do + worker.perform + + expect(project.reload.import_status).to eq('finished') + end + end + + context 'when the import status was not updated' do + before do + allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([project.import_jid]) + end - describe 'long running import' do - it 'marks the project as failed' do - allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(['123']) + it 'marks the project as failed' do + worker.perform - expect { worker.perform }.to change { project.reload.import_status }.to('failed') + expect(project.reload.import_status).to eq('failed') + end end end - describe 'running import' do - it 'does not mark the project as failed' do + context 'when the job is still in Sidekiq' do + before do allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([]) + end + it 'does not mark the project as failed' do expect { worker.perform }.not_to change { project.reload.import_status } end + end + end - describe 'import without import_jid' do - it 'marks the project as failed' do - expect { worker.perform }.to change { 
project.reload.import_status }.to('failed')
-      end
-    end
+  describe 'with scheduled import_status' do
+    it_behaves_like 'project import job detection' do
+      let(:project) { create(:project, :import_scheduled, import_jid: '123') }
+    end
+  end
+
+  describe 'with started import_status' do
+    it_behaves_like 'project import job detection' do
+      let(:project) { create(:project, :import_started, import_jid: '123') }
     end
   end
 end