author     Grzegorz Bizon <grzesiek.bizon@gmail.com>    2017-04-04 13:36:36 +0200
committer  Grzegorz Bizon <grzesiek.bizon@gmail.com>    2017-04-04 13:36:36 +0200
commit     44321b1a3d99fc1a25d4b24afef55e1755f89e8a (patch)
tree       b5eee1964c7f32adb8096c07cee23c2a413640eb /spec/services
parent     b03f1699c47ce8a08f67ef458107d22cbafbc0bd (diff)
parent     5efd67942cec39d733d27a52edc2ebc86babce30 (diff)
download   gitlab-ce-44321b1a3d99fc1a25d4b24afef55e1755f89e8a.tar.gz
Merge branch 'master' into feature/multi-level-container-registry-images
* master: (57 commits)
  Ensure we generate unique usernames otherwise validations fail
  Fix a Knapsack issue that would load support/capybara.rb before support/env.rb
  Ensure users have a short username otherwise a click event is triggered
  ...
  Enable the `bullet_logger` setting; enable `raise` in test environment
  Fix Rubocop offenses
  Set the right timeout for Gitlab::Shell#fetch_remote
  Refactoring Projects::ImportService
  Move methods that are not related to mirroring to the repository model
  Fix GitHub pull request formatter spec
  Rename skip_metrics to imported on the importable concern
  Add CHANGELOG
  Remove unused include from RepositoryImportWorker
  Skip MR metrics when importing projects from GitHub
  Fetch GitHub project as a mirror to get all refs at once
  Make file templates easy to use and discover
  Ensure user has a unique username otherwise `user10` would match `user1`
  Ensure the AbuseReport fixtures create unique reported users
  Don't use FFaker in factories, use sequences instead
  Fix brittle specs
  Fix the AbuseReport seeder
  ...

Conflicts:
  db/schema.rb
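Several of the merged commits above concern test-data generation (unique usernames, sequences instead of FFaker). As a rough illustration of the pattern those commit subjects point at — the factory name and attributes below are hypothetical and not taken from this diff — a sequence-based FactoryGirl factory yields deterministic, collision-free values where randomly generated FFaker data can occasionally repeat:

    # Hypothetical sketch of a sequence-based factory (not GitLab's actual factory).
    # Each invocation of a sequence yields the next value, so usernames and emails
    # are unique by construction instead of relying on random fake data.
    FactoryGirl.define do
      factory :user do
        sequence(:username) { |n| "user#{n}" }
        sequence(:email)    { |n| "user#{n}@example.com" }
        name 'Test User'
      end
    end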
Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/ci/process_pipeline_service_spec.rb     59
-rw-r--r--  spec/services/notification_service_spec.rb           190
-rw-r--r--  spec/services/projects/import_service_spec.rb         83
-rw-r--r--  spec/services/users/destroy_spec.rb                   19
4 files changed, 231 insertions, 120 deletions
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index d93616c4f50..bb98fb37a90 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -418,65 +418,6 @@ describe Ci::ProcessPipelineService, '#execute', :services do
end
end
- context 'when there are builds that are not created yet' do
- let(:pipeline) do
- create(:ci_pipeline, config: config)
- end
-
- let(:config) do
- { rspec: { stage: 'test', script: 'rspec' },
- deploy: { stage: 'deploy', script: 'rsync' } }
- end
-
- before do
- create_build('linux', stage: 'build', stage_idx: 0)
- create_build('mac', stage: 'build', stage_idx: 0)
- end
-
- it 'processes the pipeline' do
- # Currently we have two builds with state created
- #
- expect(builds.count).to eq(0)
- expect(all_builds.count).to eq(2)
-
- # The process pipeline service will enqueue builds from the first stage.
- #
- process_pipeline
-
- expect(builds.count).to eq(2)
- expect(all_builds.count).to eq(2)
-
- # When builds succeed we will enqueue remaining builds.
- #
- # We will have 2 succeeded, 1 pending (from stage test), total 4 (two
- # additional builds from `.gitlab-ci.yml`).
- #
- succeed_pending
- process_pipeline
-
- expect(builds.success.count).to eq(2)
- expect(builds.pending.count).to eq(1)
- expect(all_builds.count).to eq(4)
-
- # When the pending build in stage test succeeds, we enqueue the deploy stage.
- #
- succeed_pending
- process_pipeline
-
- expect(builds.pending.count).to eq(1)
- expect(builds.success.count).to eq(3)
- expect(all_builds.count).to eq(4)
-
- # When the last one succeeds we have 4 successful builds.
- #
- succeed_pending
- process_pipeline
-
- expect(builds.success.count).to eq(4)
- expect(all_builds.count).to eq(4)
- end
- end
-
def process_pipeline
described_class.new(pipeline.project, user).execute(pipeline)
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 5c841843b40..e3146a56495 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -113,7 +113,7 @@ describe NotificationService, services: true do
project.add_master(issue.assignee)
project.add_master(note.author)
create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@subscribed_participant cc this guy')
- update_custom_notification(:new_note, @u_guest_custom, project)
+ update_custom_notification(:new_note, @u_guest_custom, resource: project)
update_custom_notification(:new_note, @u_custom_global)
end
@@ -379,7 +379,7 @@ describe NotificationService, services: true do
build_team(note.project)
reset_delivered_emails!
allow_any_instance_of(Commit).to receive(:author).and_return(@u_committer)
- update_custom_notification(:new_note, @u_guest_custom, project)
+ update_custom_notification(:new_note, @u_guest_custom, resource: project)
update_custom_notification(:new_note, @u_custom_global)
end
@@ -457,7 +457,7 @@ describe NotificationService, services: true do
add_users_with_subscription(issue.project, issue)
reset_delivered_emails!
- update_custom_notification(:new_issue, @u_guest_custom, project)
+ update_custom_notification(:new_issue, @u_guest_custom, resource: project)
update_custom_notification(:new_issue, @u_custom_global)
end
@@ -567,7 +567,7 @@ describe NotificationService, services: true do
describe '#reassigned_issue' do
before do
- update_custom_notification(:reassign_issue, @u_guest_custom, project)
+ update_custom_notification(:reassign_issue, @u_guest_custom, resource: project)
update_custom_notification(:reassign_issue, @u_custom_global)
end
@@ -760,7 +760,7 @@ describe NotificationService, services: true do
describe '#close_issue' do
before do
- update_custom_notification(:close_issue, @u_guest_custom, project)
+ update_custom_notification(:close_issue, @u_guest_custom, resource: project)
update_custom_notification(:close_issue, @u_custom_global)
end
@@ -791,7 +791,7 @@ describe NotificationService, services: true do
describe '#reopen_issue' do
before do
- update_custom_notification(:reopen_issue, @u_guest_custom, project)
+ update_custom_notification(:reopen_issue, @u_guest_custom, resource: project)
update_custom_notification(:reopen_issue, @u_custom_global)
end
@@ -856,14 +856,14 @@ describe NotificationService, services: true do
before do
build_team(merge_request.target_project)
add_users_with_subscription(merge_request.target_project, merge_request)
- update_custom_notification(:new_merge_request, @u_guest_custom, project)
+ update_custom_notification(:new_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:new_merge_request, @u_custom_global)
reset_delivered_emails!
end
describe '#new_merge_request' do
before do
- update_custom_notification(:new_merge_request, @u_guest_custom, project)
+ update_custom_notification(:new_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:new_merge_request, @u_custom_global)
end
@@ -952,7 +952,7 @@ describe NotificationService, services: true do
describe '#reassigned_merge_request' do
before do
- update_custom_notification(:reassign_merge_request, @u_guest_custom, project)
+ update_custom_notification(:reassign_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:reassign_merge_request, @u_custom_global)
end
@@ -1026,7 +1026,7 @@ describe NotificationService, services: true do
describe '#closed_merge_request' do
before do
- update_custom_notification(:close_merge_request, @u_guest_custom, project)
+ update_custom_notification(:close_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:close_merge_request, @u_custom_global)
end
@@ -1056,7 +1056,7 @@ describe NotificationService, services: true do
describe '#merged_merge_request' do
before do
- update_custom_notification(:merge_merge_request, @u_guest_custom, project)
+ update_custom_notification(:merge_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:merge_merge_request, @u_custom_global)
end
@@ -1108,7 +1108,7 @@ describe NotificationService, services: true do
describe '#reopen_merge_request' do
before do
- update_custom_notification(:reopen_merge_request, @u_guest_custom, project)
+ update_custom_notification(:reopen_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:reopen_merge_request, @u_custom_global)
end
@@ -1281,40 +1281,172 @@ describe NotificationService, services: true do
describe 'Pipelines' do
describe '#pipeline_finished' do
let(:project) { create(:project, :public, :repository) }
- let(:current_user) { create(:user) }
let(:u_member) { create(:user) }
- let(:u_other) { create(:user) }
+ let(:u_watcher) { create_user_with_notification(:watch, 'watcher') }
+
+ let(:u_custom_notification_unset) do
+ create_user_with_notification(:custom, 'custom_unset')
+ end
+
+ let(:u_custom_notification_enabled) do
+ user = create_user_with_notification(:custom, 'custom_enabled')
+ update_custom_notification(:success_pipeline, user, resource: project)
+ update_custom_notification(:failed_pipeline, user, resource: project)
+ user
+ end
+
+ let(:u_custom_notification_disabled) do
+ user = create_user_with_notification(:custom, 'custom_disabled')
+ update_custom_notification(:success_pipeline, user, resource: project, value: false)
+ update_custom_notification(:failed_pipeline, user, resource: project, value: false)
+ user
+ end
let(:commit) { project.commit }
- let(:pipeline) do
- create(:ci_pipeline, :success,
+
+ def create_pipeline(user, status)
+ create(:ci_pipeline, status,
project: project,
- user: current_user,
+ user: user,
ref: 'refs/heads/master',
sha: commit.id,
before_sha: '00000000')
end
before do
- project.add_master(current_user)
project.add_master(u_member)
+ project.add_master(u_watcher)
+ project.add_master(u_custom_notification_unset)
+ project.add_master(u_custom_notification_enabled)
+ project.add_master(u_custom_notification_disabled)
+
reset_delivered_emails!
end
- context 'without custom recipients' do
- it 'notifies the pipeline user' do
- notification.pipeline_finished(pipeline)
+ context 'with a successful pipeline' do
+ context 'when the creator has default settings' do
+ before do
+ pipeline = create_pipeline(u_member, :success)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'notifies nobody' do
+ should_not_email_anyone
+ end
+ end
+
+ context 'when the creator has watch set' do
+ before do
+ pipeline = create_pipeline(u_watcher, :success)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'notifies nobody' do
+ should_not_email_anyone
+ end
+ end
+
+ context 'when the creator has custom notifications, but without any set' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_unset, :success)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'notifies nobody' do
+ should_not_email_anyone
+ end
+ end
+
+ context 'when the creator has custom notifications disabled' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_disabled, :success)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'notifies nobody' do
+ should_not_email_anyone
+ end
+ end
+
+ context 'when the creator has custom notifications enabled' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_enabled, :success)
+ notification.pipeline_finished(pipeline)
+ end
- should_only_email(current_user, kind: :bcc)
+ it 'emails only the creator' do
+ should_only_email(u_custom_notification_enabled, kind: :bcc)
+ end
end
end
- context 'with custom recipients' do
- it 'notifies the custom recipients' do
- users = [u_member, u_other]
- notification.pipeline_finished(pipeline, users.map(&:notification_email))
+ context 'with a failed pipeline' do
+ context 'when the creator has no custom notification set' do
+ before do
+ pipeline = create_pipeline(u_member, :failed)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'emails only the creator' do
+ should_only_email(u_member, kind: :bcc)
+ end
+ end
+
+ context 'when the creator has watch set' do
+ before do
+ pipeline = create_pipeline(u_watcher, :failed)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'emails only the creator' do
+ should_only_email(u_watcher, kind: :bcc)
+ end
+ end
+
+ context 'when the creator has custom notifications, but without any set' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_unset, :failed)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'emails only the creator' do
+ should_only_email(u_custom_notification_unset, kind: :bcc)
+ end
+ end
+
+ context 'when the creator has custom notifications disabled' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_disabled, :failed)
+ notification.pipeline_finished(pipeline)
+ end
- should_only_email(*users, kind: :bcc)
+ it 'notifies nobody' do
+ should_not_email_anyone
+ end
+ end
+
+ context 'when the creator has custom notifications set' do
+ before do
+ pipeline = create_pipeline(u_custom_notification_enabled, :failed)
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'emails only the creator' do
+ should_only_email(u_custom_notification_enabled, kind: :bcc)
+ end
+ end
+
+ context 'when the creator has no read_build access' do
+ before do
+ pipeline = create_pipeline(u_member, :failed)
+ project.update(public_builds: false)
+ project.team.truncate
+ notification.pipeline_finished(pipeline)
+ end
+
+ it 'does not send emails' do
+ should_not_email_anyone
+ end
end
end
end
@@ -1385,9 +1517,9 @@ describe NotificationService, services: true do
# Create custom notifications
# When resource is nil it means global notification
- def update_custom_notification(event, user, resource = nil)
+ def update_custom_notification(event, user, resource: nil, value: true)
setting = user.notification_settings_for(resource)
- setting.events[event] = true
+ setting.events[event] = value
setting.save
end
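The reworked helper above replaces the positional resource argument with keyword arguments and adds a value flag, which is what lets the pipeline specs express "custom notifications disabled" as well as "enabled". A minimal usage sketch, using only the call shapes that appear in this diff:

    # Global custom notification setting (resource defaults to nil)
    update_custom_notification(:new_note, user)

    # Enable a custom event for a specific project
    update_custom_notification(:success_pipeline, user, resource: project)

    # Explicitly disable a custom event for a specific project
    update_custom_notification(:failed_pipeline, user, resource: project, value: false)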
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index e5917bb0b7a..09cfa36b3b9 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -26,30 +26,59 @@ describe Projects::ImportService, services: true do
result = subject.execute
expect(result[:status]).to eq :error
- expect(result[:message]).to eq 'The repository could not be created.'
+ expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - The repository could not be created."
end
end
context 'with known url' do
before do
project.import_url = 'https://github.com/vim/vim.git'
+ project.import_type = 'github'
end
- it 'succeeds if repository import is successful' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true)
+ context 'with a GitHub repository' do
+ it 'succeeds if repository import is successful' do
+ expect_any_instance_of(Repository).to receive(:fetch_remote).and_return(true)
+ expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(true)
- result = subject.execute
+ result = subject.execute
- expect(result[:status]).to eq :success
+ expect(result[:status]).to eq :success
+ end
+
+ it 'fails if repository import fails' do
+ expect_any_instance_of(Repository).to receive(:fetch_remote).and_raise(Gitlab::Shell::Error.new('Failed to import the repository'))
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Failed to import the repository"
+ end
end
- it 'fails if repository import fails' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_raise(Gitlab::Shell::Error.new('Failed to import the repository'))
+ context 'with a non-GitHub repository' do
+ before do
+ project.import_url = 'https://bitbucket.org/vim/vim.git'
+ project.import_type = 'bitbucket'
+ end
- result = subject.execute
+ it 'succeeds if repository import is successful' do
+ expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true)
+ expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true)
- expect(result[:status]).to eq :error
- expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Failed to import the repository"
+ result = subject.execute
+
+ expect(result[:status]).to eq :success
+ end
+
+ it 'fails if repository import fails' do
+ expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_raise(Gitlab::Shell::Error.new('Failed to import the repository'))
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Failed to import the repository"
+ end
end
end
@@ -64,8 +93,8 @@ describe Projects::ImportService, services: true do
end
it 'succeeds if importer succeeds' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true)
- expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(true)
+ allow_any_instance_of(Repository).to receive(:fetch_remote).and_return(true)
+ allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(true)
result = subject.execute
@@ -73,48 +102,42 @@ describe Projects::ImportService, services: true do
end
it 'flushes various caches' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).
- with(project.repository_storage_path, project.path_with_namespace, project.import_url).
+ allow_any_instance_of(Repository).to receive(:fetch_remote).
and_return(true)
- expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).
+ allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).
and_return(true)
- expect_any_instance_of(Repository).to receive(:expire_emptiness_caches).
- and_call_original
-
- expect_any_instance_of(Repository).to receive(:expire_exists_cache).
- and_call_original
+ expect_any_instance_of(Repository).to receive(:expire_content_cache)
subject.execute
end
it 'fails if importer fails' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true)
- expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(false)
+ allow_any_instance_of(Repository).to receive(:fetch_remote).and_return(true)
+ allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_return(false)
result = subject.execute
expect(result[:status]).to eq :error
- expect(result[:message]).to eq 'The remote data could not be imported.'
+ expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - The remote data could not be imported."
end
it 'fails if importer raises an error' do
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_return(true)
- expect_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_raise(Projects::ImportService::Error.new('Github: failed to connect API'))
+ allow_any_instance_of(Gitlab::Shell).to receive(:fetch_remote).and_return(true)
+ allow_any_instance_of(Gitlab::GithubImport::Importer).to receive(:execute).and_raise(Projects::ImportService::Error.new('Github: failed to connect API'))
result = subject.execute
expect(result[:status]).to eq :error
- expect(result[:message]).to eq 'Github: failed to connect API'
+ expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - Github: failed to connect API"
end
- it 'expires existence cache after error' do
+ it 'expires content cache after error' do
allow_any_instance_of(Project).to receive(:repository_exists?).and_return(false, true)
- expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).with(project.repository_storage_path, project.path_with_namespace, project.import_url).and_raise(Gitlab::Shell::Error.new('Failed to import the repository'))
- expect_any_instance_of(Repository).to receive(:expire_emptiness_caches).and_call_original
- expect_any_instance_of(Repository).to receive(:expire_exists_cache).and_call_original
+ expect_any_instance_of(Gitlab::Shell).to receive(:fetch_remote).and_raise(Gitlab::Shell::Error.new('Failed to import the repository'))
+ expect_any_instance_of(Repository).to receive(:expire_content_cache)
subject.execute
end
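The stubs in this file swap the old Gitlab::Shell#import_repository call for Repository#fetch_remote followed by the GitHub importer, matching the "fetch GitHub project as a mirror" commit in the merge. As a loose sketch of the flow these specs exercise — the wrapper method and its arguments are assumptions for illustration, not the real Projects::ImportService code:

    # Illustrative only: argument lists are assumed; error wrapping mirrors the
    # messages asserted in the specs above.
    def import_github_project!(project)
      # Fetch the repository as a mirror so all refs arrive in a single fetch
      project.repository.fetch_remote('github')
      # Then import the non-git data (issues, pull requests, comments)
      Gitlab::GithubImport::Importer.new(project).execute
    rescue Gitlab::Shell::Error => e
      raise Projects::ImportService::Error,
            "Error importing repository #{project.import_url} into #{project.path_with_namespace} - #{e.message}"
    end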
diff --git a/spec/services/users/destroy_spec.rb b/spec/services/users/destroy_spec.rb
index 9a28c03d968..66c61b7f8ff 100644
--- a/spec/services/users/destroy_spec.rb
+++ b/spec/services/users/destroy_spec.rb
@@ -17,13 +17,28 @@ describe Users::DestroyService, services: true do
expect { Namespace.with_deleted.find(user.namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
- it 'will delete the project in the near future' do
- expect_any_instance_of(Projects::DestroyService).to receive(:async_execute).once
+ it 'will delete the project' do
+ expect_any_instance_of(Projects::DestroyService).to receive(:execute).once
service.execute(user)
end
end
+ context 'projects in pending_delete' do
+ before do
+ project.pending_delete = true
+ project.save
+ end
+
+ it 'destroys a project in pending_delete' do
+ expect_any_instance_of(Projects::DestroyService).to receive(:execute).once
+
+ service.execute(user)
+
+ expect { Project.find(project.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
context "a deleted user's issues" do
let(:project) { create(:project) }