Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/groups/uploads_controller_spec.rb | 8
-rw-r--r--  spec/controllers/projects/clusters/gcp_controller_spec.rb | 182
-rw-r--r--  spec/controllers/projects/clusters/user_controller_spec.rb | 89
-rw-r--r--  spec/controllers/projects/clusters_controller_spec.rb | 226
-rw-r--r--  spec/controllers/projects/jobs_controller_spec.rb | 9
-rw-r--r--  spec/controllers/projects/pipelines_controller_spec.rb | 20
-rw-r--r--  spec/controllers/projects/todos_controller_spec.rb | 133
-rw-r--r--  spec/controllers/projects/uploads_controller_spec.rb | 8
-rw-r--r--  spec/controllers/sessions_controller_spec.rb | 13
-rw-r--r--  spec/factories/ci/runners.rb | 4
-rw-r--r--  spec/factories/todos.rb | 4
-rw-r--r--  spec/features/projects/clusters/gcp_spec.rb | 10
-rw-r--r--  spec/features/projects/clusters/user_spec.rb | 2
-rw-r--r--  spec/features/projects/clusters_spec.rb | 2
-rw-r--r--  spec/features/projects/issues/user_creates_issue_spec.rb | 3
-rw-r--r--  spec/features/projects/jobs_spec.rb | 2
-rw-r--r--  spec/features/projects/milestones/new_spec.rb | 4
-rw-r--r--  spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb | 60
-rw-r--r--  spec/features/projects/wiki/user_creates_wiki_page_spec.rb | 4
-rw-r--r--  spec/features/projects/wiki/user_updates_wiki_page_spec.rb | 4
-rw-r--r--  spec/features/tags/master_creates_tag_spec.rb | 4
-rw-r--r--  spec/features/tags/master_deletes_tag_spec.rb | 27
-rw-r--r--  spec/features/tags/master_updates_tag_spec.rb | 4
-rw-r--r--  spec/finders/pipelines_finder_spec.rb | 29
-rw-r--r--  spec/finders/todos_finder_spec.rb | 64
-rw-r--r--  spec/graphql/gitlab_schema_spec.rb | 6
-rw-r--r--  spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb | 52
-rw-r--r--  spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb | 30
-rw-r--r--  spec/graphql/resolvers/project_pipelines_resolver_spec.rb | 22
-rw-r--r--  spec/graphql/types/ci/pipeline_type_spec.rb | 7
-rw-r--r--  spec/graphql/types/merge_request_type_spec.rb | 13
-rw-r--r--  spec/graphql/types/project_type_spec.rb | 2
-rw-r--r--  spec/helpers/issuables_helper_spec.rb | 21
-rw-r--r--  spec/javascripts/diffs/store/mutations_spec.js | 15
-rw-r--r--  spec/javascripts/ide/stores/modules/merge_requests/actions_spec.js | 28
-rw-r--r--  spec/javascripts/ide/stores/modules/pipelines/actions_spec.js | 79
-rw-r--r--  spec/javascripts/sidebar/todo_spec.js | 158
-rw-r--r--  spec/lib/banzai/filter/merge_request_reference_filter_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/background_migration/delete_diff_files_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/diff/file_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/gfm/uploads_rewriter_spec.rb | 64
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 265
-rw-r--r--  spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb | 112
-rw-r--r--  spec/lib/gitlab/middleware/multipart_spec.rb | 73
-rw-r--r--  spec/lib/gitlab/middleware/read_only_spec.rb | 35
-rw-r--r--  spec/migrations/enqueue_delete_diff_files_workers_spec.rb | 48
-rw-r--r--  spec/models/ci/runner_spec.rb | 9
-rw-r--r--  spec/models/hooks/web_hook_log_spec.rb | 18
-rw-r--r--  spec/models/merge_request_diff_spec.rb | 7
-rw-r--r--  spec/models/merge_request_spec.rb | 16
-rw-r--r--  spec/models/project_services/bamboo_service_spec.rb | 22
-rw-r--r--  spec/models/repository_spec.rb | 156
-rw-r--r--  spec/models/todo_spec.rb | 1
-rw-r--r--  spec/requests/api/graphql/project/merge_request_spec.rb | 24
-rw-r--r--  spec/requests/api/graphql/project_query_spec.rb | 12
-rw-r--r--  spec/requests/api/runners_spec.rb | 23
-rw-r--r--  spec/services/files/update_service_spec.rb | 12
-rw-r--r--  spec/services/labels/find_or_create_service_spec.rb | 20
-rw-r--r--  spec/services/merge_requests/conflicts/list_service_spec.rb | 18
-rw-r--r--  spec/services/merge_requests/conflicts/resolve_service_spec.rb | 11
-rw-r--r--  spec/services/merge_requests/rebase_service_spec.rb | 38
-rw-r--r--  spec/services/merge_requests/squash_service_spec.rb | 45
-rw-r--r--  spec/support/helpers/graphql_helpers.rb | 20
-rw-r--r--  spec/support/shared_examples/controllers/todos_shared_examples.rb | 43
-rw-r--r--  spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb | 79
-rw-r--r--  spec/support/shared_examples/requests/api/merge_requests_list.rb | 5
-rw-r--r--  spec/tasks/gitlab/git_rake_spec.rb | 17
-rw-r--r--  spec/uploaders/file_uploader_spec.rb | 44
-rw-r--r--  spec/workers/project_cache_worker_spec.rb | 73
-rw-r--r--  spec/workers/prune_web_hook_logs_worker_spec.rb | 22
72 files changed, 1655 insertions, 1062 deletions
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 6a1869d1a48..5a7281ed704 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Groups::UploadsController do
+ include WorkhorseHelpers
+
let(:model) { create(:group, :public) }
let(:params) do
{ group_id: model }
@@ -9,4 +11,10 @@ describe Groups::UploadsController do
it_behaves_like 'handle uploads' do
let(:uploader_class) { NamespaceFileUploader }
end
+
+ def post_authorize(verified: true)
+ request.headers.merge!(workhorse_internal_api_request_header) if verified
+
+ post :authorize, group_id: model.full_path, format: :json
+ end
end
diff --git a/spec/controllers/projects/clusters/gcp_controller_spec.rb b/spec/controllers/projects/clusters/gcp_controller_spec.rb
deleted file mode 100644
index 271ba37aed4..00000000000
--- a/spec/controllers/projects/clusters/gcp_controller_spec.rb
+++ /dev/null
@@ -1,182 +0,0 @@
-require 'spec_helper'
-
-describe Projects::Clusters::GcpController do
- include AccessMatchersForController
- include GoogleApi::CloudPlatformHelpers
-
- set(:project) { create(:project) }
-
- describe 'GET login' do
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
- end
-
- context 'when omniauth has been configured' do
- let(:key) { 'secret-key' }
- let(:session_key_for_redirect_uri) do
- GoogleApi::CloudPlatform::Client.session_key_for_redirect_uri(key)
- end
-
- before do
- allow(SecureRandom).to receive(:hex).and_return(key)
- end
-
- it 'has authorize_url' do
- go
-
- expect(assigns(:authorize_url)).to include(key)
- expect(session[session_key_for_redirect_uri]).to eq(gcp_new_project_clusters_path(project))
- end
- end
-
- context 'when omniauth has not configured' do
- before do
- stub_omniauth_setting(providers: [])
- end
-
- it 'does not have authorize_url' do
- go
-
- expect(assigns(:authorize_url)).to be_nil
- end
- end
- end
-
- describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- get :login, namespace_id: project.namespace, project_id: project
- end
- end
-
- describe 'GET new' do
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
- end
-
- context 'when access token is valid' do
- before do
- stub_google_api_validate_token
- end
-
- it 'has new object' do
- go
-
- expect(assigns(:cluster)).to be_an_instance_of(Clusters::Cluster)
- end
- end
-
- context 'when access token is expired' do
- before do
- stub_google_api_expired_token
- end
-
- it { expect(go).to redirect_to(gcp_login_project_clusters_path(project)) }
- end
-
- context 'when access token is not stored in session' do
- it { expect(go).to redirect_to(gcp_login_project_clusters_path(project)) }
- end
- end
-
- describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- get :new, namespace_id: project.namespace, project_id: project
- end
- end
-
- describe 'POST create' do
- let(:params) do
- {
- cluster: {
- name: 'new-cluster',
- provider_gcp_attributes: {
- gcp_project_id: '111'
- }
- }
- }
- end
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
- end
-
- context 'when access token is valid' do
- before do
- stub_google_api_validate_token
- end
-
- it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
- expect { go }.to change { Clusters::Cluster.count }
- .and change { Clusters::Providers::Gcp.count }
- expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
- expect(project.clusters.first).to be_gcp
- expect(project.clusters.first).to be_kubernetes
- end
- end
-
- context 'when access token is expired' do
- before do
- stub_google_api_expired_token
- end
-
- it 'redirects to login page' do
- expect(go).to redirect_to(gcp_login_project_clusters_path(project))
- end
- end
-
- context 'when access token is not stored in session' do
- it 'redirects to login page' do
- expect(go).to redirect_to(gcp_login_project_clusters_path(project))
- end
- end
- end
-
- describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- post :create, params.merge(namespace_id: project.namespace, project_id: project)
- end
- end
-end
diff --git a/spec/controllers/projects/clusters/user_controller_spec.rb b/spec/controllers/projects/clusters/user_controller_spec.rb
deleted file mode 100644
index 913976d187f..00000000000
--- a/spec/controllers/projects/clusters/user_controller_spec.rb
+++ /dev/null
@@ -1,89 +0,0 @@
-require 'spec_helper'
-
-describe Projects::Clusters::UserController do
- include AccessMatchersForController
-
- set(:project) { create(:project) }
-
- describe 'GET new' do
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
- end
-
- it 'has new object' do
- go
-
- expect(assigns(:cluster)).to be_an_instance_of(Clusters::Cluster)
- end
- end
-
- describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- get :new, namespace_id: project.namespace, project_id: project
- end
- end
-
- describe 'POST create' do
- let(:params) do
- {
- cluster: {
- name: 'new-cluster',
- platform_kubernetes_attributes: {
- api_url: 'http://my-url',
- token: 'test',
- namespace: 'aaa'
- }
- }
- }
- end
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
- end
-
- context 'when creates a cluster' do
- it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
- expect { go }.to change { Clusters::Cluster.count }
- .and change { Clusters::Platforms::Kubernetes.count }
- expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
- expect(project.clusters.first).to be_user
- expect(project.clusters.first).to be_kubernetes
- end
- end
- end
-
- describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- post :create, params.merge(namespace_id: project.namespace, project_id: project)
- end
- end
-end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 380e50c8cac..e47ccdc9aea 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe Projects::ClustersController do
include AccessMatchersForController
+ include GoogleApi::CloudPlatformHelpers
set(:project) { create(:project) }
@@ -73,6 +74,231 @@ describe Projects::ClustersController do
end
end
+ describe 'GET new' do
+ describe 'functionality for new cluster' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when omniauth has been configured' do
+ let(:key) { 'secret-key' }
+ let(:session_key_for_redirect_uri) do
+ GoogleApi::CloudPlatform::Client.session_key_for_redirect_uri(key)
+ end
+
+ before do
+ allow(SecureRandom).to receive(:hex).and_return(key)
+ end
+
+ it 'has authorize_url' do
+ go
+
+ expect(assigns(:authorize_url)).to include(key)
+ expect(session[session_key_for_redirect_uri]).to eq(new_project_cluster_path(project))
+ end
+ end
+
+ context 'when omniauth has not been configured' do
+ before do
+ stub_omniauth_setting(providers: [])
+ end
+
+ it 'does not have authorize_url' do
+ go
+
+ expect(assigns(:authorize_url)).to be_nil
+ end
+ end
+
+ context 'when access token is valid' do
+ before do
+ stub_google_api_validate_token
+ end
+
+ it 'has new object' do
+ go
+
+ expect(assigns(:gcp_cluster)).to be_an_instance_of(Clusters::Cluster)
+ end
+ end
+
+ context 'when access token is expired' do
+ before do
+ stub_google_api_expired_token
+ end
+
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+
+ context 'when access token is not stored in session' do
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+ end
+
+ describe 'functionality for existing cluster' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ it 'has new object' do
+ go
+
+ expect(assigns(:user_cluster)).to be_an_instance_of(Clusters::Cluster)
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ get :new, namespace_id: project.namespace, project_id: project
+ end
+ end
+
+ describe 'POST create for new cluster' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ provider_gcp_attributes: {
+ gcp_project_id: 'gcp-project-12345'
+ }
+ }
+ }
+ end
+
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when access token is valid' do
+ before do
+ stub_google_api_validate_token
+ end
+
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { go }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Providers::Gcp.count }
+ expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
+ expect(project.clusters.first).to be_gcp
+ expect(project.clusters.first).to be_kubernetes
+ end
+ end
+
+ context 'when access token is expired' do
+ before do
+ stub_google_api_expired_token
+ end
+
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+
+ context 'when access token is not stored in session' do
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow_any_instance_of(described_class)
+ .to receive(:token_in_session).and_return('token')
+ allow_any_instance_of(described_class)
+ .to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_create) do
+ OpenStruct.new(
+ self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123',
+ status: 'RUNNING'
+ )
+ end
+
+ allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
+ end
+
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ post :create_gcp, params.merge(namespace_id: project.namespace, project_id: project)
+ end
+ end
+
+ describe 'POST create for existing cluster' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ platform_kubernetes_attributes: {
+ api_url: 'http://my-url',
+ token: 'test',
+ namespace: 'aaa'
+ }
+ }
+ }
+ end
+
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when creating a cluster' do
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { go }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Platforms::Kubernetes.count }
+ expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
+ expect(project.clusters.first).to be_user
+ expect(project.clusters.first).to be_kubernetes
+ end
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ post :create_user, params.merge(namespace_id: project.namespace, project_id: project)
+ end
+ end
+
describe 'GET status' do
let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 06c8a432561..b10421b8f26 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -102,6 +102,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
describe 'GET show' do
let!(:job) { create(:ci_build, :failed, pipeline: pipeline) }
+ let!(:second_job) { create(:ci_build, :failed, pipeline: pipeline) }
+ let!(:third_job) { create(:ci_build, :failed) }
context 'when requesting HTML' do
context 'when job exists' do
@@ -113,6 +115,13 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:build).id).to eq(job.id)
end
+
+ it 'has the correct build collection' do
+ builds = assigns(:builds).map(&:id)
+
+ expect(builds).to include(job.id, second_job.id)
+ expect(builds).not_to include(third_job.id)
+ end
end
context 'when job does not exist' do
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 9618a8417ec..1cc7f33b57a 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -4,7 +4,7 @@ describe Projects::PipelinesController do
include ApiHelpers
set(:user) { create(:user) }
- set(:project) { create(:project, :public, :repository) }
+ let(:project) { create(:project, :public, :repository) }
let(:feature) { ProjectFeature::DISABLED }
before do
@@ -91,6 +91,24 @@ describe Projects::PipelinesController do
end
end
+ context 'when the project is private' do
+ let(:project) { create(:project, :private, :repository) }
+
+ it 'returns `not_found` when the user does not have access' do
+ sign_in(create(:user))
+
+ get_pipelines_index_json
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns the pipelines when the user has access' do
+ get_pipelines_index_json
+
+ expect(json_response['pipelines'].size).to eq(5)
+ end
+ end
+
def get_pipelines_index_json
get :index, namespace_id: project.namespace,
project_id: project,
diff --git a/spec/controllers/projects/todos_controller_spec.rb b/spec/controllers/projects/todos_controller_spec.rb
index 1ce7e84bef9..58f2817c7cc 100644
--- a/spec/controllers/projects/todos_controller_spec.rb
+++ b/spec/controllers/projects/todos_controller_spec.rb
@@ -5,10 +5,29 @@ describe Projects::TodosController do
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:parent) { project }
+
+ shared_examples 'project todos actions' do
+ it_behaves_like 'todos actions'
+
+ context 'when not authorized for resource' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
+ sign_in(user)
+ end
+
+ it "doesn't create todo" do
+ expect { post_create }.not_to change { user.todos.count }
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
context 'Issues' do
describe 'POST create' do
- def go
+ def post_create
post :create,
namespace_id: project.namespace,
project_id: project,
@@ -17,66 +36,13 @@ describe Projects::TodosController do
format: 'html'
end
- context 'when authorized' do
- before do
- sign_in(user)
- project.add_developer(user)
- end
-
- it 'creates todo for issue' do
- expect do
- go
- end.to change { user.todos.count }.by(1)
-
- expect(response).to have_gitlab_http_status(200)
- end
-
- it 'returns todo path and pending count' do
- go
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['count']).to eq 1
- expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}})
- end
- end
-
- context 'when not authorized for project' do
- it 'does not create todo for issue that user has no access to' do
- sign_in(user)
- expect do
- go
- end.to change { user.todos.count }.by(0)
-
- expect(response).to have_gitlab_http_status(404)
- end
-
- it 'does not create todo for issue when user not logged in' do
- expect do
- go
- end.to change { user.todos.count }.by(0)
-
- expect(response).to have_gitlab_http_status(302)
- end
- end
-
- context 'when not authorized for issue' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
- sign_in(user)
- end
-
- it "doesn't create todo" do
- expect { go }.not_to change { user.todos.count }
- expect(response).to have_gitlab_http_status(404)
- end
- end
+ it_behaves_like 'project todos actions'
end
end
context 'Merge Requests' do
describe 'POST create' do
- def go
+ def post_create
post :create,
namespace_id: project.namespace,
project_id: project,
@@ -85,60 +51,7 @@ describe Projects::TodosController do
format: 'html'
end
- context 'when authorized' do
- before do
- sign_in(user)
- project.add_developer(user)
- end
-
- it 'creates todo for merge request' do
- expect do
- go
- end.to change { user.todos.count }.by(1)
-
- expect(response).to have_gitlab_http_status(200)
- end
-
- it 'returns todo path and pending count' do
- go
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['count']).to eq 1
- expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}})
- end
- end
-
- context 'when not authorized for project' do
- it 'does not create todo for merge request user has no access to' do
- sign_in(user)
- expect do
- go
- end.to change { user.todos.count }.by(0)
-
- expect(response).to have_gitlab_http_status(404)
- end
-
- it 'does not create todo for merge request user has no access to' do
- expect do
- go
- end.to change { user.todos.count }.by(0)
-
- expect(response).to have_gitlab_http_status(302)
- end
- end
-
- context 'when not authorized for merge_request' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
- sign_in(user)
- end
-
- it "doesn't create todo" do
- expect { go }.not_to change { user.todos.count }
- expect(response).to have_gitlab_http_status(404)
- end
- end
+ it_behaves_like 'project todos actions'
end
end
end
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index eca9baed9c9..325ee53aafb 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Projects::UploadsController do
+ include WorkhorseHelpers
+
let(:model) { create(:project, :public) }
let(:params) do
{ namespace_id: model.namespace.to_param, project_id: model }
@@ -15,4 +17,10 @@ describe Projects::UploadsController do
expect(response).to redirect_to(new_user_session_path)
end
end
+
+ def post_authorize(verified: true)
+ request.headers.merge!(workhorse_internal_api_request_header) if verified
+
+ post :authorize, namespace_id: model.namespace, project_id: model.path, format: :json
+ end
end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index cdec26bd421..7c00652317b 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -93,6 +93,12 @@ describe SessionsController do
it 'displays an error when the reCAPTCHA is not solved' do
# Without this, `verify_recaptcha` arbitrarily returns true in test env
Recaptcha.configuration.skip_verify_env.delete('test')
+ counter = double(:counter)
+
+ expect(counter).to receive(:increment)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:failed_login_captcha_total, anything)
+ .and_return(counter)
post(:create, user: user_params)
@@ -104,6 +110,13 @@ describe SessionsController do
it 'successfully logs in a user when reCAPTCHA is solved' do
# Avoid test ordering issue and ensure `verify_recaptcha` returns true
Recaptcha.configuration.skip_verify_env << 'test'
+ counter = double(:counter)
+
+ expect(counter).to receive(:increment)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:successful_login_captcha_total, anything)
+ .and_return(counter)
+ expect(Gitlab::Metrics).to receive(:counter).and_call_original
post(:create, user: user_params)
diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb
index 6fb621b5e51..347e4f433e2 100644
--- a/spec/factories/ci/runners.rb
+++ b/spec/factories/ci/runners.rb
@@ -6,7 +6,6 @@ FactoryBot.define do
active true
access_level :not_protected
- is_shared true
runner_type :instance_type
trait :online do
@@ -14,12 +13,10 @@ FactoryBot.define do
end
trait :instance do
- is_shared true
runner_type :instance_type
end
trait :group do
- is_shared false
runner_type :group_type
after(:build) do |runner, evaluator|
@@ -28,7 +25,6 @@ FactoryBot.define do
end
trait :project do
- is_shared false
runner_type :project_type
after(:build) do |runner, evaluator|
diff --git a/spec/factories/todos.rb b/spec/factories/todos.rb
index 94f8caedfa6..14486c80341 100644
--- a/spec/factories/todos.rb
+++ b/spec/factories/todos.rb
@@ -1,8 +1,8 @@
FactoryBot.define do
factory :todo do
project
- author { project.creator }
- user { project.creator }
+ author { project&.creator || user }
+ user { project&.creator || user }
target factory: :issue
action { Todo::ASSIGNED }
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 3db384e5b65..df21a42c3d3 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -16,9 +16,9 @@ feature 'Gcp Cluster', :js do
let(:project_id) { 'test-project-1234' }
before do
- allow_any_instance_of(Projects::Clusters::GcpController)
+ allow_any_instance_of(Projects::ClustersController)
.to receive(:token_in_session).and_return('token')
- allow_any_instance_of(Projects::Clusters::GcpController)
+ allow_any_instance_of(Projects::ClustersController)
.to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
end
@@ -27,7 +27,7 @@ feature 'Gcp Cluster', :js do
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
- click_link 'Create on Google Kubernetes Engine'
+ click_link 'Create new Cluster on GKE'
end
context 'when user filled form with valid parameters' do
@@ -148,7 +148,7 @@ feature 'Gcp Cluster', :js do
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
- click_link 'Create on Google Kubernetes Engine'
+ click_link 'Create new Cluster on GKE'
end
it 'user sees a login page' do
@@ -187,7 +187,7 @@ feature 'Gcp Cluster', :js do
it 'user sees offer on cluster GCP login page' do
click_link 'Add Kubernetes cluster'
- click_link 'Create on Google Kubernetes Engine'
+ click_link 'Create new Cluster on GKE'
expect(page).to have_css('.gcp-signup-offer')
end
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 698b64a659c..766ea58cc17 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -17,7 +17,7 @@ feature 'User Cluster', :js do
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
- click_link 'Add an existing Kubernetes cluster'
+ click_link 'Add existing cluster'
end
context 'when user filled form with valid parameters' do
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index a251a2f4e52..64241102c8b 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -83,7 +83,7 @@ feature 'Clusters', :js do
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
- click_link 'Create on Google Kubernetes Engine'
+ click_link 'Create new Cluster on GKE'
end
it 'user sees a login page' do
diff --git a/spec/features/projects/issues/user_creates_issue_spec.rb b/spec/features/projects/issues/user_creates_issue_spec.rb
index e76f7c5589d..5e8662100c5 100644
--- a/spec/features/projects/issues/user_creates_issue_spec.rb
+++ b/spec/features/projects/issues/user_creates_issue_spec.rb
@@ -17,6 +17,9 @@ describe "User creates issue" do
expect(page).to have_no_content("Assign to")
.and have_no_content("Labels")
.and have_no_content("Milestone")
+
+ expect(page.find('#issue_title')['placeholder']).to eq 'Title'
+ expect(page.find('#issue_description')['placeholder']).to eq 'Write a comment or drag your files here…'
end
issue_title = "500 error on profile"
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index d2aaf60e72c..d06abdd999b 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -165,7 +165,7 @@ feature 'Jobs', :clean_gitlab_redis_shared_state do
it 'links to issues/new with the title and description filled in' do
button_title = "Job Failed ##{job.id}"
- job_url = project_job_path(project, job)
+ job_url = project_job_url(project, job, host: page.server.host, port: page.server.port)
options = { issue: { title: button_title, description: "Job [##{job.id}](#{job_url}) failed for #{job.sha}:\n" } }
href = new_project_issue_path(project, options)
diff --git a/spec/features/projects/milestones/new_spec.rb b/spec/features/projects/milestones/new_spec.rb
index f7900210fe6..6595bff549b 100644
--- a/spec/features/projects/milestones/new_spec.rb
+++ b/spec/features/projects/milestones/new_spec.rb
@@ -9,9 +9,9 @@ feature 'Creating a new project milestone', :js do
visit new_project_milestone_path(project)
end
- it 'description has autocomplete' do
+ it 'description has emoji autocomplete' do
find('#milestone_description').native.send_keys('')
- fill_in 'milestone_description', with: '@'
+ fill_in 'milestone_description', with: ':'
expect(page).to have_selector('.atwho-view')
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index e44361fbe26..7b9242f0631 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -5,6 +5,8 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
# see spec/features/projects/files/project_owner_creates_license_file_spec.rb
# see spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+ include FakeBlobHelpers
+
let(:user) { create(:user) }
describe 'empty project' do
@@ -141,11 +143,57 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
allow_any_instance_of(AutoDevopsHelper).to receive(:show_auto_devops_callout?).and_return(false)
project.add_master(user)
sign_in(user)
+ end
- visit project_path(project)
+ context 'Readme button' do
+ before do
+ allow(Project).to receive(:find_by_full_path)
+ .with(project.full_path, follow_redirects: true)
+ .and_return(project)
+ end
+
+ context 'when the project has a populated Readme' do
+ it 'shows the "Readme" anchor' do
+ visit project_path(project)
+
+ expect(project.repository.readme).not_to be_nil
+
+ page.within('.project-stats') do
+ expect(page).not_to have_link('Add Readme', href: presenter.add_readme_path)
+ expect(page).to have_link('Readme', href: presenter.readme_path)
+ end
+ end
+
+ context 'when the project has an empty Readme' do
+ it 'shows the "Readme" anchor' do
+ allow(project.repository).to receive(:readme).and_return(fake_blob(path: 'README.md', data: '', size: 0))
+
+ visit project_path(project)
+
+ page.within('.project-stats') do
+ expect(page).not_to have_link('Add Readme', href: presenter.add_readme_path)
+ expect(page).to have_link('Readme', href: presenter.readme_path)
+ end
+ end
+ end
+ end
+
+ context 'when the project does not have a Readme' do
+ it 'shows the "Add Readme" button' do
+ allow(project.repository).to receive(:readme).and_return(nil)
+
+ visit project_path(project)
+
+ page.within('.project-stats') do
+ expect(page).to have_link('Add Readme', href: presenter.add_readme_path)
+ end
+ end
+ end
end
it 'no "Add Changelog" button if the project already has a changelog' do
+ visit project_path(project)
+
expect(project.repository.changelog).not_to be_nil
page.within('.project-stats') do
@@ -154,6 +202,8 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
end
it 'no "Add License" button if the project already has a license' do
+ visit project_path(project)
+
expect(project.repository.license_blob).not_to be_nil
page.within('.project-stats') do
@@ -162,6 +212,8 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
end
it 'no "Add Contribution guide" button if the project already has a contribution guide' do
+ visit project_path(project)
+
expect(project.repository.contribution_guide).not_to be_nil
page.within('.project-stats') do
@@ -171,6 +223,8 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
describe 'GitLab CI configuration button' do
it '"Set up CI/CD" button linked to new file populated for a .gitlab-ci.yml' do
+ visit project_path(project)
+
expect(project.repository.gitlab_ci_yml).to be_nil
page.within('.project-stats') do
@@ -211,6 +265,8 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
describe 'Auto DevOps button' do
it '"Enable Auto DevOps" button linked to settings page' do
+ visit project_path(project)
+
page.within('.project-stats') do
expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
end
@@ -263,6 +319,8 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
describe 'Kubernetes cluster button' do
it '"Add Kubernetes cluster" button linked to clusters page' do
+ visit project_path(project)
+
page.within('.project-stats') do
expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project))
end
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index 706894f4b32..733e6c89de7 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -242,7 +242,7 @@ describe "User creates wiki page" do
end
end
- it "shows the autocompletion dropdown" do
+ it "shows the emoji autocompletion dropdown" do
click_link("New page")
page.within("#modal-new-wiki") do
@@ -254,7 +254,7 @@ describe "User creates wiki page" do
page.within(".wiki-form") do
find("#wiki_content").native.send_keys("")
- fill_in(:wiki_content, with: "@")
+ fill_in(:wiki_content, with: ":")
end
expect(page).to have_selector(".atwho-view")
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index 272dac127dd..2ccbc15b6da 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -96,11 +96,11 @@ describe 'User updates wiki page' do
expect(find('textarea#wiki_content').value).to eq('')
end
- it 'shows the autocompletion dropdown', :js do
+ it 'shows the emoji autocompletion dropdown', :js do
click_link('Edit')
find('#wiki_content').native.send_keys('')
- fill_in(:wiki_content, with: '@')
+ fill_in(:wiki_content, with: ':')
expect(page).to have_selector('.atwho-view')
end
diff --git a/spec/features/tags/master_creates_tag_spec.rb b/spec/features/tags/master_creates_tag_spec.rb
index 8a8f6933fa5..6701f575a23 100644
--- a/spec/features/tags/master_creates_tag_spec.rb
+++ b/spec/features/tags/master_creates_tag_spec.rb
@@ -75,9 +75,9 @@ feature 'Master creates tag' do
visit new_project_tag_path(project)
end
- it 'description has autocomplete', :js do
+ it 'description has emoji autocomplete', :js do
find('#release_description').native.send_keys('')
- fill_in 'release_description', with: '@'
+ fill_in 'release_description', with: ':'
expect(page).to have_selector('.atwho-view')
end
diff --git a/spec/features/tags/master_deletes_tag_spec.rb b/spec/features/tags/master_deletes_tag_spec.rb
index 9981bfa4609..1d4df2c55a7 100644
--- a/spec/features/tags/master_deletes_tag_spec.rb
+++ b/spec/features/tags/master_deletes_tag_spec.rb
@@ -35,30 +35,15 @@ feature 'Master deletes tag' do
end
context 'when pre-receive hook fails', :js do
- context 'when Gitaly operation_user_delete_tag feature is enabled' do
- before do
- allow_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rm_tag)
- .and_raise(Gitlab::Git::PreReceiveError, 'Do not delete tags')
- end
-
- scenario 'shows the error message' do
- delete_first_tag
-
- expect(page).to have_content('Do not delete tags')
- end
+ before do
+ allow_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rm_tag)
+ .and_raise(Gitlab::Git::PreReceiveError, 'Do not delete tags')
end
- context 'when Gitaly operation_user_delete_tag feature is disabled', :skip_gitaly_mock do
- before do
- allow_any_instance_of(Gitlab::Git::HooksService).to receive(:execute)
- .and_raise(Gitlab::Git::PreReceiveError, 'Do not delete tags')
- end
-
- scenario 'shows the error message' do
- delete_first_tag
+ scenario 'shows the error message' do
+ delete_first_tag
- expect(page).to have_content('Do not delete tags')
- end
+ expect(page).to have_content('Do not delete tags')
end
end
diff --git a/spec/features/tags/master_updates_tag_spec.rb b/spec/features/tags/master_updates_tag_spec.rb
index 1c370a99b13..26f51bee887 100644
--- a/spec/features/tags/master_updates_tag_spec.rb
+++ b/spec/features/tags/master_updates_tag_spec.rb
@@ -25,13 +25,13 @@ feature 'Master updates tag' do
expect(page).to have_content 'Awesome release notes'
end
- scenario 'description has autocomplete', :js do
+ scenario 'description has emoji autocomplete', :js do
page.within(first('.content-list .controls')) do
click_link 'Edit release notes'
end
find('#release_description').native.send_keys('')
- fill_in 'release_description', with: '@'
+ fill_in 'release_description', with: ':'
expect(page).to have_selector('.atwho-view')
end
diff --git a/spec/finders/pipelines_finder_spec.rb b/spec/finders/pipelines_finder_spec.rb
index d6253b605b9..c6e832ad69b 100644
--- a/spec/finders/pipelines_finder_spec.rb
+++ b/spec/finders/pipelines_finder_spec.rb
@@ -1,9 +1,10 @@
require 'spec_helper'
describe PipelinesFinder do
- let(:project) { create(:project, :repository) }
-
- subject { described_class.new(project, params).execute }
+ let(:project) { create(:project, :public, :repository) }
+ let(:current_user) { nil }
+ let(:params) { {} }
+ subject { described_class.new(project, current_user, params).execute }
describe "#execute" do
context 'when params is empty' do
@@ -223,5 +224,27 @@ describe PipelinesFinder do
end
end
end
+
+ context 'when the project has limited access to pipelines' do
+ let(:project) { create(:project, :private, :repository) }
+ let(:current_user) { create(:user) }
+ let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) }
+
+ context 'when the user has access' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'is expected to return pipelines' do
+ is_expected.to contain_exactly(*pipelines)
+ end
+ end
+
+ context 'when the user is not allowed to read pipelines' do
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
end
end
diff --git a/spec/finders/todos_finder_spec.rb b/spec/finders/todos_finder_spec.rb
index 9747b9402a7..6061021d3b0 100644
--- a/spec/finders/todos_finder_spec.rb
+++ b/spec/finders/todos_finder_spec.rb
@@ -5,12 +5,76 @@ describe TodosFinder do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
+ let(:issue) { create(:issue, project: project) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
let(:finder) { described_class }
before do
group.add_developer(user)
end
+ describe '#execute' do
+ context 'visibility' do
+ let(:private_group_access) { create(:group, :private) }
+ let(:private_group_hidden) { create(:group, :private) }
+ let(:public_project) { create(:project, :public) }
+ let(:private_project_hidden) { create(:project) }
+ let(:public_group) { create(:group) }
+
+ let!(:todo1) { create(:todo, user: user, project: project, group: nil) }
+ let!(:todo2) { create(:todo, user: user, project: public_project, group: nil) }
+ let!(:todo3) { create(:todo, user: user, project: private_project_hidden, group: nil) }
+ let!(:todo4) { create(:todo, user: user, project: nil, group: group) }
+ let!(:todo5) { create(:todo, user: user, project: nil, group: private_group_access) }
+ let!(:todo6) { create(:todo, user: user, project: nil, group: private_group_hidden) }
+ let!(:todo7) { create(:todo, user: user, project: nil, group: public_group) }
+
+ before do
+ private_group_access.add_developer(user)
+ end
+
+ it 'returns only todos with a target a user has access to' do
+ todos = finder.new(user).execute
+
+ expect(todos).to match_array([todo1, todo2, todo4, todo5, todo7])
+ end
+ end
+
+ context 'filtering' do
+ let!(:todo1) { create(:todo, user: user, project: project, target: issue) }
+ let!(:todo2) { create(:todo, user: user, group: group, target: merge_request) }
+
+ it 'returns correct todos when filtered by a project' do
+ todos = finder.new(user, { project_id: project.id }).execute
+
+ expect(todos).to match_array([todo1])
+ end
+
+ it 'returns correct todos when filtered by a group' do
+ todos = finder.new(user, { group_id: group.id }).execute
+
+ expect(todos).to match_array([todo1, todo2])
+ end
+
+ it 'returns correct todos when filtered by a type' do
+ todos = finder.new(user, { type: 'Issue' }).execute
+
+ expect(todos).to match_array([todo1])
+ end
+
+ context 'with subgroups', :nested_groups do
+ let(:subgroup) { create(:group, parent: group) }
+ let!(:todo3) { create(:todo, user: user, group: subgroup, target: issue) }
+
+ it 'returns todos from subgroups when filtered by a group' do
+ todos = finder.new(user, { group_id: group.id }).execute
+
+ expect(todos).to match_array([todo1, todo2, todo3])
+ end
+ end
+ end
+ end
+
describe '#sort' do
context 'by date' do
let!(:todo1) { create(:todo, user: user, project: project) }
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index b892f6b44ed..515bbe78cb7 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -27,6 +27,12 @@ describe GitlabSchema do
expect(described_class.query).to eq(::Types::QueryType.to_graphql)
end
+ it 'paginates active record relations using `Gitlab::Graphql::Connections::KeysetConnection`' do
+ connection = GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS[ActiveRecord::Relation.name]
+
+ expect(connection).to eq(Gitlab::Graphql::Connections::KeysetConnection)
+ end
+
def field_instrumenters
described_class.instrumenters[:field]
end
diff --git a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
new file mode 100644
index 00000000000..ea7159eacf9
--- /dev/null
+++ b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
@@ -0,0 +1,52 @@
+require 'spec_helper'
+
+describe ResolvesPipelines do
+ include GraphqlHelpers
+
+ subject(:resolver) do
+ Class.new(Resolvers::BaseResolver) do
+ include ResolvesPipelines
+
+ def resolve(**args)
+ resolve_pipelines(object, args)
+ end
+ end
+ end
+
+ let(:current_user) { create(:user) }
+ set(:project) { create(:project, :private) }
+ set(:pipeline) { create(:ci_pipeline, project: project) }
+ set(:failed_pipeline) { create(:ci_pipeline, :failed, project: project) }
+ set(:ref_pipeline) { create(:ci_pipeline, project: project, ref: 'awesome-feature') }
+ set(:sha_pipeline) { create(:ci_pipeline, project: project, sha: 'deadbeef') }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it { is_expected.to have_graphql_arguments(:status, :ref, :sha) }
+
+ it 'finds all pipelines' do
+ expect(resolve_pipelines).to contain_exactly(pipeline, failed_pipeline, ref_pipeline, sha_pipeline)
+ end
+
+ it 'allows filtering by status' do
+ expect(resolve_pipelines(status: 'failed')).to contain_exactly(failed_pipeline)
+ end
+
+ it 'allows filtering by ref' do
+ expect(resolve_pipelines(ref: 'awesome-feature')).to contain_exactly(ref_pipeline)
+ end
+
+ it 'allows filtering by sha' do
+ expect(resolve_pipelines(sha: 'deadbeef')).to contain_exactly(sha_pipeline)
+ end
+
+ it 'does not return any pipelines if the user does not have access' do
+ expect(resolve_pipelines({}, {})).to be_empty
+ end
+
+ def resolve_pipelines(args = {}, context = { current_user: current_user })
+ resolve(resolver, obj: project, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
new file mode 100644
index 00000000000..09b17bf6fc9
--- /dev/null
+++ b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe Resolvers::MergeRequestPipelinesResolver do
+ include GraphqlHelpers
+
+ set(:merge_request) { create(:merge_request) }
+ set(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha
+ )
+ end
+ set(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project) }
+ set(:other_pipeline) { create(:ci_pipeline) }
+ let(:current_user) { create(:user) }
+
+ before do
+ merge_request.project.add_developer(current_user)
+ end
+
+ def resolve_pipelines
+ resolve(described_class, obj: merge_request, ctx: { current_user: current_user })
+ end
+
+ it 'resolves only pipelines for the passed merge request' do
+ expect(resolve_pipelines).to contain_exactly(pipeline)
+ end
+end
diff --git a/spec/graphql/resolvers/project_pipelines_resolver_spec.rb b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
new file mode 100644
index 00000000000..407ca2f9d78
--- /dev/null
+++ b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe Resolvers::ProjectPipelinesResolver do
+ include GraphqlHelpers
+
+ set(:project) { create(:project) }
+ set(:pipeline) { create(:ci_pipeline, project: project) }
+ set(:other_pipeline) { create(:ci_pipeline) }
+ let(:current_user) { create(:user) }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ def resolve_pipelines
+ resolve(described_class, obj: project, ctx: { current_user: current_user })
+ end
+
+ it 'resolves only pipelines for the passed project' do
+ expect(resolve_pipelines).to contain_exactly(pipeline)
+ end
+end
diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb
new file mode 100644
index 00000000000..ec1c689a4be
--- /dev/null
+++ b/spec/graphql/types/ci/pipeline_type_spec.rb
@@ -0,0 +1,7 @@
+require 'spec_helper'
+
+describe Types::Ci::PipelineType do
+ it { expect(described_class.graphql_name).to eq('Pipeline') }
+
+ it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Ci::Pipeline) }
+end
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index 6e57122867a..c369953e3ea 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -1,5 +1,16 @@
require 'spec_helper'
-describe Types::MergeRequestType do
+describe GitlabSchema.types['MergeRequest'] do
it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::MergeRequest) }
+
+ describe 'head pipeline' do
+ it 'has a head pipeline field' do
+ expect(described_class).to have_graphql_field(:head_pipeline)
+ end
+
+ it 'authorizes the field' do
+ expect(described_class.fields['headPipeline'])
+ .to require_graphql_authorizations(:read_pipeline)
+ end
+ end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 7b5bc335511..49606c397b9 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -13,4 +13,6 @@ describe GitlabSchema.types['Project'] do
.to require_graphql_authorizations(:read_merge_request)
end
end
+
+ it { is_expected.to have_graphql_field(:pipelines) }
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index cddb49b320f..d246beb9888 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -21,6 +21,27 @@ describe IssuablesHelper do
end
end
+ describe '#group_dropdown_label' do
+ let(:group) { create(:group) }
+ let(:default) { 'default label' }
+
+ it 'returns default group label when group_id is nil' do
+ expect(group_dropdown_label(nil, default)).to eq('default label')
+ end
+
+ it 'returns "any group" when group_id is 0' do
+ expect(group_dropdown_label('0', default)).to eq('Any group')
+ end
+
+ it 'returns group full path when a group was found for the provided id' do
+ expect(group_dropdown_label(group.id, default)).to eq(group.full_name)
+ end
+
+ it 'returns default label when a group was not found for the provided id' do
+ expect(group_dropdown_label(9999, default)).to eq('default label')
+ end
+ end
+
describe '#issuable_labels_tooltip' do
it 'returns label text with no labels' do
expect(issuable_labels_tooltip([])).to eq("Labels")
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index 02836fcaeea..1af49f4985c 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -24,21 +24,6 @@ describe('DiffsStoreMutations', () => {
});
});
- describe('SET_DIFF_FILES', () => {
- it('should set diff files to state', () => {
- const filePath = '/first-diff-file-path';
- const state = {};
- const diffFiles = {
- a_mode: 1,
- highlighted_diff_lines: [{ file_path: filePath }],
- };
-
- mutations[types.SET_DIFF_FILES](state, diffFiles);
- expect(state.diffFiles.aMode).toEqual(1);
- expect(state.diffFiles.highlightedDiffLines[0].filePath).toEqual(filePath);
- });
- });
-
describe('SET_DIFF_VIEW_TYPE', () => {
it('should set diff view type properly', () => {
const state = {};
diff --git a/spec/javascripts/ide/stores/modules/merge_requests/actions_spec.js b/spec/javascripts/ide/stores/modules/merge_requests/actions_spec.js
index fa4c18931e5..d21f33eaf6d 100644
--- a/spec/javascripts/ide/stores/modules/merge_requests/actions_spec.js
+++ b/spec/javascripts/ide/stores/modules/merge_requests/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import state from '~/ide/stores/modules/merge_requests/state';
import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
-import actions, {
+import {
requestMergeRequests,
receiveMergeRequestsError,
receiveMergeRequestsSuccess,
@@ -41,28 +41,26 @@ describe('IDE merge requests actions', () => {
});
describe('receiveMergeRequestsError', () => {
- let flashSpy;
-
- beforeEach(() => {
- flashSpy = spyOnDependency(actions, 'flash');
- });
-
it('should should commit error', done => {
testAction(
receiveMergeRequestsError,
- 'created',
+ { type: 'created', search: '' },
mockedState,
[{ type: types.RECEIVE_MERGE_REQUESTS_ERROR, payload: 'created' }],
- [],
+ [
+ {
+ type: 'setErrorMessage',
+ payload: {
+ text: 'Error loading merge requests.',
+ action: jasmine.any(Function),
+ actionText: 'Please try again',
+ actionPayload: { type: 'created', search: '' },
+ },
+ },
+ ],
done,
);
});
-
- it('creates flash message', () => {
- receiveMergeRequestsError({ commit() {} }, 'created');
-
- expect(flashSpy).toHaveBeenCalled();
- });
});
describe('receiveMergeRequestsSuccess', () => {
diff --git a/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js b/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js
index f47e69d6e5b..836ba72b5d8 100644
--- a/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js
+++ b/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js
@@ -1,7 +1,7 @@
import Visibility from 'visibilityjs';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import actions, {
+import {
requestLatestPipeline,
receiveLatestPipelineError,
receiveLatestPipelineSuccess,
@@ -59,7 +59,7 @@ describe('IDE pipelines actions', () => {
it('commits error', done => {
testAction(
receiveLatestPipelineError,
- null,
+ { status: 404 },
mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
[{ type: 'stopPipelinePolling' }],
@@ -67,12 +67,26 @@ describe('IDE pipelines actions', () => {
);
});
- it('creates flash message', () => {
- const flashSpy = spyOnDependency(actions, 'flash');
-
- receiveLatestPipelineError({ commit() {}, dispatch() {} });
-
- expect(flashSpy).toHaveBeenCalled();
+ it('dispatches setErrorMessage if status is not 404', done => {
+ testAction(
+ receiveLatestPipelineError,
+ { status: 500 },
+ mockedState,
+ [{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
+ [
+ {
+ type: 'setErrorMessage',
+ payload: {
+ text: 'An error occured whilst fetching the latest pipline.',
+ action: jasmine.any(Function),
+ actionText: 'Please try again',
+ actionPayload: null,
+ },
+ },
+ { type: 'stopPipelinePolling' },
+ ],
+ done,
+ );
});
});
@@ -181,7 +195,10 @@ describe('IDE pipelines actions', () => {
new Promise(resolve => requestAnimationFrame(resolve))
.then(() => {
- expect(dispatch.calls.argsFor(1)).toEqual(['receiveLatestPipelineError']);
+ expect(dispatch.calls.argsFor(1)).toEqual([
+ 'receiveLatestPipelineError',
+ jasmine.anything(),
+ ]);
})
.then(done)
.catch(done.fail);
@@ -199,21 +216,23 @@ describe('IDE pipelines actions', () => {
it('commits error', done => {
testAction(
receiveJobsError,
- 1,
+ { id: 1 },
mockedState,
[{ type: types.RECEIVE_JOBS_ERROR, payload: 1 }],
- [],
+ [
+ {
+ type: 'setErrorMessage',
+ payload: {
+              text: 'An error occurred whilst loading the pipelines jobs.',
+ action: jasmine.anything(),
+ actionText: 'Please try again',
+ actionPayload: { id: 1 },
+ },
+ },
+ ],
done,
);
});
-
- it('creates flash message', () => {
- const flashSpy = spyOnDependency(actions, 'flash');
-
- receiveJobsError({ commit() {} }, 1);
-
- expect(flashSpy).toHaveBeenCalled();
- });
});
describe('receiveJobsSuccess', () => {
@@ -268,7 +287,7 @@ describe('IDE pipelines actions', () => {
[],
[
{ type: 'requestJobs', payload: stage.id },
- { type: 'receiveJobsError', payload: stage.id },
+ { type: 'receiveJobsError', payload: stage },
],
done,
);
@@ -337,18 +356,20 @@ describe('IDE pipelines actions', () => {
null,
mockedState,
[{ type: types.RECEIVE_JOB_TRACE_ERROR }],
- [],
+ [
+ {
+ type: 'setErrorMessage',
+ payload: {
+              text: 'An error occurred whilst fetching the job trace.',
+ action: jasmine.any(Function),
+ actionText: 'Please try again',
+ actionPayload: null,
+ },
+ },
+ ],
done,
);
});
-
- it('creates flash message', () => {
- const flashSpy = spyOnDependency(actions, 'flash');
-
- receiveJobTraceError({ commit() {} });
-
- expect(flashSpy).toHaveBeenCalled();
- });
});
describe('receiveJobTraceSuccess', () => {
diff --git a/spec/javascripts/sidebar/todo_spec.js b/spec/javascripts/sidebar/todo_spec.js
new file mode 100644
index 00000000000..a929b804a29
--- /dev/null
+++ b/spec/javascripts/sidebar/todo_spec.js
@@ -0,0 +1,158 @@
+import Vue from 'vue';
+
+import SidebarTodos from '~/sidebar/components/todo_toggle/todo.vue';
+import mountComponent from 'spec/helpers/vue_mount_component_helper';
+
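+// Mounts the todo toggle with overridable props; the specs below mutate vm props directly and wait for nextTick.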
+const createComponent = ({
+ issuableId = 1,
+ issuableType = 'epic',
+ isTodo,
+ isActionActive,
+ collapsed,
+}) => {
+ const Component = Vue.extend(SidebarTodos);
+
+ return mountComponent(Component, {
+ issuableId,
+ issuableType,
+ isTodo,
+ isActionActive,
+ collapsed,
+ });
+};
+
+describe('SidebarTodo', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = createComponent({});
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('computed', () => {
+ describe('buttonClasses', () => {
+ it('returns todo button classes for when `collapsed` prop is `false`', () => {
+ expect(vm.buttonClasses).toBe('btn btn-default btn-todo issuable-header-btn float-right');
+ });
+
+ it('returns todo button classes for when `collapsed` prop is `true`', done => {
+ vm.collapsed = true;
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.buttonClasses).toBe('btn-blank btn-todo sidebar-collapsed-icon dont-change-state');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('buttonLabel', () => {
+ it('returns todo button text for marking todo as done when `isTodo` prop is `true`', () => {
+ expect(vm.buttonLabel).toBe('Mark todo as done');
+ });
+
+ it('returns todo button text for add todo when `isTodo` prop is `false`', done => {
+ vm.isTodo = false;
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.buttonLabel).toBe('Add todo');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('collapsedButtonIconClasses', () => {
+ it('returns collapsed button icon class when `isTodo` prop is `true`', () => {
+ expect(vm.collapsedButtonIconClasses).toBe('todo-undone');
+ });
+
+ it('returns empty string when `isTodo` prop is `false`', done => {
+ vm.isTodo = false;
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.collapsedButtonIconClasses).toBe('');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('collapsedButtonIcon', () => {
+ it('returns button icon name when `isTodo` prop is `true`', () => {
+ expect(vm.collapsedButtonIcon).toBe('todo-done');
+ });
+
+ it('returns button icon name when `isTodo` prop is `false`', done => {
+ vm.isTodo = false;
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.collapsedButtonIcon).toBe('todo-add');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('handleButtonClick', () => {
+ it('emits `toggleTodo` event on component', () => {
+ spyOn(vm, '$emit');
+ vm.handleButtonClick();
+ expect(vm.$emit).toHaveBeenCalledWith('toggleTodo');
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders component container element', () => {
+ const dataAttributes = {
+ issuableId: '1',
+ issuableType: 'epic',
+ originalTitle: 'Mark todo as done',
+ placement: 'left',
+ container: 'body',
+ boundary: 'viewport',
+ };
+ expect(vm.$el.nodeName).toBe('BUTTON');
+
+ const elDataAttrs = vm.$el.dataset;
+ Object.keys(elDataAttrs).forEach((attr) => {
+ expect(elDataAttrs[attr]).toBe(dataAttributes[attr]);
+ });
+ });
+
+ it('renders button label element when `collapsed` prop is `false`', () => {
+ const buttonLabelEl = vm.$el.querySelector('span.issuable-todo-inner');
+ expect(buttonLabelEl).not.toBeNull();
+ expect(buttonLabelEl.innerText.trim()).toBe('Mark todo as done');
+ });
+
+ it('renders button icon when `collapsed` prop is `true`', done => {
+ vm.collapsed = true;
+ Vue.nextTick()
+ .then(() => {
+ const buttonIconEl = vm.$el.querySelector('svg');
+ expect(buttonIconEl).not.toBeNull();
+ expect(buttonIconEl.querySelector('use').getAttribute('xlink:href')).toContain('todo-done');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('renders loading icon when `isActionActive` prop is true', done => {
+ vm.isActionActive = true;
+ Vue.nextTick()
+ .then(() => {
+ const loadingEl = vm.$el.querySelector('span.loading-container');
+ expect(loadingEl).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
index a1dd72c498f..55c41e55437 100644
--- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
@@ -210,6 +210,13 @@ describe Banzai::Filter::MergeRequestReferenceFilter do
.to eq reference
end
+    it 'has a valid commit ref tag' do
+ doc = reference_filter("See #{reference}")
+ commit_ref_tag = doc.css('a').first.css('span.gfm.gfm-commit')
+
+ expect(commit_ref_tag.text).to eq(commit.short_id)
+ end
+
it 'has valid text' do
doc = reference_filter("See #{reference}")
diff --git a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb b/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
index a251ab323d8..1b3df7b20d4 100644
--- a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
+++ b/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180619121030 do
+describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180626125654 do
describe '#perform' do
context 'when diff files can be deleted' do
let(:merge_request) { create(:merge_request, :merged) }
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
index f48ee8924e8..79287021981 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
@@ -20,6 +20,15 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
diff_files
end
+  it 'uses a different cache key if diff line keys change' do
+ mr_diff = described_class.new(merge_request.merge_request_diff, diff_options: nil)
+ key = mr_diff.cache_key
+
+ stub_const('Gitlab::Diff::Line::SERIALIZE_KEYS', [:foo])
+
+ expect(mr_diff.cache_key).not_to eq(key)
+ end
+
shared_examples 'initializes a DiffCollection' do
it 'returns a valid instance of a DiffCollection' do
expect(diff_files).to be_a(Gitlab::Git::DiffCollection)
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 5dfbb8e71f8..ebeb05d6e02 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -26,6 +26,21 @@ describe Gitlab::Diff::File do
end
end
+ describe '#diff_lines_for_serializer' do
+    it 'includes the bottom match line if it is not at the end' do
+ expect(diff_file.diff_lines_for_serializer.last.type).to eq('match')
+ end
+
+ context 'when deleted' do
+ let(:commit) { project.commit('d59c60028b053793cecfb4022de34602e1a9218e') }
+ let(:diff_file) { commit.diffs.diff_file_with_old_path('files/js/commit.js.coffee') }
+
+ it 'does not include bottom match line' do
+ expect(diff_file.diff_lines_for_serializer.last.type).not_to eq('match')
+ end
+ end
+ end
+
describe '#mode_changed?' do
it { expect(diff_file.mode_changed?).to be_falsey }
end
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index 13df8531b63..ef52a25f47e 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -20,37 +20,55 @@ describe Gitlab::Gfm::UploadsRewriter do
"Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
end
- describe '#rewrite' do
- let!(:new_text) { rewriter.rewrite(new_project) }
+ shared_examples "files are accessible" do
+ describe '#rewrite' do
+ let!(:new_text) { rewriter.rewrite(new_project) }
- let(:old_files) { [image_uploader, zip_uploader].map(&:file) }
- let(:new_files) do
- described_class.new(new_text, new_project, user).files
- end
+ let(:old_files) { [image_uploader, zip_uploader] }
+ let(:new_files) do
+ described_class.new(new_text, new_project, user).files
+ end
- let(:old_paths) { old_files.map(&:path) }
- let(:new_paths) { new_files.map(&:path) }
+ let(:old_paths) { old_files.map(&:path) }
+ let(:new_paths) { new_files.map(&:path) }
- it 'rewrites content' do
- expect(new_text).not_to eq text
- expect(new_text.length).to eq text.length
- end
+ it 'rewrites content' do
+ expect(new_text).not_to eq text
+ expect(new_text.length).to eq text.length
+ end
- it 'copies files' do
- expect(new_files).to all(exist)
- expect(old_paths).not_to match_array new_paths
- expect(old_paths).to all(include(old_project.disk_path))
- expect(new_paths).to all(include(new_project.disk_path))
- end
+ it 'copies files' do
+ expect(new_files).to all(exist)
+ expect(old_paths).not_to match_array new_paths
+ expect(old_paths).to all(include(old_project.disk_path))
+ expect(new_paths).to all(include(new_project.disk_path))
+ end
- it 'does not remove old files' do
- expect(old_files).to all(exist)
+ it 'does not remove old files' do
+ expect(old_files).to all(exist)
+ end
+
+ it 'generates a new secret for each file' do
+ expect(new_paths).not_to include image_uploader.secret
+ expect(new_paths).not_to include zip_uploader.secret
+ end
end
+ end
- it 'generates a new secret for each file' do
- expect(new_paths).not_to include image_uploader.secret
- expect(new_paths).not_to include zip_uploader.secret
+ context "file are stored locally" do
+ include_examples "files are accessible"
+ end
+
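+  # Same assertions as the local case, but with the uploads migrated to object storage first.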
+ context "files are stored remotely" do
+ before do
+ stub_uploads_object_storage(FileUploader)
+
+ old_files.each do |file|
+ file.migrate!(ObjectStorage::Store::REMOTE)
+ end
end
+
+ include_examples "files are accessible"
end
describe '#needs_rewrite?' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 6ec4b90d70c..5dd7af3a552 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -1402,94 +1402,84 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe "#copy_gitattributes" do
- shared_examples 'applying git attributes' do
- let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') }
+ let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') }
- after do
- FileUtils.rm_rf(attributes_path) if Dir.exist?(attributes_path)
- end
-
- it "raises an error with invalid ref" do
- expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef)
- end
+ after do
+ FileUtils.rm_rf(attributes_path) if Dir.exist?(attributes_path)
+ end
- context 'when forcing encoding issues' do
- let(:branch_name) { "ʕ•ᴥ•ʔ" }
+ it "raises an error with invalid ref" do
+ expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef)
+ end
- before do
- repository.create_branch(branch_name, "master")
- end
+ context 'when forcing encoding issues' do
+ let(:branch_name) { "ʕ•ᴥ•ʔ" }
- after do
- repository.rm_branch(branch_name, user: build(:admin))
- end
-
- it "doesn't raise with a valid unicode ref" do
- expect { repository.copy_gitattributes(branch_name) }.not_to raise_error
+ before do
+ repository.create_branch(branch_name, "master")
+ end
- repository
- end
+ after do
+ repository.rm_branch(branch_name, user: build(:admin))
end
- context "with no .gitattrbutes" do
- before do
- repository.copy_gitattributes("master")
- end
+ it "doesn't raise with a valid unicode ref" do
+ expect { repository.copy_gitattributes(branch_name) }.not_to raise_error
- it "does not have an info/attributes" do
- expect(File.exist?(attributes_path)).to be_falsey
- end
+ repository
end
+ end
- context "with .gitattrbutes" do
- before do
- repository.copy_gitattributes("gitattributes")
- end
+ context "with no .gitattrbutes" do
+ before do
+ repository.copy_gitattributes("master")
+ end
- it "has an info/attributes" do
- expect(File.exist?(attributes_path)).to be_truthy
- end
+ it "does not have an info/attributes" do
+ expect(File.exist?(attributes_path)).to be_falsey
+ end
+ end
- it "has the same content in info/attributes as .gitattributes" do
- contents = File.open(attributes_path, "rb") { |f| f.read }
- expect(contents).to eq("*.md binary\n")
- end
+ context "with .gitattrbutes" do
+ before do
+ repository.copy_gitattributes("gitattributes")
end
- context "with updated .gitattrbutes" do
- before do
- repository.copy_gitattributes("gitattributes")
- repository.copy_gitattributes("gitattributes-updated")
- end
+ it "has an info/attributes" do
+ expect(File.exist?(attributes_path)).to be_truthy
+ end
- it "has an info/attributes" do
- expect(File.exist?(attributes_path)).to be_truthy
- end
+ it "has the same content in info/attributes as .gitattributes" do
+ contents = File.open(attributes_path, "rb") { |f| f.read }
+ expect(contents).to eq("*.md binary\n")
+ end
+ end
- it "has the updated content in info/attributes" do
- contents = File.read(attributes_path)
- expect(contents).to eq("*.txt binary\n")
- end
+ context "with updated .gitattrbutes" do
+ before do
+ repository.copy_gitattributes("gitattributes")
+ repository.copy_gitattributes("gitattributes-updated")
end
- context "with no .gitattrbutes in HEAD but with previous info/attributes" do
- before do
- repository.copy_gitattributes("gitattributes")
- repository.copy_gitattributes("master")
- end
+ it "has an info/attributes" do
+ expect(File.exist?(attributes_path)).to be_truthy
+ end
- it "does not have an info/attributes" do
- expect(File.exist?(attributes_path)).to be_falsey
- end
+ it "has the updated content in info/attributes" do
+ contents = File.read(attributes_path)
+ expect(contents).to eq("*.txt binary\n")
end
end
- context 'when gitaly is enabled' do
- it_behaves_like 'applying git attributes'
- end
+ context "with no .gitattrbutes in HEAD but with previous info/attributes" do
+ before do
+ repository.copy_gitattributes("gitattributes")
+ repository.copy_gitattributes("master")
+ end
- context 'when gitaly is disabled', :disable_gitaly do
- it_behaves_like 'applying git attributes'
+ it "does not have an info/attributes" do
+ expect(File.exist?(attributes_path)).to be_falsey
+ end
end
end
@@ -1971,21 +1961,15 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- context 'with gitaly' do
- it "calls Gitaly's OperationService" do
- expect_any_instance_of(Gitlab::GitalyClient::OperationService)
- .to receive(:user_ff_branch).with(user, source_sha, target_branch)
- .and_return(nil)
-
- subject
- end
+ it "calls Gitaly's OperationService" do
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService)
+ .to receive(:user_ff_branch).with(user, source_sha, target_branch)
+ .and_return(nil)
- it_behaves_like '#ff_merge'
+ subject
end
- context 'without gitaly', :skip_gitaly_mock do
- it_behaves_like '#ff_merge'
- end
+ it_behaves_like '#ff_merge'
end
describe '#delete_all_refs_except' do
@@ -2308,92 +2292,95 @@ describe Gitlab::Git::Repository, seed_helper: true do
expect { subject }.to raise_error(Gitlab::Git::CommandError, 'error')
end
end
+ end
- describe '#squash' do
- let(:squash_id) { '1' }
- let(:branch_name) { 'fix' }
- let(:start_sha) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' }
- let(:end_sha) { '12d65c8dd2b2676fa3ac47d955accc085a37a9c1' }
+ describe '#squash' do
+ let(:squash_id) { '1' }
+ let(:branch_name) { 'fix' }
+ let(:start_sha) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' }
+ let(:end_sha) { '12d65c8dd2b2676fa3ac47d955accc085a37a9c1' }
- subject do
- opts = {
- branch: branch_name,
- start_sha: start_sha,
- end_sha: end_sha,
- author: user,
- message: 'Squash commit message'
- }
+ subject do
+ opts = {
+ branch: branch_name,
+ start_sha: start_sha,
+ end_sha: end_sha,
+ author: user,
+ message: 'Squash commit message'
+ }
+
+ repository.squash(user, squash_id, opts)
+ end
+
+ # Should be ported to gitaly-ruby rspec suite https://gitlab.com/gitlab-org/gitaly/issues/1234
+ skip 'sparse checkout' do
+ let(:expected_files) { %w(files files/js files/js/application.js) }
+
+ it 'checks out only the files in the diff' do
+ allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args|
+ m.call(*args) do
+ worktree_path = args[0]
+ files_pattern = File.join(worktree_path, '**', '*')
+ expected = expected_files.map do |path|
+ File.expand_path(path, worktree_path)
+ end
+
+ expect(Dir[files_pattern]).to eq(expected)
+ end
+ end
- repository.squash(user, squash_id, opts)
+ subject
end
- context 'sparse checkout', :skip_gitaly_mock do
- let(:expected_files) { %w(files files/js files/js/application.js) }
+ context 'when the diff contains a rename' do
+ let(:repo) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '').rugged }
+ let(:end_sha) { new_commit_move_file(repo).oid }
+
+ after do
+ # Erase our commits so other tests get the original repo
+ repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '').rugged
+ repo.references.update('refs/heads/master', SeedRepo::LastCommit::ID)
+ end
- it 'checks out only the files in the diff' do
+ it 'does not include the renamed file in the sparse checkout' do
allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args|
m.call(*args) do
worktree_path = args[0]
files_pattern = File.join(worktree_path, '**', '*')
- expected = expected_files.map do |path|
- File.expand_path(path, worktree_path)
- end
- expect(Dir[files_pattern]).to eq(expected)
+ expect(Dir[files_pattern]).not_to include('CHANGELOG')
+ expect(Dir[files_pattern]).not_to include('encoding/CHANGELOG')
end
end
subject
end
-
- context 'when the diff contains a rename' do
- let(:repo) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '').rugged }
- let(:end_sha) { new_commit_move_file(repo).oid }
-
- after do
- # Erase our commits so other tests get the original repo
- repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '').rugged
- repo.references.update('refs/heads/master', SeedRepo::LastCommit::ID)
- end
-
- it 'does not include the renamed file in the sparse checkout' do
- allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args|
- m.call(*args) do
- worktree_path = args[0]
- files_pattern = File.join(worktree_path, '**', '*')
-
- expect(Dir[files_pattern]).not_to include('CHANGELOG')
- expect(Dir[files_pattern]).not_to include('encoding/CHANGELOG')
- end
- end
-
- subject
- end
- end
end
+ end
- context 'with an ASCII-8BIT diff', :skip_gitaly_mock do
- let(:diff) { "diff --git a/README.md b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+✓ testme\n ======\n \n Sample repo for testing gitlab features\n" }
+ # Should be ported to gitaly-ruby rspec suite https://gitlab.com/gitlab-org/gitaly/issues/1234
+ skip 'with an ASCII-8BIT diff' do
+ let(:diff) { "diff --git a/README.md b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+✓ testme\n ======\n \n Sample repo for testing gitlab features\n" }
- it 'applies a ASCII-8BIT diff' do
- allow(repository).to receive(:run_git!).and_call_original
- allow(repository).to receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT'))
+    it 'applies an ASCII-8BIT diff' do
+ allow(repository).to receive(:run_git!).and_call_original
+ allow(repository).to receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT'))
- expect(subject).to match(/\h{40}/)
- end
+ expect(subject).to match(/\h{40}/)
end
+ end
- context 'with trailing whitespace in an invalid patch', :skip_gitaly_mock do
- let(:diff) { "diff --git a/README.md b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+ \n ====== \n \n Sample repo for testing gitlab features\n" }
+ # Should be ported to gitaly-ruby rspec suite https://gitlab.com/gitlab-org/gitaly/issues/1234
+ skip 'with trailing whitespace in an invalid patch' do
+ let(:diff) { "diff --git a/README.md b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+ \n ====== \n \n Sample repo for testing gitlab features\n" }
- it 'does not include whitespace warnings in the error' do
- allow(repository).to receive(:run_git!).and_call_original
- allow(repository).to receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT'))
+ it 'does not include whitespace warnings in the error' do
+ allow(repository).to receive(:run_git!).and_call_original
+ allow(repository).to receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT'))
- expect { subject }.to raise_error do |error|
- expect(error).to be_a(described_class::GitError)
- expect(error.message).not_to include('trailing whitespace')
- end
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(described_class::GitError)
+ expect(error.message).not_to include('trailing whitespace')
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 7951cbe7b1d..54f2ea33f90 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -17,7 +17,7 @@ describe Gitlab::GitalyClient::CommitService do
repository: repository_message,
left_commit_id: 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660',
right_commit_id: commit.id,
- collapse_diffs: true,
+ collapse_diffs: false,
enforce_limits: true,
**Gitlab::Git::DiffCollection.collection_limits.to_h
)
@@ -35,7 +35,7 @@ describe Gitlab::GitalyClient::CommitService do
repository: repository_message,
left_commit_id: Gitlab::Git::EMPTY_TREE_ID,
right_commit_id: initial_commit.id,
- collapse_diffs: true,
+ collapse_diffs: false,
enforce_limits: true,
**Gitlab::Git::DiffCollection.collection_limits.to_h
)
diff --git a/spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb b/spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb
new file mode 100644
index 00000000000..96615ae80de
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb
@@ -0,0 +1,112 @@
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::KeysetConnection do
+ let(:nodes) { Project.all.order(id: :asc) }
+ let(:arguments) { {} }
+ subject(:connection) do
+ described_class.new(nodes, arguments, max_page_size: 3)
+ end
+
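+  # The specs below expect cursors to be the Base64-encoded value of the ordering attribute (the ID unless an explicit order is set).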
+ def encoded_property(value)
+ Base64.strict_encode64(value.to_s)
+ end
+
+  describe '#cursor_from_node' do
+ let(:project) { create(:project) }
+
+ it 'returns an encoded ID' do
+ expect(connection.cursor_from_node(project))
+ .to eq(encoded_property(project.id))
+ end
+
+ context 'when an order was specified' do
+ let(:nodes) { Project.order(:updated_at) }
+
+ it 'returns the encoded value of the order' do
+ expect(connection.cursor_from_node(project))
+ .to eq(encoded_property(project.updated_at))
+ end
+ end
+ end
+
+ describe '#sliced_nodes' do
+ let(:projects) { create_list(:project, 4) }
+
+ context 'when before is passed' do
+ let(:arguments) { { before: encoded_property(projects[1].id) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(id: :desc) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+ end
+ end
+
+ context 'when after is passed' do
+ let(:arguments) { { after: encoded_property(projects[1].id) } }
+
+      it 'only returns the projects after the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(id: :desc) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+ end
+ end
+
+ context 'when both before and after are passed' do
+ let(:arguments) do
+ {
+ after: encoded_property(projects[1].id),
+ before: encoded_property(projects[3].id)
+ }
+ end
+
+ it 'returns the expected set' do
+ expect(subject.sliced_nodes).to contain_exactly(projects[2])
+ end
+ end
+ end
+
+ describe '#paged_nodes' do
+ let!(:projects) { create_list(:project, 5) }
+
+ it 'returns the collection limited to max page size' do
+ expect(subject.paged_nodes.size).to eq(3)
+ end
+
+ context 'when `first` is passed' do
+ let(:arguments) { { first: 2 } }
+
+ it 'returns only the first elements' do
+ expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
+ end
+ end
+
+ context 'when `last` is passed' do
+ let(:arguments) { { last: 2 } }
+
+ it 'returns only the last elements' do
+ expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
+ end
+ end
+
+ context 'when both are passed' do
+ let(:arguments) { { first: 2, last: 2 } }
+
+ it 'raises an error' do
+ expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb
index a2ba91dae80..b4837a1689a 100644
--- a/spec/lib/gitlab/middleware/multipart_spec.rb
+++ b/spec/lib/gitlab/middleware/multipart_spec.rb
@@ -7,18 +7,47 @@ describe Gitlab::Middleware::Multipart do
let(:middleware) { described_class.new(app) }
let(:original_filename) { 'filename' }
- it 'opens top-level files' do
- Tempfile.open('top-level') do |tempfile|
- env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
+ shared_examples_for 'multipart upload files' do
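+    # Workhorse-style params carry '.name', '.path' and '.remote_id' keys; each case should surface to the app as a single UploadedFile.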
+ it 'opens top-level files' do
+ Tempfile.open('top-level') do |tempfile|
+ env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename, 'file.path' => tempfile.path, 'file.remote_id' => remote_id }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
+ expect_uploaded_file(tempfile, %w(file))
+
+ middleware.call(env)
+ end
+ end
+
+ it 'opens files one level deep' do
+ Tempfile.open('one-level') do |tempfile|
+ in_params = { 'user' => { 'avatar' => { '.name' => original_filename, '.path' => tempfile.path, '.remote_id' => remote_id } } }
+ env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
+
+ expect_uploaded_file(tempfile, %w(user avatar))
+
+ middleware.call(env)
+ end
+ end
+
+ it 'opens files two levels deep' do
+ Tempfile.open('two-levels') do |tempfile|
+ in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename, '.path' => tempfile.path, '.remote_id' => remote_id } } } }
+ env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
+
+ expect_uploaded_file(tempfile, %w(project milestone themesong))
+
+ middleware.call(env)
+ end
+ end
+
+ def expect_uploaded_file(tempfile, path, remote: false)
expect(app).to receive(:call) do |env|
- file = Rack::Request.new(env).params['file']
+ file = Rack::Request.new(env).params.dig(*path)
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
expect(file.original_filename).to eq(original_filename)
+ expect(file.remote_id).to eq(remote_id)
end
-
- middleware.call(env)
end
end
@@ -34,36 +63,16 @@ describe Gitlab::Middleware::Multipart do
expect { middleware.call(env) }.to raise_error(JWT::InvalidIssuerError)
end
- it 'opens files one level deep' do
- Tempfile.open('one-level') do |tempfile|
- in_params = { 'user' => { 'avatar' => { '.name' => original_filename } } }
- env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
+ context 'with remote file' do
+ let(:remote_id) { 'someid' }
- expect(app).to receive(:call) do |env|
- file = Rack::Request.new(env).params['user']['avatar']
- expect(file).to be_a(::UploadedFile)
- expect(file.path).to eq(tempfile.path)
- expect(file.original_filename).to eq(original_filename)
- end
-
- middleware.call(env)
- end
+ it_behaves_like 'multipart upload files'
end
- it 'opens files two levels deep' do
- Tempfile.open('two-levels') do |tempfile|
- in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename } } } }
- env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
+ context 'with local file' do
+ let(:remote_id) { nil }
- expect(app).to receive(:call) do |env|
- file = Rack::Request.new(env).params['project']['milestone']['themesong']
- expect(file).to be_a(::UploadedFile)
- expect(file.path).to eq(tempfile.path)
- expect(file.original_filename).to eq(original_filename)
- end
-
- middleware.call(env)
- end
+ it_behaves_like 'multipart upload files'
end
def post_env(rewritten_fields, params, secret, issuer)
diff --git a/spec/lib/gitlab/middleware/read_only_spec.rb b/spec/lib/gitlab/middleware/read_only_spec.rb
index 39ec2f37a83..5c398bc2063 100644
--- a/spec/lib/gitlab/middleware/read_only_spec.rb
+++ b/spec/lib/gitlab/middleware/read_only_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe Gitlab::Middleware::ReadOnly do
include Rack::Test::Methods
+ using RSpec::Parameterized::TableSyntax
RSpec::Matchers.define :be_a_redirect do
match do |response|
@@ -117,39 +118,41 @@ describe Gitlab::Middleware::ReadOnly do
context 'whitelisted requests' do
it 'expects a POST internal request to be allowed' do
expect(Rails.application.routes).not_to receive(:recognize_path)
-
response = request.post("/api/#{API::API.version}/internal")
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
- it 'expects a POST LFS request to batch URL to be allowed' do
- expect(Rails.application.routes).to receive(:recognize_path).and_call_original
- response = request.post('/root/rouge.git/info/lfs/objects/batch')
+ it 'expects requests to sidekiq admin to be allowed' do
+ response = request.post('/admin/sidekiq')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
- end
- it 'expects a POST request to git-upload-pack URL to be allowed' do
- expect(Rails.application.routes).to receive(:recognize_path).and_call_original
- response = request.post('/root/rouge.git/git-upload-pack')
+ response = request.get('/admin/sidekiq')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
- it 'expects requests to sidekiq admin to be allowed' do
- response = request.post('/admin/sidekiq')
-
- expect(response).not_to be_a_redirect
- expect(subject).not_to disallow_request
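+      # Git and LFS write endpoints that must stay allowed while the instance is read-only.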
+ where(:description, :path) do
+ 'LFS request to batch' | '/root/rouge.git/info/lfs/objects/batch'
+ 'LFS request to locks verify' | '/root/rouge.git/info/lfs/locks/verify'
+ 'LFS request to locks create' | '/root/rouge.git/info/lfs/locks'
+ 'LFS request to locks unlock' | '/root/rouge.git/info/lfs/locks/1/unlock'
+ 'request to git-upload-pack' | '/root/rouge.git/git-upload-pack'
+ 'request to git-receive-pack' | '/root/rouge.git/git-receive-pack'
+ end
- response = request.get('/admin/sidekiq')
+ with_them do
+ it "expects a POST #{description} URL to be allowed" do
+ expect(Rails.application.routes).to receive(:recognize_path).and_call_original
+ response = request.post(path)
- expect(response).not_to be_a_redirect
- expect(subject).not_to disallow_request
+ expect(response).not_to be_a_redirect
+ expect(subject).not_to disallow_request
+ end
end
end
end
diff --git a/spec/migrations/enqueue_delete_diff_files_workers_spec.rb b/spec/migrations/enqueue_delete_diff_files_workers_spec.rb
deleted file mode 100644
index 686027822b8..00000000000
--- a/spec/migrations/enqueue_delete_diff_files_workers_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20180619121030_enqueue_delete_diff_files_workers.rb')
-
-describe EnqueueDeleteDiffFilesWorkers, :migration, :sidekiq do
- let(:merge_request_diffs) { table(:merge_request_diffs) }
- let(:merge_requests) { table(:merge_requests) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- namespaces.create!(id: 1, name: 'gitlab', path: 'gitlab')
- projects.create!(id: 1, namespace_id: 1, name: 'gitlab', path: 'gitlab')
-
- merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master', state: 'merged')
-
- merge_request_diffs.create!(id: 1, merge_request_id: 1, state: 'collected')
- merge_request_diffs.create!(id: 2, merge_request_id: 1, state: 'without_files')
- merge_request_diffs.create!(id: 3, merge_request_id: 1, state: 'collected')
- merge_request_diffs.create!(id: 4, merge_request_id: 1, state: 'collected')
- merge_request_diffs.create!(id: 5, merge_request_id: 1, state: 'empty')
- merge_request_diffs.create!(id: 6, merge_request_id: 1, state: 'collected')
-
- merge_requests.update(1, latest_merge_request_diff_id: 6)
- end
-
- it 'correctly schedules diff file deletion workers' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- # 1st batch
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(8.minutes, 1)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(9.minutes, 3)
- # 2nd batch
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(16.minutes, 4)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(17.minutes, 6)
- expect(BackgroundMigrationWorker.jobs.size).to eq(4)
- end
- end
- end
-
- it 'migrates the data' do
- expect { migrate! }.to change { merge_request_diffs.where(state: 'without_files').count }
- .from(1).to(4)
- end
-end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index f6433234573..953af2c4710 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -105,7 +105,7 @@ describe Ci::Runner do
end
end
- describe '.shared' do
+ describe '.instance_type' do
let(:group) { create(:group) }
let(:project) { create(:project) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
@@ -113,7 +113,7 @@ describe Ci::Runner do
let!(:shared_runner) { create(:ci_runner, :instance) }
it 'returns only shared runners' do
- expect(described_class.shared).to contain_exactly(shared_runner)
+ expect(described_class.instance_type).to contain_exactly(shared_runner)
end
end
@@ -155,7 +155,7 @@ describe Ci::Runner do
end
end
- describe '.owned_or_shared' do
+ describe '.owned_or_instance_wide' do
it 'returns a globally shared, a project specific and a group specific runner' do
# group specific
group = create(:group)
@@ -168,7 +168,7 @@ describe Ci::Runner do
# globally shared
shared_runner = create(:ci_runner, :instance)
- expect(described_class.owned_or_shared(project.id)).to contain_exactly(
+ expect(described_class.owned_or_instance_wide(project.id)).to contain_exactly(
group_runner, project_runner, shared_runner
)
end
@@ -202,7 +202,6 @@ describe Ci::Runner do
it 'transitions shared runner to project runner and assigns project' do
expect(subject).to be_truthy
- expect(runner).to be_specific
expect(runner).to be_project_type
expect(runner.projects).to eq([project])
expect(runner.only_for?(project)).to be_truthy
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 19bc88b1333..744a6ccae8b 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -9,6 +9,24 @@ describe WebHookLog do
it { is_expected.to validate_presence_of(:web_hook) }
+ describe '.recent' do
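+    # .recent should drop old logs (the 91-day-old record below) and return the rest newest first.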
+ let(:hook) { create(:project_hook) }
+
+ it 'does not return web hook logs that are too old' do
+ create(:web_hook_log, web_hook: hook, created_at: 91.days.ago)
+
+ expect(described_class.recent.size).to be_zero
+ end
+
+ it 'returns the web hook logs in descending order' do
+ hook1 = create(:web_hook_log, web_hook: hook, created_at: 2.hours.ago)
+ hook2 = create(:web_hook_log, web_hook: hook, created_at: 1.hour.ago)
+ hooks = described_class.recent.to_a
+
+ expect(hooks).to eq([hook2, hook1])
+ end
+ end
+
describe '#success?' do
let(:web_hook_log) { build(:web_hook_log, response_status: status) }
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 48c01fc4d4e..ccc3ff861c5 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -153,6 +153,13 @@ describe MergeRequestDiff do
expect(mr_diff.empty?).to be_truthy
end
+ it 'expands collapsed diffs before saving' do
+ mr_diff = create(:merge_request, source_branch: 'expand-collapse-lines', target_branch: 'master').merge_request_diff
+ diff_file = mr_diff.merge_request_diff_files.find_by(new_path: 'expand-collapse/file-5.txt')
+
+ expect(diff_file.diff).not_to be_empty
+ end
+
it 'saves binary diffs correctly' do
path = 'files/images/icn-time-tracking.pdf'
mr_diff = create(:merge_request, source_branch: 'add-pdf-text-binary', target_branch: 'master').merge_request_diff
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index ec72fefd137..8c6b411ec9a 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2190,6 +2190,22 @@ describe MergeRequest do
end
end
end
+
+ context 'source branch is missing' do
+ subject { create(:merge_request, :invalid, :opened, merge_status: :unchecked, target_branch: 'master') }
+
+ before do
+ allow(subject.project.repository).to receive(:can_be_merged?).and_call_original
+ end
+
+ it 'does not raise error' do
+ expect(notification_service).not_to receive(:merge_request_unmergeable)
+ expect(todo_service).not_to receive(:merge_request_became_unmergeable)
+
+ expect { subject.mark_as_unmergeable }.not_to raise_error
+ expect(subject.cannot_be_merged?).to eq(true)
+ end
+ end
end
describe 'check_state?' do
diff --git a/spec/models/project_services/bamboo_service_spec.rb b/spec/models/project_services/bamboo_service_spec.rb
index 85baaccf035..f4f7afb1b92 100644
--- a/spec/models/project_services/bamboo_service_spec.rb
+++ b/spec/models/project_services/bamboo_service_spec.rb
@@ -120,6 +120,14 @@ describe BambooService, :use_clean_rails_memory_store_caching do
end
end
+ describe '#execute' do
+ it 'runs update and build action' do
+ stub_update_and_build_request
+
+ subject.execute(Gitlab::DataBuilder::Push::SAMPLE_DATA)
+ end
+ end
+
describe '#build_page' do
it 'returns the contents of the reactive cache' do
stub_reactive_cache(service, { build_page: 'foo' }, 'sha', 'ref')
@@ -216,10 +224,20 @@ describe BambooService, :use_clean_rails_memory_store_caching do
end
end
+ def stub_update_and_build_request(status: 200, body: nil)
+ bamboo_full_url = 'http://gitlab.com/bamboo/updateAndBuild.action?buildKey=foo&os_authType=basic'
+
+ stub_bamboo_request(bamboo_full_url, status, body)
+ end
+
def stub_request(status: 200, body: nil)
- bamboo_full_url = 'http://gitlab.com/bamboo/rest/api/latest/result?label=123&os_authType=basic'
+ bamboo_full_url = 'http://gitlab.com/bamboo/rest/api/latest/result/byChangeset/123?os_authType=basic'
+
+ stub_bamboo_request(bamboo_full_url, status, body)
+ end
- WebMock.stub_request(:get, bamboo_full_url).to_return(
+ def stub_bamboo_request(url, status, body)
+ WebMock.stub_request(:get, url).to_return(
status: status,
headers: { 'Content-Type' => 'application/json' },
body: body
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index cfa78c4472c..d060ab923d1 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1861,155 +1861,61 @@ describe Repository do
describe '#add_tag' do
let(:user) { build_stubbed(:user) }
- shared_examples 'adding tag' do
- context 'with a valid target' do
- it 'creates the tag' do
- repository.add_tag(user, '8.5', 'master', 'foo')
-
- tag = repository.find_tag('8.5')
- expect(tag).to be_present
- expect(tag.message).to eq('foo')
- expect(tag.dereferenced_target.id).to eq(repository.commit('master').id)
- end
-
- it 'returns a Gitlab::Git::Tag object' do
- tag = repository.add_tag(user, '8.5', 'master', 'foo')
-
- expect(tag).to be_a(Gitlab::Git::Tag)
- end
- end
+ context 'with a valid target' do
+ it 'creates the tag' do
+ repository.add_tag(user, '8.5', 'master', 'foo')
- context 'with an invalid target' do
- it 'returns false' do
- expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
- end
+ tag = repository.find_tag('8.5')
+ expect(tag).to be_present
+ expect(tag.message).to eq('foo')
+ expect(tag.dereferenced_target.id).to eq(repository.commit('master').id)
end
- end
-
- context 'when Gitaly operation_user_add_tag feature is enabled' do
- it_behaves_like 'adding tag'
- end
-
- context 'when Gitaly operation_user_add_tag feature is disabled', :disable_gitaly do
- it_behaves_like 'adding tag'
-
- it 'passes commit SHA to pre-receive and update hooks and tag SHA to post-receive hook' do
- pre_receive_hook = Gitlab::Git::Hook.new('pre-receive', project)
- update_hook = Gitlab::Git::Hook.new('update', project)
- post_receive_hook = Gitlab::Git::Hook.new('post-receive', project)
-
- allow(Gitlab::Git::Hook).to receive(:new)
- .and_return(pre_receive_hook, update_hook, post_receive_hook)
-
- allow(pre_receive_hook).to receive(:trigger).and_call_original
- allow(update_hook).to receive(:trigger).and_call_original
- allow(post_receive_hook).to receive(:trigger).and_call_original
+ it 'returns a Gitlab::Git::Tag object' do
tag = repository.add_tag(user, '8.5', 'master', 'foo')
- commit_sha = repository.commit('master').id
- tag_sha = tag.target
+ expect(tag).to be_a(Gitlab::Git::Tag)
+ end
+ end
- expect(pre_receive_hook).to have_received(:trigger)
- .with(anything, anything, anything, commit_sha, anything)
- expect(update_hook).to have_received(:trigger)
- .with(anything, anything, anything, commit_sha, anything)
- expect(post_receive_hook).to have_received(:trigger)
- .with(anything, anything, anything, tag_sha, anything)
+ context 'with an invalid target' do
+ it 'returns false' do
+ expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
end
end
end
describe '#rm_branch' do
- shared_examples "user deleting a branch" do
- it 'removes a branch' do
- expect(repository).to receive(:before_remove_branch)
- expect(repository).to receive(:after_remove_branch)
+ it 'removes a branch' do
+ expect(repository).to receive(:before_remove_branch)
+ expect(repository).to receive(:after_remove_branch)
- repository.rm_branch(user, 'feature')
- end
+ repository.rm_branch(user, 'feature')
end
- context 'with gitaly enabled' do
- it_behaves_like "user deleting a branch"
-
- context 'when pre hooks failed' do
- before do
- allow_any_instance_of(Gitlab::GitalyClient::OperationService)
- .to receive(:user_delete_branch).and_raise(Gitlab::Git::PreReceiveError)
- end
-
- it 'gets an error and does not delete the branch' do
- expect do
- repository.rm_branch(user, 'feature')
- end.to raise_error(Gitlab::Git::PreReceiveError)
-
- expect(repository.find_branch('feature')).not_to be_nil
- end
- end
- end
-
- context 'with gitaly disabled', :disable_gitaly do
- it_behaves_like "user deleting a branch"
-
- let(:old_rev) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } # git rev-parse feature
- let(:blank_sha) { '0000000000000000000000000000000000000000' }
-
- context 'when pre hooks were successful' do
- it 'runs without errors' do
- expect_any_instance_of(Gitlab::Git::HooksService).to receive(:execute)
- .with(git_user, repository.raw_repository, old_rev, blank_sha, 'refs/heads/feature')
-
- expect { repository.rm_branch(user, 'feature') }.not_to raise_error
- end
-
- it 'deletes the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
-
- expect { repository.rm_branch(user, 'feature') }.not_to raise_error
-
- expect(repository.find_branch('feature')).to be_nil
- end
+ context 'when pre hooks failed' do
+ before do
+ allow_any_instance_of(Gitlab::GitalyClient::OperationService)
+ .to receive(:user_delete_branch).and_raise(Gitlab::Git::PreReceiveError)
end
- context 'when pre hooks failed' do
- it 'gets an error' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
-
- expect do
- repository.rm_branch(user, 'feature')
- end.to raise_error(Gitlab::Git::PreReceiveError)
- end
-
- it 'does not delete the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
+ it 'gets an error and does not delete the branch' do
+ expect do
+ repository.rm_branch(user, 'feature')
+ end.to raise_error(Gitlab::Git::PreReceiveError)
- expect do
- repository.rm_branch(user, 'feature')
- end.to raise_error(Gitlab::Git::PreReceiveError)
- expect(repository.find_branch('feature')).not_to be_nil
- end
+ expect(repository.find_branch('feature')).not_to be_nil
end
end
end
describe '#rm_tag' do
- shared_examples 'removing tag' do
- it 'removes a tag' do
- expect(repository).to receive(:before_remove_tag)
+ it 'removes a tag' do
+ expect(repository).to receive(:before_remove_tag)
- repository.rm_tag(build_stubbed(:user), 'v1.1.0')
-
- expect(repository.find_tag('v1.1.0')).to be_nil
- end
- end
-
- context 'when Gitaly operation_user_delete_tag feature is enabled' do
- it_behaves_like 'removing tag'
- end
+ repository.rm_tag(build_stubbed(:user), 'v1.1.0')
- context 'when Gitaly operation_user_delete_tag feature is disabled', :skip_gitaly_mock do
- it_behaves_like 'removing tag'
+ expect(repository.find_tag('v1.1.0')).to be_nil
end
end
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index bd498269798..f29abcf536e 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -7,6 +7,7 @@ describe Todo do
it { is_expected.to belong_to(:author).class_name("User") }
it { is_expected.to belong_to(:note) }
it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:target).touch(true) }
it { is_expected.to belong_to(:user) }
end
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index ad57c43bc87..deb6abbc026 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -67,4 +67,28 @@ describe 'getting merge request information nested in a project' do
expect(merge_request_graphql_data).to be_nil
end
end
+
+ context 'when there are pipelines' do
+ before do
+ pipeline = create(
+ :ci_pipeline,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha
+ )
+ merge_request.update!(head_pipeline: pipeline)
+ end
+
+ it 'has a head pipeline' do
+ post_graphql(query, current_user: current_user)
+
+ expect(merge_request_graphql_data['headPipeline']).to be_present
+ end
+
+ it 'has pipeline connections' do
+ post_graphql(query, current_user: current_user)
+
+ expect(merge_request_graphql_data['pipelines']['edges'].size).to eq(1)
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index a2b3dc5d121..0727ada4691 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -26,6 +26,18 @@ describe 'getting project information' do
post_graphql(query, current_user: current_user)
end
end
+
+ context 'when there are pipelines present' do
+ before do
+ create(:ci_pipeline, project: project)
+ end
+
+ it 'is included in the pipelines connection' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data['project']['pipelines']['edges'].size).to eq(1)
+ end
+ end
end
context 'when the user does not have access to the project' do
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index 0c7937feed6..b5e4b6011ea 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -90,6 +90,17 @@ describe API::Runners do
end
it 'filters runners by scope' do
+ get api('/runners/all?scope=shared', admin)
+
+ shared = json_response.all? { |r| r['is_shared'] }
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response[0]).to have_key('ip_address')
+ expect(shared).to be_truthy
+ end
+
+      it 'filters runners by specific scope' do
get api('/runners/all?scope=specific', admin)
shared = json_response.any? { |r| r['is_shared'] }
@@ -136,7 +147,7 @@ describe API::Runners do
delete api("/runners/#{unused_project_runner.id}", admin)
expect(response).to have_gitlab_http_status(204)
- end.to change { Ci::Runner.specific.count }.by(-1)
+ end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
@@ -300,7 +311,7 @@ describe API::Runners do
delete api("/runners/#{shared_runner.id}", admin)
expect(response).to have_gitlab_http_status(204)
- end.to change { Ci::Runner.shared.count }.by(-1)
+ end.to change { Ci::Runner.instance_type.count }.by(-1)
end
it_behaves_like '412 response' do
@@ -314,7 +325,7 @@ describe API::Runners do
delete api("/runners/#{project_runner.id}", admin)
expect(response).to have_http_status(204)
- end.to change { Ci::Runner.specific.count }.by(-1)
+ end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
@@ -349,7 +360,7 @@ describe API::Runners do
delete api("/runners/#{project_runner.id}", user)
expect(response).to have_http_status(204)
- end.to change { Ci::Runner.specific.count }.by(-1)
+ end.to change { Ci::Runner.project_type.count }.by(-1)
end
it_behaves_like '412 response' do
@@ -584,12 +595,12 @@ describe API::Runners do
end
end
- it 'enables a shared runner' do
+      it 'enables an instance type runner' do
expect do
post api("/projects/#{project.id}/runners", admin), runner_id: shared_runner.id
end.to change { project.runners.count }.by(1)
- expect(shared_runner.reload).not_to be_shared
+ expect(shared_runner.reload).not_to be_instance_type
expect(response).to have_gitlab_http_status(201)
end
end
diff --git a/spec/services/files/update_service_spec.rb b/spec/services/files/update_service_spec.rb
index 16bfbdf3089..eaee89fb1a5 100644
--- a/spec/services/files/update_service_spec.rb
+++ b/spec/services/files/update_service_spec.rb
@@ -71,17 +71,5 @@ describe Files::UpdateService do
expect(results.data).to eq(new_contents)
end
end
-
- context 'with gitaly disabled', :skip_gitaly_mock do
- context 'when target branch is different than source branch' do
- let(:branch_name) { "#{project.default_branch}-new" }
-
- it 'fires hooks only once' do
- expect(Gitlab::Git::HooksService).to receive(:new).once.and_call_original
-
- subject.execute
- end
- end
- end
end
end
diff --git a/spec/services/labels/find_or_create_service_spec.rb b/spec/services/labels/find_or_create_service_spec.rb
index 68d5660445a..97ba2742392 100644
--- a/spec/services/labels/find_or_create_service_spec.rb
+++ b/spec/services/labels/find_or_create_service_spec.rb
@@ -44,6 +44,26 @@ describe Labels::FindOrCreateService do
expect(service.execute).to eq project_label
end
end
+
+ context 'when include_ancestor_groups is true' do
+ let(:group) { create(:group, :nested) }
+ let(:params) do
+ {
+ title: 'Audit',
+ include_ancestor_groups: true
+ }
+ end
+
+ it 'returns the ancestor group labels' do
+ group_label = create(:group_label, group: group.parent, title: 'Audit')
+
+ expect(service.execute).to eq group_label
+ end
+
+ it 'creates new labels if labels are not found' do
+ expect { service.execute }.to change(project.labels, :count).by(1)
+ end
+ end
end
context 'when finding labels on group level' do
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
index d57852615d9..97da8a88660 100644
--- a/spec/services/merge_requests/conflicts/list_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -84,23 +84,5 @@ describe MergeRequests::Conflicts::ListService do
expect(service.can_be_resolved_in_ui?).to be_falsey
end
-
- context 'with gitaly disabled', :skip_gitaly_mock do
- it 'returns a falsey value when the MR has a missing ref after a force push' do
- merge_request = create_merge_request('conflict-resolvable')
- service = conflicts_service(merge_request)
- allow_any_instance_of(Rugged::Repository).to receive(:merge_commits).and_raise(Rugged::OdbError)
-
- expect(service.can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a falsey value when the MR has a missing revision after a force push' do
- merge_request = create_merge_request('conflict-resolvable')
- service = conflicts_service(merge_request)
- allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::BLANK_SHA)
-
- expect(service.can_be_resolved_in_ui?).to be_falsey
- end
- end
end
end
diff --git a/spec/services/merge_requests/conflicts/resolve_service_spec.rb b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
index cff09237005..7edf8a96c94 100644
--- a/spec/services/merge_requests/conflicts/resolve_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
@@ -123,17 +123,6 @@ describe MergeRequests::Conflicts::ResolveService do
expect(merge_request_from_fork.source_branch_head.parents.map(&:id))
.to eq(['404fa3fc7c2c9b5dacff102f353bdf55b1be2813', target_head])
end
-
- context 'when gitaly is disabled', :skip_gitaly_mock do
- it 'gets conflicts from the source project' do
- # REFACTOR NOTE: We used to test that `project.repository.rugged` wasn't
- # used in this case, but since the refactor, for simplification,
- # we always use that repository for read only operations.
- expect(forked_project.repository.rugged).to receive(:merge_commits).and_call_original
-
- subject
- end
- end
end
end
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 757c31ab692..4daa25f8cf2 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -36,9 +36,9 @@ describe MergeRequests::RebaseService do
end
end
- context 'when unexpected error occurs', :disable_gitaly do
+ context 'when unexpected error occurs' do
before do
- allow(repository).to receive(:run_git!).and_raise('Something went wrong')
+ allow(repository).to receive(:gitaly_operation_client).and_raise('Something went wrong')
end
it 'saves a generic error message' do
@@ -53,9 +53,9 @@ describe MergeRequests::RebaseService do
end
end
- context 'with git command failure', :disable_gitaly do
+ context 'with git command failure' do
before do
- allow(repository).to receive(:run_git!).and_raise(Gitlab::Git::Repository::GitError, 'Something went wrong')
+ allow(repository).to receive(:gitaly_operation_client).and_raise(Gitlab::Git::Repository::GitError, 'Something went wrong')
end
it 'saves a generic error message' do
@@ -71,7 +71,7 @@ describe MergeRequests::RebaseService do
end
context 'valid params' do
- shared_examples 'successful rebase' do
+ describe 'successful rebase' do
before do
service.execute(merge_request)
end
@@ -97,26 +97,8 @@ describe MergeRequests::RebaseService do
end
end
- context 'when Gitaly rebase feature is enabled' do
- it_behaves_like 'successful rebase'
- end
-
- context 'when Gitaly rebase feature is disabled', :disable_gitaly do
- it_behaves_like 'successful rebase'
- end
-
- context 'git commands', :disable_gitaly do
- it 'sets GL_REPOSITORY env variable when calling git commands' do
- expect(repository).to receive(:popen).exactly(3)
- .with(anything, anything, hash_including('GL_REPOSITORY'), anything)
- .and_return(['', 0])
-
- service.execute(merge_request)
- end
- end
-
context 'fork' do
- shared_examples 'successful fork rebase' do
+ describe 'successful fork rebase' do
let(:forked_project) do
fork_project(project, user, repository: true)
end
@@ -140,14 +122,6 @@ describe MergeRequests::RebaseService do
expect(parent_sha).to eq(target_branch_sha)
end
end
-
- context 'when Gitaly rebase feature is enabled' do
- it_behaves_like 'successful fork rebase'
- end
-
- context 'when Gitaly rebase feature is disabled', :disable_gitaly do
- it_behaves_like 'successful fork rebase'
- end
end
end
end
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index ded17fa92a4..8ab09412f55 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -124,51 +124,6 @@ describe MergeRequests::SquashService do
message: a_string_including('squash'))
end
end
-
- context 'with Gitaly disabled', :skip_gitaly_mock do
- stages = {
- 'add worktree for squash' => 'worktree',
- 'configure sparse checkout' => 'config',
- 'get files in diff' => 'diff --name-only',
- 'check out target branch' => 'checkout',
- 'apply patch' => 'diff --binary',
- 'commit squashed changes' => 'commit',
- 'get SHA of squashed commit' => 'rev-parse'
- }
-
- stages.each do |stage, command|
- context "when the #{stage} stage fails" do
- before do
- git_command = a_collection_containing_exactly(
- a_string_starting_with("#{Gitlab.config.git.bin_path} #{command}")
- ).or(
- a_collection_starting_with([Gitlab.config.git.bin_path] + command.split)
- )
-
- allow(repository).to receive(:popen).and_return(['', 0])
- allow(repository).to receive(:popen).with(git_command, anything, anything, anything).and_return([error, 1])
- end
-
- it 'logs the stage and output' do
- expect(service).to receive(:log_error).with(log_error)
- expect(service).to receive(:log_error).with(error)
-
- service.execute(merge_request)
- end
-
- it 'returns an error' do
- expect(service.execute(merge_request)).to match(status: :error,
- message: a_string_including('squash'))
- end
-
- it 'cleans up the temporary directory' do
- expect(File.exist?(squash_dir_path)).to be(false)
-
- service.execute(merge_request)
- end
- end
- end
- end
end
context 'when any other exception is thrown' do
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 0930b9da368..b9322975b5a 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -57,12 +57,12 @@ module GraphqlHelpers
type.fields.map do |name, field|
# We can't guess arguments, so skip fields that require them
- next if field.arguments.any?
+ next if required_arguments?(field)
- if scalar?(field)
- name
- else
+ if nested_fields?(field)
"#{name} { #{all_graphql_fields_for(field_type(field))} }"
+ else
+ name
end
end.compact.join("\n")
end
@@ -85,10 +85,22 @@ module GraphqlHelpers
json_response['data']
end
+ def nested_fields?(field)
+ !scalar?(field) && !enum?(field)
+ end
+
def scalar?(field)
field_type(field).kind.scalar?
end
+ def enum?(field)
+ field_type(field).kind.enum?
+ end
+
+ def required_arguments?(field)
+ field.arguments.values.any? { |argument| argument.type.non_null? }
+ end
+
def field_type(field)
if field.type.respond_to?(:of_type)
field.type.of_type
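For reference, the two new predicates replace the blanket argument check: a field is now skipped only when one of its arguments is non-null, and enum-typed fields are emitted as leaves instead of being recursed into. A minimal sketch of the intended classification, assuming a class-based `Types::ProjectType` with a `mergeRequest(iid: ID!)` field (the type, field, and argument names are illustrative, not part of this diff):

  include GraphqlHelpers

  # Illustrative lookup; field keys are the camelCase GraphQL names.
  field = Types::ProjectType.fields['mergeRequest']

  required_arguments?(field) # => true, `iid` is non-null, so the field is skipped
  nested_fields?(field)      # => true, an object type is neither a scalar nor an enum

  # Request specs then feed the generated selection set into a query, e.g.:
  all_graphql_fields_for('MergeRequest')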
diff --git a/spec/support/shared_examples/controllers/todos_shared_examples.rb b/spec/support/shared_examples/controllers/todos_shared_examples.rb
new file mode 100644
index 00000000000..bafd9bac8d0
--- /dev/null
+++ b/spec/support/shared_examples/controllers/todos_shared_examples.rb
@@ -0,0 +1,43 @@
+shared_examples 'todos actions' do
+ context 'when authorized' do
+ before do
+ sign_in(user)
+ parent.add_developer(user)
+ end
+
+ it 'creates todo' do
+ expect do
+ post_create
+ end.to change { user.todos.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'returns todo path and pending count' do
+ post_create
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['count']).to eq 1
+ expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}})
+ end
+ end
+
+ context 'when not authorized for project/group' do
+    it 'does not create a todo for a resource the user has no access to' do
+ sign_in(user)
+ expect do
+ post_create
+ end.to change { user.todos.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+
+ it 'does not create todo when user is not logged in' do
+ expect do
+ post_create
+ end.to change { user.todos.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(parent.is_a?(Group) ? 401 : 302)
+ end
+ end
+end
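These shared examples leave `user`, `parent`, and `post_create` to the including spec. A rough sketch of how a project-level controller spec might wire them up (the controller name and route params are assumptions used for illustration):

  describe Projects::TodosController do
    let(:user)    { create(:user) }
    let(:project) { create(:project) }
    let(:issue)   { create(:issue, project: project) }
    let(:parent)  { project }

    describe 'POST create' do
      # The shared examples call this to issue the request under test.
      def post_create
        post :create,
             namespace_id: project.namespace,
             project_id: project,
             issuable_id: issue.id,
             issuable_type: 'issue',
             format: 'json'
      end

      it_behaves_like 'todos actions'
    end
  end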
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index bbbad86dcd5..7088fb1e5fb 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -260,4 +260,83 @@ shared_examples 'handle uploads' do
end
end
end
+
+ describe "POST #authorize" do
+ context 'when a user is not authorized to upload a file' do
+ it 'returns 404 status' do
+ post_authorize
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'when a user can upload a file' do
+ before do
+ sign_in(user)
+ model.add_developer(user)
+ end
+
+ context 'and the request bypassed workhorse' do
+ it 'raises an exception' do
+ expect { post_authorize(verified: false) }.to raise_error JWT::DecodeError
+ end
+ end
+
+      context 'and the authorization request is sent by gitlab-workhorse' do
+ shared_examples 'a valid response' do
+ before do
+ post_authorize
+ end
+
+ it 'responds with status 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'uses the gitlab-workhorse content type' do
+ expect(response.headers["Content-Type"]).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ end
+ end
+
+ shared_examples 'a local file' do
+ it_behaves_like 'a valid response' do
+ it 'responds with status 200, location of uploads store and object details' do
+ expect(json_response['TempPath']).to eq(uploader_class.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ end
+ end
+ end
+
+ context 'when using local storage' do
+ it_behaves_like 'a local file'
+ end
+
+ context 'when using remote storage' do
+ context 'when direct upload is enabled' do
+ before do
+ stub_uploads_object_storage(uploader_class, direct_upload: true)
+ end
+
+ it_behaves_like 'a valid response' do
+ it 'responds with status 200, location of uploads remote store and object details' do
+ expect(json_response['TempPath']).to eq(uploader_class.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to have_key('ID')
+ expect(json_response['RemoteObject']).to have_key('GetURL')
+ expect(json_response['RemoteObject']).to have_key('StoreURL')
+ expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).to have_key('MultipartUpload')
+ end
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ stub_uploads_object_storage(uploader_class, direct_upload: false)
+ end
+
+ it_behaves_like 'a local file'
+ end
+ end
+ end
+ end
+ end
end
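The examples above rely on the including spec to provide `model`, `uploader_class`, and a `post_authorize` helper that can optionally skip workhorse verification. A rough sketch of such a helper for a project-level uploads controller (the route params and the workhorse header helper are assumptions):

  def post_authorize(verified: true)
    # Attach the workhorse JWT header unless the test wants to simulate a
    # request that bypassed workhorse.
    request.headers.merge!(workhorse_internal_api_request_header) if verified

    post :authorize, namespace_id: model.namespace, project_id: model, format: :json
  end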
diff --git a/spec/support/shared_examples/requests/api/merge_requests_list.rb b/spec/support/shared_examples/requests/api/merge_requests_list.rb
index a401f7541f0..1aed8ab0113 100644
--- a/spec/support/shared_examples/requests/api/merge_requests_list.rb
+++ b/spec/support/shared_examples/requests/api/merge_requests_list.rb
@@ -136,8 +136,9 @@ shared_examples 'merge requests list' do
it 'returns an array of merge requests in given milestone' do
get api(endpoint_path, user), milestone: '0.9'
- expect(json_response.first['title']).to eq merge_request_closed.title
- expect(json_response.first['id']).to eq merge_request_closed.id
+          closed_mrs = json_response.select { |mr| mr['id'] == merge_request_closed.id }
+          expect(closed_mrs.length).to eq(1)
+          expect(closed_mrs.first['title']).to eq merge_request_closed.title
end
it 'returns an array of merge requests matching state in milestone' do
diff --git a/spec/tasks/gitlab/git_rake_spec.rb b/spec/tasks/gitlab/git_rake_spec.rb
index 1efaecc63a5..d0263ad9a37 100644
--- a/spec/tasks/gitlab/git_rake_spec.rb
+++ b/spec/tasks/gitlab/git_rake_spec.rb
@@ -1,15 +1,20 @@
require 'rake_helper'
describe 'gitlab:git rake tasks' do
+ let(:base_path) { 'tmp/tests/default_storage' }
+
before(:all) do
@default_storage_hash = Gitlab.config.repositories.storages.default.to_h
end
before do
Rake.application.rake_require 'tasks/gitlab/git'
- storages = { 'default' => Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/default_storage')) }
+ storages = { 'default' => Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => base_path)) }
+
+ path = Settings.absolute("#{base_path}/@hashed/1/2/test.git")
+ FileUtils.mkdir_p(path)
+ Gitlab::Popen.popen(%W[git -C #{path} init --bare])
- FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/@hashed/1/2/test.git'))
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
allow_any_instance_of(String).to receive(:color) { |string, _color| string }
@@ -17,7 +22,7 @@ describe 'gitlab:git rake tasks' do
end
after do
- FileUtils.rm_rf(Settings.absolute('tmp/tests/default_storage'))
+ FileUtils.rm_rf(Settings.absolute(base_path))
end
describe 'fsck' do
@@ -26,14 +31,14 @@ describe 'gitlab:git rake tasks' do
end
it 'errors out about config.lock issues' do
- FileUtils.touch(Settings.absolute('tmp/tests/default_storage/@hashed/1/2/test.git/config.lock'))
+ FileUtils.touch(Settings.absolute("#{base_path}/@hashed/1/2/test.git/config.lock"))
expect { run_rake_task('gitlab:git:fsck') }.to output(/file exists\? ... yes/).to_stdout
end
it 'errors out about ref lock issues' do
- FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/@hashed/1/2/test.git/refs/heads'))
- FileUtils.touch(Settings.absolute('tmp/tests/default_storage/@hashed/1/2/test.git/refs/heads/blah.lock'))
+ FileUtils.mkdir_p(Settings.absolute("#{base_path}/@hashed/1/2/test.git/refs/heads"))
+ FileUtils.touch(Settings.absolute("#{base_path}/@hashed/1/2/test.git/refs/heads/blah.lock"))
expect { run_rake_task('gitlab:git:fsck') }.to output(/Ref lock files exist:/).to_stdout
end
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index 59013a02938..7ba28b4fc1f 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -80,6 +80,50 @@ describe FileUploader do
end
end
+ describe 'copy_to' do
+ shared_examples 'returns a valid uploader' do
+ describe 'returned uploader' do
+ let(:new_project) { create(:project) }
+ let(:moved) { described_class.copy_to(subject, new_project) }
+
+ it 'generates a new secret' do
+ expect(subject).to be
+ expect(described_class).to receive(:generate_secret).once.and_call_original
+ expect(moved).to be
+ end
+
+        it 'creates a new upload' do
+ expect(moved.upload).not_to eq(subject.upload)
+ end
+
+ it 'copies the file' do
+ expect(subject.file).to exist
+ expect(moved.file).to exist
+ expect(subject.file).not_to eq(moved.file)
+ expect(subject.object_store).to eq(moved.object_store)
+ end
+ end
+ end
+
+ context 'files are stored locally' do
+ before do
+ subject.store!(fixture_file_upload('spec/fixtures/dk.png'))
+ end
+
+ include_examples 'returns a valid uploader'
+ end
+
+ context 'files are stored remotely' do
+ before do
+ stub_uploads_object_storage
+ subject.store!(fixture_file_upload('spec/fixtures/dk.png'))
+ subject.migrate!(ObjectStorage::Store::REMOTE)
+ end
+
+ include_examples 'returns a valid uploader'
+ end
+ end
+
describe '#secret' do
it 'generates a secret if none is provided' do
expect(described_class).to receive(:generate_secret).and_return('secret')
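`copy_to` yields an uploader bound to a different model and a freshly generated secret, which is exactly what rewriting Markdown upload references between projects needs. A rough sketch of that call pattern (the surrounding lookup code is an assumption; only `FileUploader.copy_to` comes from the spec above):

  # source_project, target_project, secret, and filename are placeholders.
  old_uploader = FileUploader.new(source_project, secret: secret)
  old_uploader.retrieve_from_store!(filename)

  new_uploader = FileUploader.copy_to(old_uploader, target_project)
  new_uploader.markdown_link # reference to the copied file under its new secret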
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index 8c4daac5f80..b9b5445562f 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -9,50 +9,44 @@ describe ProjectCacheWorker do
let(:lease_key) { "project_cache_worker:#{project.id}:update_statistics" }
let(:lease_timeout) { ProjectCacheWorker::LEASE_TIMEOUT }
- before do
- stub_exclusive_lease(lease_key, timeout: lease_timeout)
-
- allow(Project).to receive(:find_by)
- .with(id: project.id)
- .and_return(project)
- end
-
describe '#perform' do
- context 'with a non-existing project' do
- it 'does not update statistic' do
- allow(Project).to receive(:find_by).with(id: -1).and_return(nil)
+ before do
+ stub_exclusive_lease(lease_key, timeout: lease_timeout)
+ end
- expect(subject).not_to receive(:update_statistics)
+ context 'with a non-existing project' do
+ it 'does nothing' do
+ expect(worker).not_to receive(:update_statistics)
- subject.perform(-1)
+ worker.perform(-1)
end
end
context 'with an existing project without a repository' do
- it 'does not update statistics' do
- allow(project.repository).to receive(:exists?).and_return(false)
+ it 'does nothing' do
+ allow_any_instance_of(Repository).to receive(:exists?).and_return(false)
- expect(subject).not_to receive(:update_statistics)
+ expect(worker).not_to receive(:update_statistics)
- subject.perform(project.id)
+ worker.perform(project.id)
end
end
context 'with an existing project' do
it 'updates the project statistics' do
- expect(subject).to receive(:update_statistics)
- .with(%w(repository_size))
- .and_call_original
+ expect(worker).to receive(:update_statistics)
+ .with(kind_of(Project), %i(repository_size))
+ .and_call_original
- subject.perform(project.id, [], %w(repository_size))
+ worker.perform(project.id, [], %w(repository_size))
end
it 'refreshes the method caches' do
- expect(project.repository).to receive(:refresh_method_caches)
- .with(%i(readme))
- .and_call_original
+ expect_any_instance_of(Repository).to receive(:refresh_method_caches)
+ .with(%i(readme))
+ .and_call_original
- subject.perform(project.id, %w(readme))
+ worker.perform(project.id, %w(readme))
end
context 'with plain readme' do
@@ -60,22 +54,23 @@ describe ProjectCacheWorker do
allow(MarkupHelper).to receive(:gitlab_markdown?).and_return(false)
allow(MarkupHelper).to receive(:plain?).and_return(true)
- expect(project.repository).to receive(:refresh_method_caches)
- .with(%i(readme))
- .and_call_original
-
- subject.perform(project.id, %w(readme))
+ expect_any_instance_of(Repository).to receive(:refresh_method_caches)
+ .with(%i(readme))
+ .and_call_original
+ worker.perform(project.id, %w(readme))
end
end
end
+ end
+ describe '#update_statistics' do
context 'when a lease could not be obtained' do
it 'does not update the repository size' do
stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
- expect(project.statistics).not_to receive(:refresh!)
+ expect(statistics).not_to receive(:refresh!)
- subject.perform(project.id, [], %w(repository_size))
+ worker.update_statistics(project)
end
end
@@ -83,17 +78,11 @@ describe ProjectCacheWorker do
it 'updates the project statistics' do
stub_exclusive_lease(lease_key, timeout: lease_timeout)
- expect(project.statistics).to receive(:refresh!)
- .with(only: %i(repository_size))
- .and_call_original
-
- subject.perform(project.id, [], %i(repository_size))
- end
-
- it 'cancels the lease after statistics has been updated' do
- expect(subject).to receive(:release_lease).with('uuid')
+ expect(statistics).to receive(:refresh!)
+ .with(only: %i(repository_size))
+ .and_call_original
- subject.perform(project.id, [], %i(repository_size))
+ worker.update_statistics(project, %i(repository_size))
end
end
end
diff --git a/spec/workers/prune_web_hook_logs_worker_spec.rb b/spec/workers/prune_web_hook_logs_worker_spec.rb
new file mode 100644
index 00000000000..d7d64a1f641
--- /dev/null
+++ b/spec/workers/prune_web_hook_logs_worker_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe PruneWebHookLogsWorker do
+ describe '#perform' do
+ before do
+ hook = create(:project_hook)
+
+ 5.times do
+ create(:web_hook_log, web_hook: hook, created_at: 5.months.ago)
+ end
+
+ create(:web_hook_log, web_hook: hook, response_status: '404')
+ end
+
+ it 'removes all web hook logs older than one month' do
+ described_class.new.perform
+
+ expect(WebHookLog.count).to eq(1)
+ expect(WebHookLog.first.response_status).to eq('404')
+ end
+ end
+end
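The spec only pins the observable behaviour: logs older than a month disappear and the recent one survives. A minimal sketch of a worker that would satisfy it, with the worker includes and the single `delete_all` treated as assumptions rather than the actual implementation:

  class PruneWebHookLogsWorker
    include ApplicationWorker
    include CronjobQueue

    def perform
      # Anything past the retention window can go; the surviving record in the
      # spec above is the recent 404 entry.
      WebHookLog.where('created_at < ?', 1.month.ago).delete_all
    end
  end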