author    Martin Wortschack <mwortschack@gitlab.com>  2018-09-06 16:15:59 +0200
committer Martin Wortschack <mwortschack@gitlab.com>  2018-09-06 16:15:59 +0200
commit    92bbcf9079270897d419a0b4c196ac17251bf68a (patch)
tree      357cdf26cf811aaddc98ccebaedb02d9575ac74b /spec
parent    d92b7f7ca738958add587188b4e415e4bf8fb394 (diff)
parent    bac9a1dde1e75d057e212ab5d3b9b478d3981e1d (diff)
download  gitlab-ce-92bbcf9079270897d419a0b4c196ac17251bf68a.tar.gz
merge master and resolve conflicts
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/concerns/issuable_collections_spec.rb28
-rw-r--r--spec/controllers/concerns/send_file_upload_spec.rb31
-rw-r--r--spec/controllers/groups_controller_spec.rb10
-rw-r--r--spec/controllers/instance_statistics/cohorts_controller_spec.rb14
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb32
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb52
-rw-r--r--spec/controllers/projects_controller_spec.rb13
-rw-r--r--spec/factories/ci/job_artifacts.rb6
-rw-r--r--spec/factories/clusters/platforms/kubernetes.rb4
-rw-r--r--spec/factories/projects.rb27
-rw-r--r--spec/features/admin/admin_settings_spec.rb9
-rw-r--r--spec/features/groups/labels/search_labels_spec.rb48
-rw-r--r--spec/features/groups/settings/group_badges_spec.rb2
-rw-r--r--spec/features/instance_statistics/cohorts_spec.rb8
-rw-r--r--spec/features/instance_statistics/conversational_development_index_spec.rb10
-rw-r--r--spec/features/instance_statistics/instance_statistics.rb23
-rw-r--r--spec/features/issues/user_sees_breadcrumb_links_spec.rb18
-rw-r--r--spec/features/merge_request/user_sees_breadcrumb_links_spec.rb18
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb50
-rw-r--r--spec/features/projects/blobs/edit_spec.rb34
-rw-r--r--spec/features/projects/clusters/user_spec.rb22
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb2
-rw-r--r--spec/features/projects/settings/integration_settings_spec.rb1
-rw-r--r--spec/features/projects/settings/project_badges_spec.rb2
-rw-r--r--spec/features/projects/settings/user_changes_default_branch_spec.rb29
-rw-r--r--spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb12
-rw-r--r--spec/features/projects/tags/user_views_tags_spec.rb69
-rw-r--r--spec/features/projects/wiki/markdown_preview_spec.rb28
-rw-r--r--spec/features/projects/wiki/user_creates_wiki_page_spec.rb2
-rw-r--r--spec/features/projects/wiki/user_updates_wiki_page_spec.rb26
-rw-r--r--spec/features/projects/wiki/user_views_wiki_page_spec.rb2
-rw-r--r--spec/features/projects_spec.rb56
-rw-r--r--spec/features/snippets/show_spec.rb44
-rw-r--r--spec/finders/template_finder_spec.rb48
-rw-r--r--spec/fixtures/api/schemas/ci_detailed_status.json24
-rw-r--r--spec/fixtures/api/schemas/entities/diff_line.json14
-rw-r--r--spec/fixtures/api/schemas/entities/diff_line_parallel.json11
-rw-r--r--spec/fixtures/api/schemas/http_method.json5
-rw-r--r--spec/fixtures/api/schemas/job/artifact.json11
-rw-r--r--spec/fixtures/api/schemas/job/job.json (renamed from spec/fixtures/api/schemas/job.json)13
-rw-r--r--spec/fixtures/api/schemas/job/job_details.json8
-rw-r--r--spec/fixtures/api/schemas/pipeline_stage.json2
-rw-r--r--spec/helpers/button_helper_spec.rb14
-rw-r--r--spec/helpers/events_helper_spec.rb43
-rw-r--r--spec/helpers/markup_helper_spec.rb47
-rw-r--r--spec/helpers/namespaces_helper_spec.rb5
-rw-r--r--spec/helpers/projects_helper_spec.rb12
-rw-r--r--spec/javascripts/badges/components/badge_form_spec.js150
-rw-r--r--spec/javascripts/fixtures/projects.rb10
-rw-r--r--spec/javascripts/jobs/store/actions_spec.js625
-rw-r--r--spec/javascripts/jobs/store/mutations_spec.js228
-rw-r--r--spec/javascripts/lib/utils/common_utils_spec.js10
-rw-r--r--spec/javascripts/read_more_spec.js23
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js45
-rw-r--r--spec/lib/banzai/filter/wiki_link_filter_spec.rb40
-rw-r--r--spec/lib/banzai/pipeline/wiki_pipeline_spec.rb14
-rw-r--r--spec/lib/feature_spec.rb8
-rw-r--r--spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb156
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/extendable/entry_spec.rb227
-rw-r--r--spec/lib/gitlab/ci/config/extendable_spec.rb228
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb52
-rw-r--r--spec/lib/gitlab/ci/parsers/junit_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb60
-rw-r--r--spec/lib/gitlab/email/handler/create_issue_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler_spec.rb4
-rw-r--r--spec/lib/gitlab/file_detector_spec.rb6
-rw-r--r--spec/lib/gitlab/file_markdown_link_builder_spec.rb80
-rw-r--r--spec/lib/gitlab/file_type_detection_spec.rb82
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml1
-rw-r--r--spec/lib/gitlab/kubernetes/cluster_role_binding_spec.rb35
-rw-r--r--spec/lib/gitlab/kubernetes/helm/api_spec.rb109
-rw-r--r--spec/lib/gitlab/kubernetes/helm/base_command_spec.rb20
-rw-r--r--spec/lib/gitlab/kubernetes/helm/init_command_spec.rb130
-rw-r--r--spec/lib/gitlab/kubernetes/helm/install_command_spec.rb155
-rw-r--r--spec/lib/gitlab/kubernetes/helm/pod_spec.rb17
-rw-r--r--spec/lib/gitlab/kubernetes/kube_client_spec.rb247
-rw-r--r--spec/lib/gitlab/kubernetes/service_account_spec.rb24
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb59
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb16
-rw-r--r--spec/lib/gitlab/user_extractor_spec.rb58
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb2
-rw-r--r--spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb73
-rw-r--r--spec/models/clusters/applications/helm_spec.rb14
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb9
-rw-r--r--spec/models/clusters/applications/jupyter_spec.rb9
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb35
-rw-r--r--spec/models/clusters/applications/runner_spec.rb9
-rw-r--r--spec/models/clusters/cluster_spec.rb4
-rw-r--r--spec/models/clusters/platforms/kubernetes_spec.rb24
-rw-r--r--spec/models/concerns/avatarable_spec.rb20
-rw-r--r--spec/models/concerns/case_sensitivity_spec.rb202
-rw-r--r--spec/models/concerns/triggerable_hooks_spec.rb24
-rw-r--r--spec/models/hooks/active_hook_filter_spec.rb68
-rw-r--r--spec/models/hooks/web_hook_spec.rb20
-rw-r--r--spec/models/project_services/chat_notification_service_spec.rb50
-rw-r--r--spec/models/project_spec.rb40
-rw-r--r--spec/models/user_spec.rb17
-rw-r--r--spec/policies/issue_policy_spec.rb42
-rw-r--r--spec/presenters/project_presenter_spec.rb153
-rw-r--r--spec/requests/api/internal_spec.rb76
-rw-r--r--spec/requests/api/merge_requests_spec.rb9
-rw-r--r--spec/requests/api/project_hooks_spec.rb14
-rw-r--r--spec/requests/api/wikis_spec.rb124
-rw-r--r--spec/rubocop/cop/line_break_around_conditional_block_spec.rb16
-rw-r--r--spec/serializers/diff_file_entity_spec.rb17
-rw-r--r--spec/serializers/diff_line_serializer_spec.rb25
-rw-r--r--spec/services/preview_markdown_service_spec.rb7
-rw-r--r--spec/services/wikis/create_attachment_service_spec.rb202
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb19
-rw-r--r--spec/support/helpers/stub_feature_flags.rb4
-rw-r--r--spec/support/helpers/test_env.rb3
-rw-r--r--spec/support/shared_examples/wiki_file_attachments_examples.rb88
-rw-r--r--spec/uploaders/job_artifact_uploader_spec.rb47
-rw-r--r--spec/uploaders/uploader_helper_spec.rb25
-rw-r--r--spec/validators/branch_filter_validator_spec.rb42
-rw-r--r--spec/views/projects/_home_panel.html.haml_spec.rb6
121 files changed, 5015 insertions, 520 deletions
diff --git a/spec/controllers/concerns/issuable_collections_spec.rb b/spec/controllers/concerns/issuable_collections_spec.rb
index c1f42bbb9d7..d16a3464495 100644
--- a/spec/controllers/concerns/issuable_collections_spec.rb
+++ b/spec/controllers/concerns/issuable_collections_spec.rb
@@ -21,6 +21,34 @@ describe IssuableCollections do
controller
end
+ describe '#set_sort_order_from_cookie' do
+ describe 'when sort param given' do
+ let(:cookies) { {} }
+ let(:params) { { sort: 'downvotes_asc' } }
+
+ it 'sets the cookie with the right values and flags' do
+ allow(controller).to receive(:cookies).and_return(cookies)
+
+ controller.send(:set_sort_order_from_cookie)
+
+ expect(cookies['issue_sort']).to eq({ value: 'popularity', secure: false, httponly: false })
+ end
+ end
+
+ describe 'when cookie exists' do
+ let(:cookies) { { 'issue_sort' => 'id_asc' } }
+ let(:params) { {} }
+
+ it 'sets the cookie with the right values and flags' do
+ allow(controller).to receive(:cookies).and_return(cookies)
+
+ controller.send(:set_sort_order_from_cookie)
+
+ expect(cookies['issue_sort']).to eq({ value: 'created_asc', secure: false, httponly: false })
+ end
+ end
+ end
+
describe '#page_count_for_relation' do
let(:params) { { state: 'opened' } }
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
index 58bb91a0c80..767fba7fd58 100644
--- a/spec/controllers/concerns/send_file_upload_spec.rb
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -52,7 +52,7 @@ describe SendFileUpload do
end
context 'with attachment' do
- subject { controller.send_upload(uploader, attachment: 'test.js') }
+ let(:send_attachment) { controller.send_upload(uploader, attachment: 'test.js') }
it 'sends a file with content-type of text/plain' do
expected_params = {
@@ -62,7 +62,29 @@ describe SendFileUpload do
}
expect(controller).to receive(:send_file).with(uploader.path, expected_params)
- subject
+ send_attachment
+ end
+
+ context 'with a proxied file in object storage' do
+ before do
+ stub_uploads_object_storage(uploader: uploader_class)
+ uploader.object_store = ObjectStorage::Store::REMOTE
+ uploader.store!(temp_file)
+ allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true }
+ end
+
+ it 'sends a file with a custom type' do
+ headers = double
+ expected_headers = %r(response-content-disposition=attachment%3Bfilename%3D%22test.js%22&response-content-type=application/javascript)
+ expect(Gitlab::Workhorse).to receive(:send_url).with(expected_headers).and_call_original
+ expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
+
+ expect(controller).not_to receive(:send_file)
+ expect(controller).to receive(:headers) { headers }
+ expect(controller).to receive(:head).with(:ok)
+
+ send_attachment
+ end
end
end
@@ -80,7 +102,12 @@ describe SendFileUpload do
it 'sends a file' do
headers = double
+ expect(Gitlab::Workhorse).not_to receive(:send_url).with(/response-content-disposition/)
+ expect(Gitlab::Workhorse).not_to receive(:send_url).with(/response-content-type/)
+ expect(Gitlab::Workhorse).to receive(:send_url).and_call_original
+
expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
+ expect(controller).not_to receive(:send_file)
expect(controller).to receive(:headers) { headers }
expect(controller).to receive(:head).with(:ok)
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 7a037828035..ae49490f31c 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -57,6 +57,16 @@ describe GroupsController do
end
end
+ describe 'GET edit' do
+ it 'sets the badge API endpoint' do
+ sign_in(owner)
+
+ get :edit, id: group.to_param
+
+ expect(assigns(:badge_api_endpoint)).not_to be_nil
+ end
+ end
+
describe 'GET #new' do
context 'when creating subgroups', :nested_groups do
[true, false].each do |can_create_group_status|
diff --git a/spec/controllers/instance_statistics/cohorts_controller_spec.rb b/spec/controllers/instance_statistics/cohorts_controller_spec.rb
index e4eedede93a..596d3c7abe5 100644
--- a/spec/controllers/instance_statistics/cohorts_controller_spec.rb
+++ b/spec/controllers/instance_statistics/cohorts_controller_spec.rb
@@ -3,5 +3,19 @@
require 'spec_helper'
describe InstanceStatistics::CohortsController do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
it_behaves_like 'instance statistics availability'
+
+ it 'renders a 404 when the usage ping is disabled' do
+ stub_application_setting(usage_ping_enabled: false)
+
+ get :index
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 42917d0d505..26a532ee01d 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -274,11 +274,43 @@ describe Projects::ClustersController do
context 'when creates a cluster' do
it 'creates a new cluster' do
expect(ClusterProvisionWorker).to receive(:perform_async)
+
expect { go }.to change { Clusters::Cluster.count }
.and change { Clusters::Platforms::Kubernetes.count }
+
expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
+
+ expect(project.clusters.first).to be_user
+ expect(project.clusters.first).to be_kubernetes
+ end
+ end
+
+ context 'when creates a RBAC-enabled cluster' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ platform_kubernetes_attributes: {
+ api_url: 'http://my-url',
+ token: 'test',
+ namespace: 'aaa',
+ authorization_type: 'rbac'
+ }
+ }
+ }
+ end
+
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+
+ expect { go }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Platforms::Kubernetes.count }
+
+ expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
+
expect(project.clusters.first).to be_user
expect(project.clusters.first).to be_kubernetes
+ expect(project.clusters.first).to be_platform_kubernetes_rbac
end
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 1aca44c6e74..ca7d30fec83 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -147,12 +147,52 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
get_show(id: job.id, format: :json)
end
- it 'exposes needed information' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['raw_path']).to match(%r{jobs/\d+/raw\z})
- expect(json_response.dig('merge_request', 'path')).to match(%r{merge_requests/\d+\z})
- expect(json_response['new_issue_path'])
- .to include('/issues/new')
+ context 'when job failed' do
+ it 'exposes needed information' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['raw_path']).to match(%r{jobs/\d+/raw\z})
+ expect(json_response.dig('merge_request', 'path')).to match(%r{merge_requests/\d+\z})
+ expect(json_response['new_issue_path']).to include('/issues/new')
+ end
+ end
+
+ context 'when job has artifacts' do
+ context 'with no expiry date' do
+ let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
+
+ it 'exposes needed information' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['artifact']['download_path']).to match(%r{artifacts/download})
+ expect(json_response['artifact']['browse_path']).to match(%r{artifacts/browse})
+ expect(json_response['artifact']).not_to have_key(:expired)
+ expect(json_response['artifact']).not_to have_key(:expired_at)
+ end
+ end
+
+ context 'with expiry date' do
+ let(:job) { create(:ci_build, :success, :artifacts, :expired, pipeline: pipeline) }
+
+ it 'exposes needed information' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['artifact']).not_to have_key(:download_path)
+ expect(json_response['artifact']).not_to have_key(:browse_path)
+ expect(json_response['artifact']['expired']).to eq(true)
+ expect(json_response['artifact']['expire_at']).not_to be_empty
+ end
+ end
+ end
+
+ context 'when job has terminal' do
+ let(:job) { create(:ci_build, :running, :with_runner_session, pipeline: pipeline) }
+
+ it 'exposes the terminal path' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['terminal_path']).to match(%r{/terminal})
+ end
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 94644b1f9fd..c3a66477b6a 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -284,6 +284,19 @@ describe ProjectsController do
end
end
+ describe 'GET edit' do
+ it 'sets the badge API endpoint' do
+ sign_in(user)
+ project.add_maintainer(user)
+
+ get :edit,
+ namespace_id: project.namespace.path,
+ id: project.path
+
+ expect(assigns(:badge_api_endpoint)).not_to be_nil
+ end
+ end
+
describe "#update" do
render_views
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 46aaaf6aa5d..f028803ca74 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -24,6 +24,12 @@ FactoryBot.define do
end
end
+ trait :legacy_archive do
+ archive
+
+ file_location :legacy_path
+ end
+
trait :metadata do
file_type :metadata
file_format :gzip
diff --git a/spec/factories/clusters/platforms/kubernetes.rb b/spec/factories/clusters/platforms/kubernetes.rb
index 89f6ddebf6a..36ac2372204 100644
--- a/spec/factories/clusters/platforms/kubernetes.rb
+++ b/spec/factories/clusters/platforms/kubernetes.rb
@@ -16,5 +16,9 @@ FactoryBot.define do
platform_kubernetes.ca_cert = File.read(pem_file)
end
end
+
+ trait :rbac_enabled do
+ authorization_type :rbac
+ end
end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 1215b04913e..17e457c04a5 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -130,6 +130,33 @@ FactoryBot.define do
end
end
+ # Build a custom repository by specifying a hash of `filename => content` in
+ # the transient `files` attribute. Each file will be created in its own
+ # commit, operating against the master branch. So, the following call:
+ #
+ # create(:project, :custom_repo, files: { 'foo/a.txt' => 'foo', 'b.txt' => 'bar' })
+ #
+ # will create a repository containing two files and two commits, all on master
+ trait :custom_repo do
+ transient do
+ files {}
+ end
+
+ after :create do |project, evaluator|
+ raise "Failed to create repository!" unless project.create_repository
+
+ evaluator.files.each do |filename, content|
+ project.repository.create_file(
+ project.creator,
+ filename,
+ content,
+ message: "Automatically created file #{filename}",
+ branch_name: 'master'
+ )
+ end
+ end
+ end
+
# Test repository - https://gitlab.com/gitlab-org/gitlab-test
trait :repository do
test_repo
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 8a4809f28cd..3f175dcfb92 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -381,6 +381,15 @@ describe 'Admin updates settings' do
expect(find_field('ED25519 SSH keys').value).to eq(forbidden)
end
+ it 'loads usage ping payload on click', :js do
+ expect(page).to have_button 'Preview payload'
+
+ find('.js-usage-ping-payload-trigger').click
+
+ expect(page).to have_selector '.js-usage-ping-payload'
+ expect(page).to have_button 'Hide payload'
+ end
+
def check_all_events
page.check('Active')
page.check('Push')
diff --git a/spec/features/groups/labels/search_labels_spec.rb b/spec/features/groups/labels/search_labels_spec.rb
new file mode 100644
index 00000000000..14b88a561b1
--- /dev/null
+++ b/spec/features/groups/labels/search_labels_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Search for labels', :js do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let!(:label1) { create(:group_label, title: 'Foo', description: 'Lorem ipsum', group: group) }
+ let!(:label2) { create(:group_label, title: 'Bar', description: 'Fusce consequat', group: group) }
+
+ before do
+ group.add_maintainer(user)
+ sign_in(user)
+
+ visit group_labels_path(group)
+ end
+
+ it 'searches for label by title' do
+ fill_in 'label-search', with: 'Bar'
+ find('#label-search').native.send_keys(:enter)
+
+ expect(page).to have_content(label2.title)
+ expect(page).to have_content(label2.description)
+ expect(page).not_to have_content(label1.title)
+ expect(page).not_to have_content(label1.description)
+ end
+
+ it 'searches for label by description' do
+ fill_in 'label-search', with: 'Lorem'
+ find('#label-search').native.send_keys(:enter)
+
+ expect(page).to have_content(label1.title)
+ expect(page).to have_content(label1.description)
+ expect(page).not_to have_content(label2.title)
+ expect(page).not_to have_content(label2.description)
+ end
+
+ it 'shows nothing found message' do
+ fill_in 'label-search', with: 'nonexistent'
+ find('#label-search').native.send_keys(:enter)
+
+ expect(page).to have_content('No labels with such name or description')
+ expect(page).not_to have_content(label1.title)
+ expect(page).not_to have_content(label1.description)
+ expect(page).not_to have_content(label2.title)
+ expect(page).not_to have_content(label2.description)
+ end
+end
diff --git a/spec/features/groups/settings/group_badges_spec.rb b/spec/features/groups/settings/group_badges_spec.rb
index 070a4a31ffa..a5c8dbf18d0 100644
--- a/spec/features/groups/settings/group_badges_spec.rb
+++ b/spec/features/groups/settings/group_badges_spec.rb
@@ -14,7 +14,7 @@ describe 'Group Badges' do
group.add_owner(user)
sign_in(user)
- visit(group_settings_badges_path(group))
+ visit(edit_group_path(group))
end
it 'shows a list of badges', :js do
diff --git a/spec/features/instance_statistics/cohorts_spec.rb b/spec/features/instance_statistics/cohorts_spec.rb
index 573f8600be1..81fc5eff980 100644
--- a/spec/features/instance_statistics/cohorts_spec.rb
+++ b/spec/features/instance_statistics/cohorts_spec.rb
@@ -12,12 +12,4 @@ describe 'Cohorts page' do
expect(page).to have_content("#{Time.now.strftime('%b %Y')} 3 0")
end
-
- it 'shows usage data', :js do
- visit instance_statistics_cohorts_path
-
- wait_for_requests
-
- expect(find('.js-syntax-highlight').text).not_to eq('')
- end
end
diff --git a/spec/features/instance_statistics/conversational_development_index_spec.rb b/spec/features/instance_statistics/conversational_development_index_spec.rb
index a6c16b6a2a3..d8be554d734 100644
--- a/spec/features/instance_statistics/conversational_development_index_spec.rb
+++ b/spec/features/instance_statistics/conversational_development_index_spec.rb
@@ -16,13 +16,21 @@ describe 'Conversational Development Index' do
end
context 'when usage ping is disabled' do
- it 'shows empty state' do
+ before do
stub_application_setting(usage_ping_enabled: false)
+ end
+ it 'shows empty state' do
visit instance_statistics_conversational_development_index_index_path
expect(page).to have_content('Usage ping is not enabled')
end
+
+ it 'hides the intro callout' do
+ visit instance_statistics_conversational_development_index_index_path
+
+ expect(page).not_to have_content 'Introducing Your Conversational Development Index'
+ end
end
context 'when there is no data to display' do
diff --git a/spec/features/instance_statistics/instance_statistics.rb b/spec/features/instance_statistics/instance_statistics.rb
new file mode 100644
index 00000000000..d03e6e68075
--- /dev/null
+++ b/spec/features/instance_statistics/instance_statistics.rb
@@ -0,0 +1,23 @@
+require 'rails_helper'
+
+describe 'Cohorts page', :js do
+ before do
+ sign_in(create(:admin))
+ end
+
+ it 'hides cohorts nav button when usage ping is disabled' do
+ stub_application_setting(usage_ping_enabled: false)
+
+ visit instance_statistics_root_path
+
+ expect(find('.nav-sidebar')).not_to have_content('Cohorts')
+ end
+
+ it 'shows cohorts nav button when usage ping is enabled' do
+ stub_application_setting(usage_ping_enabled: true)
+
+ visit instance_statistics_root_path
+
+ expect(find('.nav-sidebar')).to have_content('Cohorts')
+ end
+end
diff --git a/spec/features/issues/user_sees_breadcrumb_links_spec.rb b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
new file mode 100644
index 00000000000..ca234321235
--- /dev/null
+++ b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
@@ -0,0 +1,18 @@
+require 'rails_helper'
+
+describe 'New issue breadcrumbs' do
+ let(:project) { create(:project) }
+ let(:user) { project.creator }
+
+ before do
+ sign_in(user)
+ visit new_project_issue_path(project)
+ end
+
+ it 'displays a link to project issues and new issue pages' do
+ page.within '.breadcrumbs' do
+ expect(find_link('Issues')[:href]).to end_with(project_issues_path(project))
+ expect(find_link('New')[:href]).to end_with(new_project_issue_path(project))
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb b/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
new file mode 100644
index 00000000000..f17acb35a5a
--- /dev/null
+++ b/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
@@ -0,0 +1,18 @@
+require 'rails_helper'
+
+describe 'New merge request breadcrumbs' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.creator }
+
+ before do
+ sign_in(user)
+ visit project_new_merge_request_path(project)
+ end
+
+ it 'displays a link to project merge requests and new merge request pages' do
+ page.within '.breadcrumbs' do
+ expect(find_link('Merge Requests')[:href]).to end_with(project_merge_requests_path(project))
+ expect(find_link('New')[:href]).to end_with(project_new_merge_request_path(project))
+ end
+ end
+end
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 1064f72c271..2ba4d4918ff 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -5,8 +5,8 @@ describe 'File blob', :js do
let(:project) { create(:project, :public, :repository) }
- def visit_blob(path, anchor: nil, ref: 'master')
- visit project_blob_path(project, File.join(ref, path), anchor: anchor)
+ def visit_blob(path, anchor: nil, ref: 'master', legacy_render: nil)
+ visit project_blob_path(project, File.join(ref, path), anchor: anchor, legacy_render: legacy_render)
wait_for_requests
end
@@ -142,6 +142,52 @@ describe 'File blob', :js do
end
end
+ context 'Markdown rendering' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add RedCarpet and CommonMark Markdown ",
+ file_path: 'files/commonmark/file.md',
+ file_content: "1. one\n - sublist\n"
+ ).execute
+ end
+
+ context 'when rendering default markdown' do
+ before do
+ visit_blob('files/commonmark/file.md')
+
+ wait_for_requests
+ end
+
+ it 'renders using CommonMark' do
+ aggregate_failures do
+ expect(page).to have_content("sublist")
+ expect(page).not_to have_xpath("//ol//li//ul")
+ end
+ end
+ end
+
+ context 'when rendering legacy markdown' do
+ before do
+ visit_blob('files/commonmark/file.md', legacy_render: 1)
+
+ wait_for_requests
+ end
+
+ it 'renders using RedCarpet' do
+ aggregate_failures do
+ expect(page).to have_content("sublist")
+ expect(page).to have_xpath("//ol//li//ul")
+ end
+ end
+ end
+ end
+
context 'Markdown file (stored in LFS)' do
before do
project.add_maintainer(project.creator)
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 0e036b4ea68..d5b20605860 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -7,6 +7,7 @@ describe 'Editing file blob', :js do
let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
let(:branch) { 'master' }
let(:file_path) { project.repository.ls_files(project.repository.root_ref)[1] }
+ let(:readme_file_path) { 'README.md' }
context 'as a developer' do
let(:user) { create(:user) }
@@ -20,14 +21,19 @@ describe 'Editing file blob', :js do
def edit_and_commit(commit_changes: true)
wait_for_requests
find('.js-edit-blob').click
- find('#editor')
- execute_script('ace.edit("editor").setValue("class NextFeature\nend\n")')
+ fill_editor(content: "class NextFeature\\nend\\n")
if commit_changes
click_button 'Commit changes'
end
end
+ def fill_editor(content: "class NextFeature\\nend\\n")
+ wait_for_requests
+ find('#editor')
+ execute_script("ace.edit('editor').setValue('#{content}')")
+ end
+
context 'from MR diff' do
before do
visit diffs_project_merge_request_path(project, merge_request)
@@ -63,6 +69,30 @@ describe 'Editing file blob', :js do
expect(new_line_count).to be > 0
end
end
+
+ context 'when rendering the preview' do
+ it 'renders content with CommonMark' do
+ visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
+ fill_editor(content: "1. one\\n - sublist\\n")
+ click_link 'Preview'
+ wait_for_requests
+
+ # the above generates two separate lists (not embedded) in CommonMark
+ expect(page).to have_content("sublist")
+ expect(page).not_to have_xpath("//ol//li//ul")
+ end
+
+ it 'renders content with RedCarpet when legacy_render is set' do
+ visit project_edit_blob_path(project, tree_join(branch, readme_file_path), legacy_render: 1)
+ fill_editor(content: "1. one\\n - sublist\\n")
+ click_link 'Preview'
+ wait_for_requests
+
+ # the above generates a nested sublist in RedCarpet
+ expect(page).to have_content("sublist")
+ expect(page).to have_xpath("//ol//li//ul")
+ end
+ end
end
context 'visit blob edit' do
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index babf47cc341..ec968bfcf7d 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -38,6 +38,28 @@ describe 'User Cluster', :js do
end
end
+ context 'rbac_clusters feature flag is enabled' do
+ before do
+ stub_feature_flags(rbac_clusters: true)
+
+ fill_in 'cluster_name', with: 'dev-cluster'
+ fill_in 'cluster_platform_kubernetes_attributes_api_url', with: 'http://example.com'
+ fill_in 'cluster_platform_kubernetes_attributes_token', with: 'my-token'
+ check 'cluster_platform_kubernetes_attributes_authorization_type'
+ click_button 'Add Kubernetes cluster'
+ end
+
+ it 'user sees a cluster details page' do
+ expect(page).to have_content('Kubernetes cluster integration')
+ expect(page.find_field('cluster[name]').value).to eq('dev-cluster')
+ expect(page.find_field('cluster[platform_kubernetes_attributes][api_url]').value)
+ .to have_content('http://example.com')
+ expect(page.find_field('cluster[platform_kubernetes_attributes][token]').value)
+ .to have_content('my-token')
+ expect(page.find_field('cluster[platform_kubernetes_attributes][authorization_type]', disabled: true)).to be_checked
+ end
+ end
+
context 'when user filled form with invalid parameters' do
before do
click_button 'Add Kubernetes cluster'
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index ac6c8c337fa..6762460971f 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -36,7 +36,7 @@ describe 'Projects > Files > Project owner creates a license file', :js do
end
it 'project maintainer creates a license file from the "Add license" link' do
- click_link 'Add License'
+ click_link 'Add license'
expect(page).to have_content('New file')
expect(current_path).to eq(
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index 801291c1f77..0b8474fb87a 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -10,7 +10,7 @@ describe 'Projects > Files > Project owner sees a link to create a license file
it 'project maintainer creates a license file from a template' do
visit project_path(project)
- click_on 'Add License'
+ click_on 'Add license'
expect(page).to have_content('New file')
expect(current_path).to eq(
diff --git a/spec/features/projects/settings/integration_settings_spec.rb b/spec/features/projects/settings/integration_settings_spec.rb
index 8745ff72df0..32959969f54 100644
--- a/spec/features/projects/settings/integration_settings_spec.rb
+++ b/spec/features/projects/settings/integration_settings_spec.rb
@@ -51,6 +51,7 @@ describe 'Projects > Settings > Integration settings' do
fill_in 'hook_url', with: url
check 'Tag push events'
+ fill_in 'hook_push_events_branch_filter', with: 'master'
check 'Enable SSL verification'
check 'Job events'
diff --git a/spec/features/projects/settings/project_badges_spec.rb b/spec/features/projects/settings/project_badges_spec.rb
index 2ec94274f80..42b5547d43b 100644
--- a/spec/features/projects/settings/project_badges_spec.rb
+++ b/spec/features/projects/settings/project_badges_spec.rb
@@ -15,7 +15,7 @@ describe 'Project Badges' do
group.add_maintainer(user)
sign_in(user)
- visit(project_settings_badges_path(project))
+ visit(edit_project_path(project))
end
it 'shows a list of badges', :js do
diff --git a/spec/features/projects/settings/user_changes_default_branch_spec.rb b/spec/features/projects/settings/user_changes_default_branch_spec.rb
index e925539351d..fcf05e04a5c 100644
--- a/spec/features/projects/settings/user_changes_default_branch_spec.rb
+++ b/spec/features/projects/settings/user_changes_default_branch_spec.rb
@@ -1,20 +1,35 @@
require 'spec_helper'
describe 'Projects > Settings > User changes default branch' do
+ include Select2Helper
+
let(:user) { create(:user) }
- let(:project) { create(:project, :repository, namespace: user.namespace) }
before do
sign_in(user)
- visit edit_project_path(project)
+
+ visit project_settings_repository_path(project)
end
- it 'allows to change the default branch' do
- select 'fix', from: 'project_default_branch'
- page.within '.general-settings' do
- click_button 'Save changes'
+ context 'with normal project' do
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+
+ it 'allows to change the default branch', :js do
+ select2('fix', from: '#project_default_branch')
+
+ page.within '#default-branch-settings' do
+ click_button 'Save changes'
+ end
+
+ expect(find('#project_default_branch', visible: false).value).to eq 'fix'
end
+ end
- expect(find(:css, 'select#project_default_branch').value).to eq 'fix'
+ context 'with empty project' do
+ let(:project) { create(:project_empty_repo, namespace: user.namespace) }
+
+ it 'does not show default branch selector' do
+ expect(page).not_to have_selector('#project_default_branch')
+ end
end
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 0405e21a0d7..b8326edd4fd 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe 'Projects > Show > User sees setup shortcut buttons' do
- # For "New file", "Add License" functionality,
+ # For "New file", "Add license" functionality,
# see spec/features/projects/files/project_owner_creates_license_file_spec.rb
# see spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -58,9 +58,9 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
end
end
- it '"Add License" button linked to new file populated for a license' do
- page.within('.project-stats') do
- expect(page).to have_link('Add License', href: presenter.add_license_path)
+ it '"Add license" button linked to new file populated for a license' do
+ page.within('.project-metadata') do
+ expect(page).to have_link('Add license', href: presenter.add_license_path)
end
end
@@ -201,13 +201,13 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
end
end
- it 'no "Add License" button if the project already has a license' do
+ it 'no "Add license" button if the project already has a license' do
visit project_path(project)
expect(project.repository.license_blob).not_to be_nil
page.within('.project-stats') do
- expect(page).not_to have_link('Add License')
+ expect(page).not_to have_link('Add license')
end
end
diff --git a/spec/features/projects/tags/user_views_tags_spec.rb b/spec/features/projects/tags/user_views_tags_spec.rb
new file mode 100644
index 00000000000..f344b682715
--- /dev/null
+++ b/spec/features/projects/tags/user_views_tags_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe 'User views tags', :feature do
+ context 'rss' do
+ shared_examples 'has access to the tags RSS feed' do
+ it do
+ visit project_tags_path(project, format: :atom)
+
+ expect(page).to have_gitlab_http_status(200)
+ end
+ end
+
+ shared_examples 'does not have access to the tags RSS feed' do
+ it do
+ visit project_tags_path(project, format: :atom)
+
+ expect(page).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'when project public' do
+ let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
+
+ context 'when user signed in' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ visit project_tags_path(project)
+ end
+
+ it_behaves_like "it has an RSS button with current_user's feed token"
+ it_behaves_like "an autodiscoverable RSS feed with current_user's feed token"
+ it_behaves_like 'has access to the tags RSS feed'
+ end
+
+ context 'when user signed out' do
+ before do
+ visit project_tags_path(project)
+ end
+
+ it_behaves_like 'it has an RSS button without a feed token'
+ it_behaves_like 'an autodiscoverable RSS feed without a feed token'
+ it_behaves_like 'has access to the tags RSS feed'
+ end
+ end
+
+ context 'when project is not public' do
+ let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
+
+ context 'when user signed in' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it_behaves_like 'has access to the tags RSS feed'
+ end
+
+ context 'when user signed out' do
+ it_behaves_like 'does not have access to the tags RSS feed'
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
index ed5f8105487..f505023d1d0 100644
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ b/spec/features/projects/wiki/markdown_preview_spec.rb
@@ -162,6 +162,34 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>")
end
end
+
+ context 'when rendering the preview' do
+ it 'renders content with CommonMark' do
+ create_wiki_page 'a-page/b-page/c-page/common-mark'
+ click_link 'Edit'
+
+ fill_in :wiki_content, with: "1. one\n - sublist\n"
+ click_on "Preview"
+
+ # the above generates two separate lists (not embedded) in CommonMark
+ expect(page).to have_content("sublist")
+ expect(page).not_to have_xpath("//ol//li//ul")
+ end
+
+ it 'renders content with RedCarpet when legacy_render is set' do
+ wiki_page = create(:wiki_page,
+ wiki: project.wiki,
+ attrs: { title: 'home', content: "Empty content" })
+ visit(project_wiki_edit_path(project, wiki_page, legacy_render: 1))
+
+ fill_in :wiki_content, with: "1. one\n - sublist\n"
+ click_on "Preview"
+
+ # the above generates a nested sublist in RedCarpet
+ expect(page).to have_content("sublist")
+ expect(page).to have_xpath("//ol//li//ul")
+ end
+ end
end
it "does not linkify double brackets inside code blocks as expected" do
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index 149eeb4f9ba..b30286e4446 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -146,6 +146,8 @@ describe "User creates wiki page" do
expect(page).to have_selector(".katex", count: 3).and have_content("2+2 is 4")
end
end
+
+ it_behaves_like 'wiki file attachments'
end
context "in a group namespace", :js do
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index 2840d28cf30..2ce5ee0e87d 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
describe 'User updates wiki page' do
shared_examples 'wiki page user update' do
let(:user) { create(:user) }
+
before do
project.add_maintainer(user)
sign_in(user)
@@ -55,6 +56,8 @@ describe 'User updates wiki page' do
expect(page).to have_content('Updated Wiki Content')
end
+
+ it_behaves_like 'wiki file attachments'
end
end
@@ -64,14 +67,14 @@ describe 'User updates wiki page' do
before do
visit(project_wikis_path(project))
+
+ click_link('Edit')
end
context 'in a user namespace' do
let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
it 'updates a page' do
- click_link('Edit')
-
# Commit message field should have correct value.
expect(page).to have_field('wiki[message]', with: 'Update home')
@@ -84,8 +87,6 @@ describe 'User updates wiki page' do
end
it 'shows a validation error message' do
- click_link('Edit')
-
fill_in(:wiki_content, with: '')
click_button('Save changes')
@@ -97,8 +98,6 @@ describe 'User updates wiki page' do
end
it 'shows the emoji autocompletion dropdown', :js do
- click_link('Edit')
-
find('#wiki_content').native.send_keys('')
fill_in(:wiki_content, with: ':')
@@ -106,8 +105,6 @@ describe 'User updates wiki page' do
end
it 'shows the error message' do
- click_link('Edit')
-
wiki_page.update(content: 'Update')
click_button('Save changes')
@@ -116,30 +113,27 @@ describe 'User updates wiki page' do
end
it 'updates a page' do
- click_on('Edit')
fill_in('Content', with: 'Updated Wiki Content')
click_on('Save changes')
expect(page).to have_content('Updated Wiki Content')
end
- it 'cancels edititng of a page' do
- click_on('Edit')
-
+ it 'cancels editing of a page' do
page.within(:css, '.wiki-form .form-actions') do
click_on('Cancel')
end
expect(current_path).to eq(project_wiki_path(project, wiki_page))
end
+
+ it_behaves_like 'wiki file attachments'
end
context 'in a group namespace' do
let(:project) { create(:project, :wiki_repo, namespace: create(:group, :public)) }
it 'updates a page' do
- click_link('Edit')
-
# Commit message field should have correct value.
expect(page).to have_field('wiki[message]', with: 'Update home')
@@ -151,6 +145,8 @@ describe 'User updates wiki page' do
expect(page).to have_content("Last edited by #{user.name}")
expect(page).to have_content('My awesome wiki!')
end
+
+ it_behaves_like 'wiki file attachments'
end
end
@@ -222,6 +218,8 @@ describe 'User updates wiki page' do
expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}"))
end
+
+ it_behaves_like 'wiki file attachments'
end
end
diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
index 760324adacc..747406efc8b 100644
--- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
@@ -93,7 +93,7 @@ describe 'User views a wiki page' do
allow(wiki_file).to receive(:mime_type).and_return('image/jpeg')
allow_any_instance_of(ProjectWiki).to receive(:find_file).with('image.jpg', nil).and_return(wiki_file)
- expect(page).to have_xpath('//img[@data-src="image.jpg"]')
+ expect(page).to have_xpath("//img[@data-src='#{project.wiki.wiki_base_path}/image.jpg']")
expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg")
click_on('image')
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 56ed0c936a6..22e3a99072f 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe 'Project' do
include ProjectForksHelper
+ include MobileHelpers
describe 'creating from template' do
let(:user) { create(:user) }
@@ -54,25 +55,72 @@ describe 'Project' do
it 'parses Markdown' do
project.update_attribute(:description, 'This is **my** project')
visit path
- expect(page).to have_css('.project-home-desc > p > strong')
+ expect(page).to have_css('.project-description > .project-description-markdown > p > strong')
end
it 'passes through html-pipeline' do
project.update_attribute(:description, 'This project is the :poop:')
visit path
- expect(page).to have_css('.project-home-desc > p > gl-emoji')
+ expect(page).to have_css('.project-description > .project-description-markdown > p > gl-emoji')
end
it 'sanitizes unwanted tags' do
project.update_attribute(:description, "```\ncode\n```")
visit path
- expect(page).not_to have_css('.project-home-desc code')
+ expect(page).not_to have_css('.project-description code')
end
it 'permits `rel` attribute on links' do
project.update_attribute(:description, 'https://google.com/')
visit path
- expect(page).to have_css('.project-home-desc a[rel]')
+ expect(page).to have_css('.project-description a[rel]')
+ end
+
+ context 'read more', :js do
+ let(:read_more_selector) { '.read-more-container' }
+ let(:read_more_trigger_selector) { '.project-home-desc .js-read-more-trigger' }
+
+ it 'does not display "read more" link on desktop breakpoint' do
+ project.update_attribute(:description, 'This is **my** project')
+ visit path
+
+ expect(find(read_more_trigger_selector, visible: false)).not_to be_visible
+ end
+
+ it 'displays "read more" link on mobile breakpoint' do
+ project.update_attribute(:description, 'This is **my** project')
+ visit path
+ resize_screen_xs
+
+ find(read_more_trigger_selector).click
+
+ expect(page).to have_css('.project-description .is-expanded')
+ end
+ end
+ end
+
+ describe 'copy clone URL to clipboard', :js do
+ let(:project) { create(:project, :repository) }
+ let(:path) { project_path(project) }
+
+ before do
+ sign_in(create(:admin))
+ visit path
+ end
+
+ context 'desktop component' do
+ it 'shows on md and larger breakpoints' do
+ expect(find('.git-clone-holder')).to be_visible
+ expect(find('.mobile-git-clone', visible: false)).not_to be_visible
+ end
+ end
+
+ context 'mobile component' do
+ it 'shows mobile component on sm and smaller breakpoints' do
+ resize_screen_xs
+ expect(find('.mobile-git-clone')).to be_visible
+ expect(find('.git-clone-holder', visible: false)).not_to be_visible
+ end
end
end
diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb
index 3fe0b60b18f..367a479f62a 100644
--- a/spec/features/snippets/show_spec.rb
+++ b/spec/features/snippets/show_spec.rb
@@ -68,23 +68,45 @@ describe 'Snippet', :js do
end
end
- context 'with cached Redcarpet html' do
- let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content, cached_markdown_version: CacheMarkdownField::CACHE_REDCARPET_VERSION) }
+ context 'Markdown rendering' do
+ let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content) }
let(:file_name) { 'test.md' }
let(:content) { "1. one\n - sublist\n" }
- it 'renders correctly' do
- expect(page).to have_xpath("//ol//li//ul")
+ context 'when rendering default markdown' do
+ it 'renders using CommonMark' do
+ expect(page).to have_content("sublist")
+ expect(page).not_to have_xpath("//ol//li//ul")
+ end
end
- end
- context 'with cached CommonMark html' do
- let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content, cached_markdown_version: CacheMarkdownField::CACHE_COMMONMARK_VERSION) }
- let(:file_name) { 'test.md' }
- let(:content) { "1. one\n - sublist\n" }
+ context 'when rendering legacy markdown' do
+ before do
+ visit snippet_path(snippet, legacy_render: 1)
- it 'renders correctly' do
- expect(page).not_to have_xpath("//ol//li//ul")
+ wait_for_requests
+ end
+
+ it 'renders using RedCarpet' do
+ expect(page).to have_content("sublist")
+ expect(page).to have_xpath("//ol//li//ul")
+ end
+ end
+
+ context 'with cached CommonMark html' do
+ let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content, cached_markdown_version: CacheMarkdownField::CACHE_COMMONMARK_VERSION) }
+
+ it 'renders correctly' do
+ expect(page).not_to have_xpath("//ol//li//ul")
+ end
+ end
+
+ context 'with cached Redcarpet html' do
+ let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content, cached_markdown_version: CacheMarkdownField::CACHE_REDCARPET_VERSION) }
+
+ it 'renders correctly' do
+ expect(page).to have_xpath("//ol//li//ul")
+ end
end
end
diff --git a/spec/finders/template_finder_spec.rb b/spec/finders/template_finder_spec.rb
new file mode 100644
index 00000000000..1d399e8194f
--- /dev/null
+++ b/spec/finders/template_finder_spec.rb
@@ -0,0 +1,48 @@
+require 'spec_helper'
+
+describe TemplateFinder do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#build' do
+ where(:type, :expected_class) do
+ :dockerfiles | described_class
+ :gitignores | described_class
+ :gitlab_ci_ymls | described_class
+ :licenses | ::LicenseTemplateFinder
+ end
+
+ with_them do
+ subject { described_class.build(type) }
+
+ it { is_expected.to be_a(expected_class) }
+ end
+ end
+
+ describe '#execute' do
+ where(:type, :vendored_name) do
+ :dockerfiles | 'Binary'
+ :gitignores | 'Actionscript'
+ :gitlab_ci_ymls | 'Android'
+ end
+
+ with_them do
+ it 'returns all vendored templates when no name is specified' do
+ result = described_class.new(type).execute
+
+ expect(result).to include(have_attributes(name: vendored_name))
+ end
+
+ it 'returns only the specified vendored template when a name is specified' do
+ result = described_class.new(type, name: vendored_name).execute
+
+ expect(result).to have_attributes(name: vendored_name)
+ end
+
+ it 'returns nil when an unknown name is specified' do
+ result = described_class.new(type, name: 'unknown').execute
+
+ expect(result).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/ci_detailed_status.json b/spec/fixtures/api/schemas/ci_detailed_status.json
index 01e34249bf1..d74248eabef 100644
--- a/spec/fixtures/api/schemas/ci_detailed_status.json
+++ b/spec/fixtures/api/schemas/ci_detailed_status.json
@@ -18,7 +18,29 @@
"tooltip": { "type": "string" },
"has_details": { "type": "boolean" },
"details_path": { "type": "string" },
- "favicon": { "type": "string" }
+ "favicon": { "type": "string" },
+ "action": {
+ "type": "object",
+ "required": [
+ "icon",
+ "title",
+ "path",
+ "method"
+ ],
+ "properties": {
+ "icon": {
+ "type": "string",
+ "enum": [
+ "retry",
+ "play",
+ "cancel"
+ ]
+ },
+ "title": { "type": "string" },
+ "path": { "type": "string" },
+ "method": { "$ref": "http_method.json" }
+ }
+ }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/entities/diff_line.json b/spec/fixtures/api/schemas/entities/diff_line.json
new file mode 100644
index 00000000000..66e8b443e1b
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/diff_line.json
@@ -0,0 +1,14 @@
+{
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "line_code": { "type": ["string", "null"] },
+ "type": { "type": ["string", "null"] },
+ "old_line": { "type": ["integer", "null"] },
+ "new_line": { "type": ["integer", "null"] },
+ "text": { "type": ["string"] },
+ "rich_text": { "type": ["string"] },
+ "meta_data": { "type": ["object", "null"] }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/entities/diff_line_parallel.json b/spec/fixtures/api/schemas/entities/diff_line_parallel.json
new file mode 100644
index 00000000000..f924eb0c601
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/diff_line_parallel.json
@@ -0,0 +1,11 @@
+{
+ "required" : [
+ "left",
+ "right"
+ ],
+ "properties" : {
+ "left": { "$ref": "diff_line.json" },
+ "right": { "$ref": "diff_line.json" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/http_method.json b/spec/fixtures/api/schemas/http_method.json
new file mode 100644
index 00000000000..c0f8acc5781
--- /dev/null
+++ b/spec/fixtures/api/schemas/http_method.json
@@ -0,0 +1,5 @@
+{
+ "type": "string",
+ "description": "HTTP methods that the API can specify to send.",
+ "enum": [ "post", "get", "put", "patch" ]
+}
diff --git a/spec/fixtures/api/schemas/job/artifact.json b/spec/fixtures/api/schemas/job/artifact.json
new file mode 100644
index 00000000000..1812e69fbd6
--- /dev/null
+++ b/spec/fixtures/api/schemas/job/artifact.json
@@ -0,0 +1,11 @@
+{
+ "type": "object",
+ "properties": {
+ "download_path": { "type": "string"},
+ "browse_path": { "type": "string"},
+ "keep_path": { "type": "string"},
+ "expired": { "type": "boolean" },
+ "expire_at": { "type": "string", "format": "date-time" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/job.json b/spec/fixtures/api/schemas/job/job.json
index 7b92ab25bc1..c793d93c0f6 100644
--- a/spec/fixtures/api/schemas/job.json
+++ b/spec/fixtures/api/schemas/job/job.json
@@ -1,4 +1,5 @@
{
+ "description": "Basic job information",
"type": "object",
"required": [
"id",
@@ -13,12 +14,18 @@
"properties": {
"id": { "type": "integer" },
"name": { "type": "string" },
- "started": { "type": "boolean" } ,
+ "started": {
+ "oneOf": [
+ { "type": "string", "format": "date-time" },
+ { "type": "boolean" }
+ ]
+ },
"build_path": { "type": "string" },
+ "retry_path": { "type": "string" },
"playable": { "type": "boolean" },
"created_at": { "type": "string" },
"updated_at": { "type": "string" },
- "status": { "$ref": "ci_detailed_status.json" }
+ "status": { "$ref": "../ci_detailed_status.json" }
},
- "additionalProperties": false
+ "additionalProperties": true
}
diff --git a/spec/fixtures/api/schemas/job/job_details.json b/spec/fixtures/api/schemas/job/job_details.json
new file mode 100644
index 00000000000..b8c099250be
--- /dev/null
+++ b/spec/fixtures/api/schemas/job/job_details.json
@@ -0,0 +1,8 @@
+{
+ "allOf": [{ "$ref": "job.json" }],
+ "description": "An extension of job.json with more detailed information",
+ "properties": {
+ "artifact": { "$ref": "artifact.json" },
+ "terminal_path": { "type": "string" }
+ }
+}
diff --git a/spec/fixtures/api/schemas/pipeline_stage.json b/spec/fixtures/api/schemas/pipeline_stage.json
index 55454200bb3..eb2667295f0 100644
--- a/spec/fixtures/api/schemas/pipeline_stage.json
+++ b/spec/fixtures/api/schemas/pipeline_stage.json
@@ -13,7 +13,7 @@
"groups": { "optional": true },
"latest_statuses": {
"type": "array",
- "items": { "$ref": "job.json" },
+ "items": { "$ref": "job/job.json" },
"optional": true
},
"status": { "$ref": "ci_detailed_status.json" },
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index 0c0a0003231..eebae1d7290 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -40,12 +40,24 @@ describe ButtonHelper do
end
context 'when user has no personal access tokens' do
- it 'has a personal access token text on the dropdown description ' do
+ it 'has a personal access token text on the dropdown description' do
description = element.search('.dropdown-menu-inner-content').first
expect(description.inner_text).to eq 'Create a personal access token on your account to pull or push via HTTP.'
end
end
+
+ context 'when user has personal access tokens' do
+ before do
+ create(:personal_access_token, user: user)
+ end
+
+ it 'does not have a personal access token text on the dropdown description' do
+ description = element.search('.dropdown-menu-inner-content').first
+
+ expect(description).to be_nil
+ end
+ end
end
context 'when user is ldap user' do
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index fccde8b7eba..466e018d68c 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -25,4 +25,47 @@ describe EventsHelper do
expect(helper.event_commit_title("foo & bar")).to eq("foo & bar")
end
end
+
+ describe '#event_feed_url' do
+ let(:event) { create(:event) }
+ let(:project) { create(:project, :public, :repository) }
+
+ it "returns project issue url" do
+ event.target = create(:issue)
+
+ expect(helper.event_feed_url(event)).to eq(project_issue_url(event.project, event.issue))
+ end
+
+ it "returns project merge_request url" do
+ event.target = create(:merge_request)
+
+ expect(helper.event_feed_url(event)).to eq(project_merge_request_url(event.project, event.merge_request))
+ end
+
+ it "returns project commit url" do
+ event.target = create(:note_on_commit, project: project)
+
+ expect(helper.event_feed_url(event)).to eq(project_commit_url(event.project, event.note_target))
+ end
+
+ it "returns event note target url" do
+ event.target = create(:note)
+
+ expect(helper.event_feed_url(event)).to eq(event_note_target_url(event))
+ end
+
+ it "returns project url" do
+ event.project = project
+ event.action = 1
+
+ expect(helper.event_feed_url(event)).to eq(project_url(event.project))
+ end
+
+ it "returns push event feed url" do
+ event = create(:push_event)
+ create(:push_event_payload, event: event, action: :pushed)
+
+ expect(helper.event_feed_url(event)).to eq(push_event_feed_url(event))
+ end
+ end
end
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 597648b064d..9a29ac26eff 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -25,17 +25,17 @@ describe MarkupHelper do
let(:actual) { "#{merge_request.to_reference} -> #{commit.to_reference} -> #{issue.to_reference}" }
it "links to the merge request" do
- expected = project_merge_request_path(project, merge_request)
+ expected = urls.project_merge_request_path(project, merge_request)
expect(helper.markdown(actual)).to match(expected)
end
it "links to the commit" do
- expected = project_commit_path(project, commit)
+ expected = urls.project_commit_path(project, commit)
expect(helper.markdown(actual)).to match(expected)
end
it "links to the issue" do
- expected = project_issue_path(project, issue)
+ expected = urls.project_issue_path(project, issue)
expect(helper.markdown(actual)).to match(expected)
end
end
@@ -46,7 +46,7 @@ describe MarkupHelper do
let(:second_issue) { create(:issue, project: second_project) }
it 'links to the issue' do
- expected = project_issue_path(second_project, second_issue)
+ expected = urls.project_issue_path(second_project, second_issue)
expect(markdown(actual, project: second_project)).to match(expected)
end
end
@@ -93,7 +93,7 @@ describe MarkupHelper do
# First issue link
expect(doc.css('a')[1].attr('href'))
- .to eq project_issue_path(project, issues[0])
+ .to eq urls.project_issue_path(project, issues[0])
expect(doc.css('a')[1].text).to eq issues[0].to_reference
# Internal commit link
@@ -102,7 +102,7 @@ describe MarkupHelper do
# Second issue link
expect(doc.css('a')[3].attr('href'))
- .to eq project_issue_path(project, issues[1])
+ .to eq urls.project_issue_path(project, issues[1])
expect(doc.css('a')[3].text).to eq issues[1].to_reference
# Trailing commit link
@@ -128,7 +128,7 @@ describe MarkupHelper do
# First issue link
expect(doc.css('a')[1].attr('href'))
- .to eq project_issue_path(project, issues[0])
+ .to eq urls.project_issue_path(project, issues[0])
expect(doc.css('a')[1].text).to eq issues[0].to_reference
# Internal commit link
@@ -137,7 +137,7 @@ describe MarkupHelper do
# Second issue link
expect(doc.css('a')[3].attr('href'))
- .to eq project_issue_path(project, issues[1])
+ .to eq urls.project_issue_path(project, issues[1])
expect(doc.css('a')[3].text).to eq issues[1].to_reference
# Trailing commit link
@@ -183,7 +183,7 @@ describe MarkupHelper do
doc = Nokogiri::HTML.parse(rendered)
expect(doc.css('a')[0].attr('href'))
- .to eq project_issue_path(project, issue)
+ .to eq urls.project_issue_path(project, issue)
expect(doc.css('a')[0].text).to eq issue.to_reference
wrapped = helper.link_to_html(rendered, link)
@@ -207,6 +207,17 @@ describe MarkupHelper do
expect(helper).to receive(:markdown_unsafe).with('wiki content',
pipeline: :wiki, project: project, project_wiki: @wiki, page_slug: "nested/page",
+ issuable_state_filter_enabled: true)
+
+ helper.render_wiki_content(@wiki)
+ end
+
+ it 'uses Wiki pipeline for markdown files with RedCarpet if the feature is disabled' do
+ stub_feature_flags(commonmark_for_repositories: false)
+ allow(@wiki).to receive(:format).and_return(:markdown)
+
+ expect(helper).to receive(:markdown_unsafe).with('wiki content',
+ pipeline: :wiki, project: project, project_wiki: @wiki, page_slug: "nested/page",
issuable_state_filter_enabled: true, markdown_engine: :redcarpet)
helper.render_wiki_content(@wiki)
@@ -259,10 +270,18 @@ describe MarkupHelper do
expect(helper.markup('foo.md', content, rendered: '<p>NOEL</p>')).to eq('<p>NOEL</p>')
end
- it 'defaults to Redcarpet' do
- expect(helper).to receive(:markdown_unsafe).with(content, hash_including(markdown_engine: :redcarpet)).and_return('NOEL')
+ it 'defaults to CommonMark' do
+ expect(helper.markup('foo.md', 'x^2')).to include('x^2')
+ end
- expect(helper.markup('foo.md', content)).to eq('NOEL')
+ it 'honors markdown_engine for RedCarpet' do
+ expect(helper.markup('foo.md', 'x^2', { markdown_engine: :redcarpet })).to include('x<sup>2</sup>')
+ end
+
+ it 'uses RedCarpet if the feature is disabled' do
+ stub_feature_flags(commonmark_for_repositories: false)
+
+ expect(helper.markup('foo.md', 'x^2', { markdown_engine: :redcarpet })).to include('x<sup>2</sup>')
end
end
@@ -414,4 +433,8 @@ describe MarkupHelper do
expect(helper.cross_project_reference(project, issue)).to include(project.full_path)
end
end
+
+ def urls
+ Gitlab::Routing.url_helpers
+ end
end
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index 234690e742b..7ccbdcd1332 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -50,9 +50,12 @@ describe NamespacesHelper do
end
it 'selects the new group by default' do
+ # Ensure we don't select a group with the same name
+ create(:group, name: 'new-group', path: 'another-path')
+
allow(helper).to receive(:current_user).and_return(user)
- options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: 'new-group'))
+ options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: 'new-group', path: 'new-group'))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index cbd4ff0fb4a..976b6c312b4 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -470,4 +470,16 @@ describe ProjectsHelper do
end
end
end
+
+ describe '#legacy_render_context' do
+ it 'returns the redcarpet engine' do
+ params = { legacy_render: '1' }
+
+ expect(helper.legacy_render_context(params)).to include(markdown_engine: :redcarpet)
+ end
+
+ it 'returns an empty context when legacy_render is not set' do
+ expect(helper.legacy_render_context({})).to be_empty
+ end
+ end
end
diff --git a/spec/javascripts/badges/components/badge_form_spec.js b/spec/javascripts/badges/components/badge_form_spec.js
index dd21ec279cb..31195bd762b 100644
--- a/spec/javascripts/badges/components/badge_form_spec.js
+++ b/spec/javascripts/badges/components/badge_form_spec.js
@@ -1,21 +1,31 @@
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import store from '~/badges/store';
+import createEmptyBadge from '~/badges/empty_badge';
import BadgeForm from '~/badges/components/badge_form.vue';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import { createDummyBadge } from '../dummy_badge';
+import { DUMMY_IMAGE_URL, TEST_HOST } from '../../test_constants';
+
+// avoid preview background process
+BadgeForm.methods.debouncedPreview = () => {};
describe('BadgeForm component', () => {
const Component = Vue.extend(BadgeForm);
+ let axiosMock;
let vm;
beforeEach(() => {
setFixtures(`
<div id="dummy-element"></div>
`);
+
+ axiosMock = new MockAdapter(axios);
});
afterEach(() => {
vm.$destroy();
+ axiosMock.restore();
});
describe('methods', () => {
@@ -38,93 +48,86 @@ describe('BadgeForm component', () => {
expect(vm.stopEditing).toHaveBeenCalled();
});
});
+ });
- describe('onSubmit', () => {
- describe('if isEditing is true', () => {
- beforeEach(() => {
- spyOn(vm, 'saveBadge').and.returnValue(Promise.resolve());
- store.replaceState({
- ...store.state,
- isSaving: false,
- badgeInEditForm: createDummyBadge(),
- });
- vm.isEditing = true;
- });
-
- it('returns immediately if imageUrl is empty', () => {
- store.state.badgeInEditForm.imageUrl = '';
-
- vm.onSubmit();
-
- expect(vm.saveBadge).not.toHaveBeenCalled();
- });
-
- it('returns immediately if linkUrl is empty', () => {
- store.state.badgeInEditForm.linkUrl = '';
-
- vm.onSubmit();
-
- expect(vm.saveBadge).not.toHaveBeenCalled();
- });
-
- it('returns immediately if isSaving is true', () => {
- store.state.isSaving = true;
+ const sharedSubmitTests = submitAction => {
+ const imageUrlSelector = '#badge-image-url';
+ const findImageUrlElement = () => vm.$el.querySelector(imageUrlSelector);
+ const linkUrlSelector = '#badge-link-url';
+ const findLinkUrlElement = () => vm.$el.querySelector(linkUrlSelector);
+ const setValue = (inputElementSelector, url) => {
+ const inputElement = vm.$el.querySelector(inputElementSelector);
+ inputElement.value = url;
+ inputElement.dispatchEvent(new Event('input'));
+ };
+ const submitForm = () => {
+ const submitButton = vm.$el.querySelector('button[type="submit"]');
+ submitButton.click();
+ };
+ const expectInvalidInput = inputElementSelector => {
+ const inputElement = vm.$el.querySelector(inputElementSelector);
+ expect(inputElement).toBeMatchedBy(':invalid');
+ const feedbackElement = vm.$el.querySelector(`${inputElementSelector} + .invalid-feedback`);
+ expect(feedbackElement).toBeVisible();
+ };
- vm.onSubmit();
+ beforeEach(() => {
+ spyOn(vm, submitAction).and.returnValue(Promise.resolve());
+ store.replaceState({
+ ...store.state,
+ badgeInAddForm: createEmptyBadge(),
+ badgeInEditForm: createEmptyBadge(),
+ isSaving: false,
+ });
- expect(vm.saveBadge).not.toHaveBeenCalled();
- });
+ setValue(linkUrlSelector, `${TEST_HOST}/link/url`);
+ setValue(imageUrlSelector, `${window.location.origin}${DUMMY_IMAGE_URL}`);
+ });
- it('calls saveBadge', () => {
- vm.onSubmit();
+ it('returns immediately if imageUrl is empty', () => {
+ setValue(imageUrlSelector, '');
- expect(vm.saveBadge).toHaveBeenCalled();
- });
- });
+ submitForm();
- describe('if isEditing is false', () => {
- beforeEach(() => {
- spyOn(vm, 'addBadge').and.returnValue(Promise.resolve());
- store.replaceState({
- ...store.state,
- isSaving: false,
- badgeInAddForm: createDummyBadge(),
- });
- vm.isEditing = false;
- });
+ expectInvalidInput(imageUrlSelector);
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
- it('returns immediately if imageUrl is empty', () => {
- store.state.badgeInAddForm.imageUrl = '';
+ it('returns immediately if imageUrl is malformed', () => {
+ setValue(imageUrlSelector, 'not-a-url');
- vm.onSubmit();
+ submitForm();
- expect(vm.addBadge).not.toHaveBeenCalled();
- });
+ expectInvalidInput(imageUrlSelector);
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
- it('returns immediately if linkUrl is empty', () => {
- store.state.badgeInAddForm.linkUrl = '';
+ it('returns immediately if linkUrl is empty', () => {
+ setValue(linkUrlSelector, '');
- vm.onSubmit();
+ submitForm();
- expect(vm.addBadge).not.toHaveBeenCalled();
- });
+ expectInvalidInput(linkUrlSelector);
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
- it('returns immediately if isSaving is true', () => {
- store.state.isSaving = true;
+ it('returns immediately if linkUrl is malformed', () => {
+ setValue(linkUrlSelector, 'not-a-url');
- vm.onSubmit();
+ submitForm();
- expect(vm.addBadge).not.toHaveBeenCalled();
- });
+ expectInvalidInput(linkUrlSelector);
+ expect(vm[submitAction]).not.toHaveBeenCalled();
+ });
- it('calls addBadge', () => {
- vm.onSubmit();
+ it(`calls ${submitAction}`, () => {
+ submitForm();
- expect(vm.addBadge).toHaveBeenCalled();
- });
- });
+ expect(findImageUrlElement()).toBeMatchedBy(':valid');
+ expect(findLinkUrlElement()).toBeMatchedBy(':valid');
+ expect(vm[submitAction]).toHaveBeenCalled();
});
- });
+ };
describe('if isEditing is false', () => {
beforeEach(() => {
@@ -138,12 +141,15 @@ describe('BadgeForm component', () => {
});
it('renders one button', () => {
- const buttons = vm.$el.querySelectorAll('.row-content-block button');
+ expect(vm.$el.querySelector('.row-content-block')).toBeNull();
+ const buttons = vm.$el.querySelectorAll('.form-group:last-of-type button');
expect(buttons.length).toBe(1);
const buttonAddElement = buttons[0];
expect(buttonAddElement).toBeVisible();
expect(buttonAddElement).toHaveText('Add badge');
});
+
+ sharedSubmitTests('addBadge');
});
describe('if isEditing is true', () => {
@@ -167,5 +173,7 @@ describe('BadgeForm component', () => {
expect(buttonCancelElement).toBeVisible();
expect(buttonCancelElement).toHaveText('Cancel');
});
+
+ sharedSubmitTests('saveBadge');
});
});
diff --git a/spec/javascripts/fixtures/projects.rb b/spec/javascripts/fixtures/projects.rb
index 57c78182abc..d98f7f55b20 100644
--- a/spec/javascripts/fixtures/projects.rb
+++ b/spec/javascripts/fixtures/projects.rb
@@ -6,6 +6,7 @@ describe 'Projects (JavaScript fixtures)', type: :controller do
let(:admin) { create(:admin) }
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
let(:project) { create(:project, namespace: namespace, path: 'builds-project') }
+ let(:project_with_repo) { create(:project, :repository, description: 'Code and stuff') }
let(:project_variable_populated) { create(:project, namespace: namespace, path: 'builds-project2') }
let!(:variable1) { create(:ci_variable, project: project_variable_populated) }
let!(:variable2) { create(:ci_variable, project: project_variable_populated) }
@@ -35,6 +36,15 @@ describe 'Projects (JavaScript fixtures)', type: :controller do
store_frontend_fixture(response, example.description)
end
+ it 'projects/overview.html.raw' do |example|
+ get :show,
+ namespace_id: project_with_repo.namespace.to_param,
+ id: project_with_repo
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+
it 'projects/edit.html.raw' do |example|
get :edit,
namespace_id: project.namespace.to_param,
diff --git a/spec/javascripts/jobs/store/actions_spec.js b/spec/javascripts/jobs/store/actions_spec.js
new file mode 100644
index 00000000000..5042718dfa0
--- /dev/null
+++ b/spec/javascripts/jobs/store/actions_spec.js
@@ -0,0 +1,625 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import {
+ setJobEndpoint,
+ setTraceEndpoint,
+ setStagesEndpoint,
+ setJobsEndpoint,
+ clearEtagPoll,
+ stopPolling,
+ requestJob,
+ fetchJob,
+ receiveJobSuccess,
+ receiveJobError,
+ scrollTop,
+ scrollBottom,
+ requestTrace,
+ fetchTrace,
+ stopPollingTrace,
+ receiveTraceSuccess,
+ receiveTraceError,
+ fetchFavicon,
+ requestStatusFavicon,
+ receiveStatusFaviconSuccess,
+ requestStatusFaviconError,
+ requestStages,
+ fetchStages,
+ receiveStagesSuccess,
+ receiveStagesError,
+ requestJobsForStage,
+ setSelectedStage,
+ fetchJobsForStage,
+ receiveJobsForStageSuccess,
+ receiveJobsForStageError,
+} from '~/jobs/store/actions';
+import state from '~/jobs/store/state';
+import * as types from '~/jobs/store/mutation_types';
+import testAction from 'spec/helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+
+describe('Job State actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state();
+ });
+
+ describe('setJobEndpoint', () => {
+ it('should commit SET_JOB_ENDPOINT mutation', done => {
+ testAction(
+ setJobEndpoint,
+ 'job/872324.json',
+ mockedState,
+ [{ type: types.SET_JOB_ENDPOINT, payload: 'job/872324.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setTraceEndpoint', () => {
+ it('should commit SET_TRACE_ENDPOINT mutation', done => {
+ testAction(
+ setTraceEndpoint,
+ 'job/872324/trace.json',
+ mockedState,
+ [{ type: types.SET_TRACE_ENDPOINT, payload: 'job/872324/trace.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setStagesEndpoint', () => {
+ it('should commit SET_STAGES_ENDPOINT mutation', done => {
+ testAction(
+ setStagesEndpoint,
+ 'job/872324/stages.json',
+ mockedState,
+ [{ type: types.SET_STAGES_ENDPOINT, payload: 'job/872324/stages.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setJobsEndpoint', () => {
+ it('should commit SET_JOBS_ENDPOINT mutation', done => {
+ testAction(
+ setJobsEndpoint,
+ 'job/872324/stages/build.json',
+ mockedState,
+ [{ type: types.SET_JOBS_ENDPOINT, payload: 'job/872324/stages/build.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestJob', () => {
+ it('should commit REQUEST_JOB mutation', done => {
+ testAction(requestJob, null, mockedState, [{ type: types.REQUEST_JOB }], [], done);
+ });
+ });
+
+ describe('fetchJob', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.jobEndpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ stopPolling();
+ clearEtagPoll();
+ });
+
+ describe('success', () => {
+ it('dispatches requestJob and receiveJobSuccess', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, { id: 121212, name: 'karma' });
+
+ testAction(
+ fetchJob,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestJob',
+ },
+ {
+ payload: { id: 121212, name: 'karma' },
+ type: 'receiveJobSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
+ });
+
+ it('dispatches requestJob and receiveJobError', done => {
+ testAction(
+ fetchJob,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestJob',
+ },
+ {
+ type: 'receiveJobError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('receiveJobSuccess', () => {
+ it('should commit RECEIVE_JOB_SUCCESS mutation', done => {
+ testAction(
+ receiveJobSuccess,
+ { id: 121232132 },
+ mockedState,
+ [{ type: types.RECEIVE_JOB_SUCCESS, payload: { id: 121232132 } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveJobError', () => {
+ it('should commit RECEIVE_JOB_ERROR mutation', done => {
+ testAction(receiveJobError, null, mockedState, [{ type: types.RECEIVE_JOB_ERROR }], [], done);
+ });
+ });
+
+ describe('scrollTop', () => {
+ it('should commit SCROLL_TO_TOP mutation', done => {
+ testAction(scrollTop, null, mockedState, [{ type: types.SCROLL_TO_TOP }], [], done);
+ });
+ });
+
+ describe('scrollBottom', () => {
+ it('should commit SCROLL_TO_BOTTOM mutation', done => {
+ testAction(scrollBottom, null, mockedState, [{ type: types.SCROLL_TO_BOTTOM }], [], done);
+ });
+ });
+
+ describe('requestTrace', () => {
+ it('should commit REQUEST_TRACE mutation', done => {
+ testAction(requestTrace, null, mockedState, [{ type: types.REQUEST_TRACE }], [], done);
+ });
+ });
+
+ describe('fetchTrace', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.traceEndpoint = `${TEST_HOST}/endpoint`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ stopPolling();
+ clearEtagPoll();
+ });
+
+ describe('success', () => {
+ it('dispatches requestTrace, fetchFavicon, receiveTraceSuccess and stopPollingTrace when job is complete', done => {
+ mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, {
+ html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
+ complete: true,
+ });
+
+ testAction(
+ fetchTrace,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestTrace',
+ },
+ {
+ type: 'fetchFavicon',
+ },
+ {
+ payload: {
+ html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :', complete: true,
+ },
+ type: 'receiveTraceSuccess',
+ },
+ {
+ type: 'stopPollingTrace',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(`${TEST_HOST}/endpoint/trace.json`).reply(500);
+ });
+
+ it('dispatches requestTrace and receiveTraceError', done => {
+ testAction(
+ fetchTrace,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestTrace',
+ },
+ {
+ type: 'receiveTraceError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('stopPollingTrace', () => {
+ it('should commit STOP_POLLING_TRACE mutation', done => {
+ testAction(
+ stopPollingTrace,
+ null,
+ mockedState,
+ [{ type: types.STOP_POLLING_TRACE }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveTraceSuccess', () => {
+ it('should commit RECEIVE_TRACE_SUCCESS mutation', done => {
+ testAction(
+ receiveTraceSuccess,
+ 'hello world',
+ mockedState,
+ [{ type: types.RECEIVE_TRACE_SUCCESS, payload: 'hello world' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveTraceError', () => {
+ it('should commit RECEIVE_TRACE_ERROR mutation', done => {
+ testAction(
+ receiveTraceError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_TRACE_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchFavicon', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.pagePath = `${TEST_HOST}/endpoint`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestStatusFavicon and receiveStatusFaviconSuccess', done => {
+ mock.onGet(`${TEST_HOST}/endpoint/status.json`).replyOnce(200);
+
+ testAction(
+ fetchFavicon,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestStatusFavicon',
+ },
+ {
+ type: 'receiveStatusFaviconSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(`${TEST_HOST}/endpoint/status.json`).replyOnce(500);
+ });
+
+ it('dispatches requestStatusFavicon and requestStatusFaviconError', done => {
+ testAction(
+ fetchFavicon,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestStatusFavicon',
+ },
+ {
+ type: 'requestStatusFaviconError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestStatusFavicon', () => {
+ it('should commit REQUEST_STATUS_FAVICON mutation', done => {
+ testAction(
+ requestStatusFavicon,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_STATUS_FAVICON }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveStatusFaviconSuccess', () => {
+ it('should commit RECEIVE_STATUS_FAVICON_SUCCESS mutation', done => {
+ testAction(
+ receiveStatusFaviconSuccess,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_STATUS_FAVICON_SUCCESS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestStatusFaviconError', () => {
+ it('should commit RECEIVE_STATUS_FAVICON_ERROR mutation', done => {
+ testAction(
+ requestStatusFaviconError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_STATUS_FAVICON_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestStages', () => {
+ it('should commit REQUEST_STAGES mutation', done => {
+ testAction(requestStages, null, mockedState, [{ type: types.REQUEST_STAGES }], [], done);
+ });
+ });
+
+ describe('fetchStages', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.stagesEndpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestStages and receiveStagesSuccess', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, [{ id: 121212, name: 'build' }]);
+
+ testAction(
+ fetchStages,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestStages',
+ },
+ {
+ payload: [{ id: 121212, name: 'build' }],
+ type: 'receiveStagesSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
+ });
+
+ it('dispatches requestStages and receiveStagesError', done => {
+ testAction(
+ fetchStages,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestStages',
+ },
+ {
+ type: 'receiveStagesError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('receiveStagesSuccess', () => {
+ it('should commit RECEIVE_STAGES_SUCCESS mutation', done => {
+ testAction(
+ receiveStagesSuccess,
+ {},
+ mockedState,
+ [{ type: types.RECEIVE_STAGES_SUCCESS, payload: {} }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveStagesError', () => {
+ it('should commit RECEIVE_STAGES_ERROR mutation', done => {
+ testAction(
+ receiveStagesError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_STAGES_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestJobsForStage', () => {
+ it('should commit REQUEST_JOBS_FOR_STAGE mutation', done => {
+ testAction(
+ requestJobsForStage,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_JOBS_FOR_STAGE }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setSelectedStage', () => {
+ it('should commit SET_SELECTED_STAGE mutation', done => {
+ testAction(
+ setSelectedStage,
+ { name: 'build' },
+ mockedState,
+ [{ type: types.SET_SELECTED_STAGE, payload: { name: 'build' } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchJobsForStage', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.stageJobsEndpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches setSelectedStage, requestJobsForStage and receiveJobsForStageSuccess', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, [{ id: 121212, name: 'build' }]);
+
+ testAction(
+ fetchJobsForStage,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'setSelectedStage',
+ payload: null,
+ },
+ {
+ type: 'requestJobsForStage',
+ },
+ {
+ payload: [{ id: 121212, name: 'build' }],
+ type: 'receiveJobsForStageSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
+ });
+
+ it('dispatches setSelectedStage, requestJobsForStage and receiveJobsForStageError', done => {
+ testAction(
+ fetchJobsForStage,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ payload: null,
+ type: 'setSelectedStage',
+ },
+ {
+ type: 'requestJobsForStage',
+ },
+ {
+ type: 'receiveJobsForStageError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('receiveJobsForStageSuccess', () => {
+ it('should commit RECEIVE_JOBS_FOR_STAGE_SUCCESS mutation', done => {
+ testAction(
+ receiveJobsForStageSuccess,
+ [{ id: 121212, name: 'karma' }],
+ mockedState,
+ [{ type: types.RECEIVE_JOBS_FOR_STAGE_SUCCESS, payload: [{ id: 121212, name: 'karma' }] }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveJobsForStageError', () => {
+ it('should commit RECEIVE_JOBS_FOR_STAGE_ERROR mutation', done => {
+ testAction(
+ receiveJobsForStageError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_JOBS_FOR_STAGE_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/javascripts/jobs/store/mutations_spec.js b/spec/javascripts/jobs/store/mutations_spec.js
new file mode 100644
index 00000000000..6900b2e5602
--- /dev/null
+++ b/spec/javascripts/jobs/store/mutations_spec.js
@@ -0,0 +1,228 @@
+import state from '~/jobs/store/state';
+import mutations from '~/jobs/store/mutations';
+import * as types from '~/jobs/store/mutation_types';
+
+describe('Jobs Store Mutations', () => {
+ let stateCopy;
+
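+ // sample job trace line reused by the RECEIVE_TRACE_SUCCESS specs below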
+ const html =
+ 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
+
+ beforeEach(() => {
+ stateCopy = state();
+ });
+
+ describe('REQUEST_STATUS_FAVICON', () => {
+ it('should set fetchingStatusFavicon to true', () => {
+ mutations[types.REQUEST_STATUS_FAVICON](stateCopy);
+ expect(stateCopy.fetchingStatusFavicon).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_STATUS_FAVICON_SUCCESS', () => {
+ it('should set fetchingStatusFavicon to false', () => {
+ mutations[types.RECEIVE_STATUS_FAVICON_SUCCESS](stateCopy);
+ expect(stateCopy.fetchingStatusFavicon).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_STATUS_FAVICON_ERROR', () => {
+ it('should set fetchingStatusFavicon to false', () => {
+ mutations[types.RECEIVE_STATUS_FAVICON_ERROR](stateCopy);
+ expect(stateCopy.fetchingStatusFavicon).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_TRACE_SUCCESS', () => {
+ describe('when trace has state', () => {
+ it('sets traceState', () => {
+ const stateLog =
+ 'eyJvZmZzZXQiOjczNDQ1MSwibl9vcGVuX3RhZ3MiOjAsImZnX2NvbG9yIjpudWxsLCJiZ19jb2xvciI6bnVsbCwic3R5bGVfbWFzayI6MH0=';
+ mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ state: stateLog,
+ });
+ expect(stateCopy.traceState).toEqual(stateLog);
+ });
+ });
+
+ describe('when traceSize is smaller than the total size', () => {
+ it('sets isTraceSizeVisible to true', () => {
+ mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, { total: 51184600, size: 1231 });
+
+ expect(stateCopy.isTraceSizeVisible).toEqual(true);
+ });
+ });
+
+ describe('when traceSize is bigger than the total size', () => {
+ it('sets isTraceSizeVisible to false', () => {
+ const copy = Object.assign({}, stateCopy, { traceSize: 5118460, size: 2321312 });
+
+ mutations[types.RECEIVE_TRACE_SUCCESS](copy, { total: 511846 });
+
+ expect(copy.isTraceSizeVisible).toEqual(false);
+ });
+ });
+
+ it('sets trace, trace size and isTraceComplete', () => {
+ mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ append: true,
+ html,
+ size: 511846,
+ complete: true,
+ });
+ expect(stateCopy.trace).toEqual(html);
+ expect(stateCopy.traceSize).toEqual(511846);
+ expect(stateCopy.isTraceComplete).toEqual(true);
+ });
+ });
+
+ describe('STOP_POLLING_TRACE', () => {
+ it('sets isTraceComplete to true', () => {
+ mutations[types.STOP_POLLING_TRACE](stateCopy);
+ expect(stateCopy.isTraceComplete).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_TRACE_ERROR', () => {
+ it('resets trace state and sets error to true', () => {
+ mutations[types.RECEIVE_TRACE_ERROR](stateCopy);
+ expect(stateCopy.isLoadingTrace).toEqual(false);
+ expect(stateCopy.isTraceComplete).toEqual(true);
+ expect(stateCopy.hasTraceError).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_JOB', () => {
+ it('sets isLoading to true', () => {
+ mutations[types.REQUEST_JOB](stateCopy);
+
+ expect(stateCopy.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_JOB_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_JOB_SUCCESS](stateCopy, { id: 1312321 });
+ });
+
+ it('sets isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('sets hasError to false', () => {
+ expect(stateCopy.hasError).toEqual(false);
+ });
+
+ it('sets job data', () => {
+ expect(stateCopy.job).toEqual({ id: 1312321 });
+ });
+ });
+
+ describe('RECEIVE_JOB_ERROR', () => {
+ it('resets job data', () => {
+ mutations[types.RECEIVE_JOB_ERROR](stateCopy);
+
+ expect(stateCopy.isLoading).toEqual(false);
+ expect(stateCopy.hasError).toEqual(true);
+ expect(stateCopy.job).toEqual({});
+ });
+ });
+
+ describe('SCROLL_TO_TOP', () => {
+ beforeEach(() => {
+ mutations[types.SCROLL_TO_TOP](stateCopy);
+ });
+
+ it('sets isTraceScrolledToBottom to false', () => {
+ expect(stateCopy.isTraceScrolledToBottom).toEqual(false);
+ });
+
+ it('sets hasBeenScrolled to true', () => {
+ expect(stateCopy.hasBeenScrolled).toEqual(true);
+ });
+ });
+
+ describe('SCROLL_TO_BOTTOM', () => {
+ beforeEach(() => {
+ mutations[types.SCROLL_TO_BOTTOM](stateCopy);
+ });
+
+ it('sets isTraceScrolledToBottom to true', () => {
+ expect(stateCopy.isTraceScrolledToBottom).toEqual(true);
+ });
+
+ it('sets hasBeenScrolled to true', () => {
+ expect(stateCopy.hasBeenScrolled).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_STAGES', () => {
+ it('sets isLoadingStages to true', () => {
+ mutations[types.REQUEST_STAGES](stateCopy);
+ expect(stateCopy.isLoadingStages).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_STAGES_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_STAGES_SUCCESS](stateCopy, [{ name: 'build' }]);
+ });
+
+ it('sets isLoadingStages to false', () => {
+ expect(stateCopy.isLoadingStages).toEqual(false);
+ });
+
+ it('sets stages', () => {
+ expect(stateCopy.stages).toEqual([{ name: 'build' }]);
+ });
+ });
+
+ describe('RECEIVE_STAGES_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_STAGES_ERROR](stateCopy);
+ });
+
+ it('sets isLoadingStages to false', () => {
+ expect(stateCopy.isLoadingStages).toEqual(false);
+ });
+
+ it('resets stages', () => {
+ expect(stateCopy.stages).toEqual([]);
+ });
+ });
+
+ describe('REQUEST_JOBS_FOR_STAGE', () => {
+ it('sets isLoadingJobs to true', () => {
+ mutations[types.REQUEST_JOBS_FOR_STAGE](stateCopy);
+ expect(stateCopy.isLoadingJobs).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_JOBS_FOR_STAGE_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_JOBS_FOR_STAGE_SUCCESS](stateCopy, [{ name: 'karma' }]);
+ });
+
+ it('sets isLoadingJobs to false', () => {
+ expect(stateCopy.isLoadingJobs).toEqual(false);
+ });
+
+ it('sets jobs', () => {
+ expect(stateCopy.jobs).toEqual([{ name: 'karma' }]);
+ });
+ });
+
+ describe('RECEIVE_JOBS_FOR_STAGE_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_JOBS_FOR_STAGE_ERROR](stateCopy);
+ });
+
+ it('sets isLoadingJobs to false', () => {
+ expect(stateCopy.isLoadingJobs).toEqual(false);
+ });
+
+ it('resets jobs', () => {
+ expect(stateCopy.jobs).toEqual([]);
+ });
+ });
+});
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index 71b26a315af..babad296f09 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -403,6 +403,7 @@ describe('common_utils', () => {
afterEach(() => {
document.body.removeChild(document.getElementById('favicon'));
});
+
it('should set page favicon to provided favicon', () => {
const faviconPath = '//custom_favicon';
commonUtils.setFavicon(faviconPath);
@@ -479,17 +480,14 @@ describe('common_utils', () => {
});
it('should reset favicon in case of error', (done) => {
- mock.onGet(BUILD_URL).networkError();
+ mock.onGet(BUILD_URL).replyOnce(500);
commonUtils.setCiStatusFavicon(BUILD_URL)
- .then(() => {
+ .catch(() => {
const favicon = document.getElementById('favicon');
expect(favicon.getAttribute('href')).toEqual(faviconDataUrl);
done();
- })
- // Error is already caught in catch() block of setCiStatusFavicon,
- // It won't throw another error for us to catch
- .catch(done.fail);
+ });
});
it('should set page favicon to CI status favicon based on provided status', (done) => {
diff --git a/spec/javascripts/read_more_spec.js b/spec/javascripts/read_more_spec.js
new file mode 100644
index 00000000000..b1af0f80a50
--- /dev/null
+++ b/spec/javascripts/read_more_spec.js
@@ -0,0 +1,23 @@
+import initReadMore from '~/read_more';
+
+describe('Read more click-to-expand functionality', () => {
+ const fixtureName = 'projects/overview.html.raw';
+
+ preloadFixtures(fixtureName);
+
+ beforeEach(() => {
+ loadFixtures(fixtureName);
+ });
+
+ describe('expands target element', () => {
+ it('adds "is-expanded" class to target element', () => {
+ const target = document.querySelector('.read-more-container');
+ const trigger = document.querySelector('.js-read-more-trigger');
+ initReadMore();
+
+ trigger.click();
+
+ expect(target.classList.contains('is-expanded')).toEqual(true);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js
index 0e42537faed..237e2fa79f2 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js
@@ -114,28 +114,31 @@ describe('MRWidgetHeader', () => {
});
describe('with an open merge request', () => {
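+ // default props shared by the open merge request specs; individual specs override fields via Object.assign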
+ const mrDefaultOptions = {
+ iid: 1,
+ divergedCommitsCount: 12,
+ sourceBranch: 'mr-widget-refactor',
+ sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>',
+ sourceBranchRemoved: false,
+ targetBranchPath: 'foo/bar/commits-path',
+ targetBranchTreePath: 'foo/bar/tree/path',
+ targetBranch: 'master',
+ isOpen: true,
+ canPushToSourceBranch: true,
+ emailPatchesPath: '/mr/email-patches',
+ plainDiffPath: '/mr/plainDiffPath',
+ statusPath: 'abc',
+ sourceProjectFullPath: 'root/gitlab-ce',
+ targetProjectFullPath: 'gitlab-org/gitlab-ce',
+ };
+
afterEach(() => {
vm.$destroy();
});
beforeEach(() => {
vm = mountComponent(Component, {
- mr: {
- iid: 1,
- divergedCommitsCount: 12,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>',
- sourceBranchRemoved: false,
- targetBranchPath: 'foo/bar/commits-path',
- targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'master',
- isOpen: true,
- emailPatchesPath: '/mr/email-patches',
- plainDiffPath: '/mr/plainDiffPath',
- statusPath: 'abc',
- sourceProjectFullPath: 'root/gitlab-ce',
- targetProjectFullPath: 'gitlab-org/gitlab-ce',
- },
+ mr: Object.assign({}, mrDefaultOptions),
});
});
@@ -151,11 +154,21 @@ describe('MRWidgetHeader', () => {
const button = vm.$el.querySelector('.js-web-ide');
expect(button.textContent.trim()).toEqual('Open in Web IDE');
+ expect(button.classList.contains('disabled')).toBe(false);
expect(button.getAttribute('href')).toEqual(
'/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=gitlab-org%2Fgitlab-ce',
);
});
+ it('renders web ide button in disabled state with no href', () => {
+ const mr = Object.assign({}, mrDefaultOptions, { canPushToSourceBranch: false });
+ vm = mountComponent(Component, { mr });
+
+ const link = vm.$el.querySelector('.js-web-ide');
+ expect(link.classList.contains('disabled')).toBe(true);
+ expect(link.getAttribute('href')).toBeNull();
+ });
+
it('renders web ide button with blank query string if target & source project branch', done => {
vm.mr.targetProjectFullPath = 'root/gitlab-ce';
diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
index 50d053011b3..b9059b85fdc 100644
--- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
@@ -7,6 +7,7 @@ describe Banzai::Filter::WikiLinkFilter do
let(:project) { build_stubbed(:project, :public, name: "wiki_link_project", namespace: namespace) }
let(:user) { double }
let(:wiki) { ProjectWiki.new(project, user) }
+ let(:repository_upload_folder) { Wikis::CreateAttachmentService::ATTACHMENT_PATH }
it "doesn't rewrite absolute links" do
filtered_link = filter("<a href='http://example.com:8000/'>Link</a>", project_wiki: wiki).children[0]
@@ -20,6 +21,45 @@ describe Banzai::Filter::WikiLinkFilter do
expect(filtered_link.attribute('href').value).to eq('/uploads/a.test')
end
+ describe "when links point to the #{Wikis::CreateAttachmentService::ATTACHMENT_PATH} folder" do
+ context 'with an "a" html tag' do
+ it 'rewrites links' do
+ filtered_link = filter("<a href='#{repository_upload_folder}/a.test'>Link</a>", project_wiki: wiki).children[0]
+
+ expect(filtered_link.attribute('href').value).to eq("#{wiki.wiki_base_path}/#{repository_upload_folder}/a.test")
+ end
+ end
+
+ context 'with "img" html tag' do
+ let(:path) { "#{wiki.wiki_base_path}/#{repository_upload_folder}/a.jpg" }
+
+ context 'inside an "a" html tag' do
+ it 'rewrites links' do
+ filtered_elements = filter("<a href='#{repository_upload_folder}/a.jpg'><img src='#{repository_upload_folder}/a.jpg'>example</img></a>", project_wiki: wiki)
+
+ expect(filtered_elements.search('img').first.attribute('src').value).to eq(path)
+ expect(filtered_elements.search('a').first.attribute('href').value).to eq(path)
+ end
+ end
+
+ context 'outside an "a" html tag' do
+ it 'rewrites links' do
+ filtered_link = filter("<img src='#{repository_upload_folder}/a.jpg'>example</img>", project_wiki: wiki).children[0]
+
+ expect(filtered_link.attribute('src').value).to eq(path)
+ end
+ end
+ end
+
+ context 'with "video" html tag' do
+ it 'rewrites links' do
+ filtered_link = filter("<video src='#{repository_upload_folder}/a.mp4'></video>", project_wiki: wiki).children[0]
+
+ expect(filtered_link.attribute('src').value).to eq("#{wiki.wiki_base_path}/#{repository_upload_folder}/a.mp4")
+ end
+ end
+ end
+
describe "invalid links" do
invalid_links = ["http://:8080", "http://", "http://:8080/path"]
diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
index 88ae4c1e07a..52b8c9be647 100644
--- a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
@@ -121,6 +121,13 @@ describe Banzai::Pipeline::WikiPipeline do
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/page\"")
end
+ it 'rewrites non-file links (with spaces) to be at the scope of the wiki root' do
+ markdown = "[Link to Page](page slug)"
+ output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+
+ expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/page%20slug\"")
+ end
+
it "rewrites file links to be at the scope of the current directory" do
markdown = "[Link to Page](page.md)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
@@ -134,6 +141,13 @@ describe Banzai::Pipeline::WikiPipeline do
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/start-page#title\"")
end
+
+ it 'rewrites links (with spaces) with anchor' do
+ markdown = '[Link to Header](start page#title)'
+ output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+
+ expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/start%20page#title\"")
+ end
end
describe "when creating root links" do
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 8bb5a843484..48c0ba8a653 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -119,6 +119,10 @@ describe Feature do
expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
end
+ it 'returns true for undefined feature with default_enabled' do
+ expect(described_class.enabled?(:some_random_feature_flag, default_enabled: true)).to be_truthy
+ end
+
it 'returns false for existing disabled feature in the database' do
described_class.disable(:disabled_feature_flag)
@@ -160,6 +164,10 @@ describe Feature do
expect(described_class.disabled?(:some_random_feature_flag)).to be_truthy
end
+ it 'returns false for undefined feature with default_enabled' do
+ expect(described_class.disabled?(:some_random_feature_flag, default_enabled: true)).to be_falsey
+ end
+
it 'returns true for existing disabled feature in the database' do
described_class.disable(:disabled_feature_flag)
diff --git a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
new file mode 100644
index 00000000000..2d1505dacfe
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
@@ -0,0 +1,156 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts, :migration, schema: 20180816161409 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:pipelines) { table(:ci_pipelines) }
+ let(:jobs) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+
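+ # range is defined per context as the [start_id, stop_id] pair of ci_builds rows to migrate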
+ subject { described_class.new.perform(*range) }
+
+ context 'when a pipeline exists' do
+ let!(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let!(:project) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
+ let!(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
+
+ context 'when legacy artifacts exist' do
+ let(:artifacts_expire_at) { 1.day.since.to_s }
+ let(:file_store) { ::ObjectStorage::Store::REMOTE }
+
+ let!(:job) do
+ jobs.create!(
+ commit_id: pipeline.id,
+ project_id: project.id,
+ status: :success,
+ **artifacts_archive_attributes,
+ **artifacts_metadata_attributes)
+ end
+
+ let(:artifacts_archive_attributes) do
+ {
+ artifacts_file: 'archive.zip',
+ artifacts_file_store: file_store,
+ artifacts_size: 123,
+ artifacts_expire_at: artifacts_expire_at
+ }
+ end
+
+ let(:artifacts_metadata_attributes) do
+ {
+ artifacts_metadata: 'metadata.gz',
+ artifacts_metadata_store: file_store
+ }
+ end
+
+ it 'has legacy artifacts' do
+ expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([artifacts_archive_attributes.values])
+ expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([artifacts_metadata_attributes.values])
+ end
+
+ it 'does not have new artifacts yet' do
+ expect(job_artifacts.count).to be_zero
+ end
+
+ context 'when the record exists inside of the range of a background migration' do
+ let(:range) { [job.id, job.id] }
+
+ it 'migrates a legacy artifact to ci_job_artifacts table' do
+ expect { subject }.to change { job_artifacts.count }.by(2)
+
+ expect(job_artifacts.order(:id).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
+ .to eq([[project.id,
+ job.id,
+ described_class::ARCHIVE_FILE_TYPE,
+ file_store,
+ artifacts_archive_attributes[:artifacts_size],
+ artifacts_expire_at,
+ 'archive.zip',
+ nil,
+ described_class::LEGACY_PATH_FILE_LOCATION],
+ [project.id,
+ job.id,
+ described_class::METADATA_FILE_TYPE,
+ file_store,
+ nil,
+ artifacts_expire_at,
+ 'metadata.gz',
+ nil,
+ described_class::LEGACY_PATH_FILE_LOCATION]])
+
+ expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, artifacts_expire_at]])
+ expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
+ end
+
+ context 'when file_store is nil' do
+ let(:file_store) { nil }
+
+ it 'has nullified file_store in all legacy artifacts' do
+ expect(jobs.pluck('artifacts_file_store, artifacts_metadata_store')).to eq([[nil, nil]])
+ end
+
+ it 'fills file_store with the local file store value' do
+ subject
+
+ expect(job_artifacts.pluck('file_store')).to all(eq(::ObjectStorage::Store::LOCAL))
+ end
+ end
+
+ context 'when new artifacts already exist' do
+ context 'when only archive.zip existed' do
+ before do
+ job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
+ end
+
+ it 'had archive.zip already' do
+ expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE)).to be_truthy
+ end
+
+ it 'migrates metadata' do
+ expect { subject }.to change { job_artifacts.count }.by(1)
+
+ expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::METADATA_FILE_TYPE)).to be_truthy
+ end
+ end
+
+ context 'when both archive and metadata existed' do
+ before do
+ job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
+ job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::METADATA_FILE_TYPE, size: 999, file: 'metadata.zip')
+ end
+
+ it 'does not migrate' do
+ expect { subject }.not_to change { job_artifacts.count }
+ end
+ end
+ end
+ end
+
+ context 'when the record exists outside of the range of a background migration' do
+ let(:range) { [job.id + 1, job.id + 1] }
+
+ it 'does not migrate' do
+ expect { subject }.not_to change { job_artifacts.count }
+ end
+ end
+ end
+
+ context 'when the job does not have legacy artifacts' do
+ let!(:job) { jobs.create!(commit_id: pipeline.id, project_id: project.id, status: :success) }
+
+ it 'does not have the legacy artifacts in database' do
+ expect(jobs.count).to eq(1)
+ expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, nil]])
+ expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
+ end
+
+ context 'when the record exists inside of the range of a background migration' do
+ let(:range) { [job.id, job.id] }
+
+ it 'does not migrate' do
+ expect { subject }.not_to change { job_artifacts.count }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 6769f64f950..2c9758401b7 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -1,4 +1,5 @@
-require 'spec_helper'
+require 'fast_spec_helper'
+require_dependency 'active_model'
describe Gitlab::Ci::Config::Entry::Job do
let(:entry) { described_class.new(config, name: :rspec) }
@@ -81,6 +82,15 @@ describe Gitlab::Ci::Config::Entry::Job do
end
end
+ context 'when extends key is not a string' do
+ let(:config) { { extends: 123 } }
+
+ it 'returns error about wrong value type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include "job extends should be a string"
+ end
+ end
+
context 'when retry value is not correct' do
context 'when it is not a numeric value' do
let(:config) { { retry: true } }
@@ -124,6 +134,8 @@ describe Gitlab::Ci::Config::Entry::Job do
describe '#relevant?' do
it 'is a relevant entry' do
+ entry = described_class.new({ script: 'rspec' }, name: :rspec)
+
expect(entry).to be_relevant
end
end
diff --git a/spec/lib/gitlab/ci/config/extendable/entry_spec.rb b/spec/lib/gitlab/ci/config/extendable/entry_spec.rb
new file mode 100644
index 00000000000..0a148375d11
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/extendable/entry_spec.rb
@@ -0,0 +1,227 @@
+require 'fast_spec_helper'
+
+describe Gitlab::Ci::Config::Extendable::Entry do
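+ # Wraps a single key of the CI config hash and resolves its extends: chain in place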
+ describe '.new' do
+ context 'when entry key is not included in the context hash' do
+ it 'raises error' do
+ expect { described_class.new(:test, something: 'something') }
+ .to raise_error StandardError, 'Invalid entry key!'
+ end
+ end
+ end
+
+ describe '#value' do
+ it 'reads a hash value from the context' do
+ entry = described_class.new(:test, test: 'something')
+
+ expect(entry.value).to eq 'something'
+ end
+ end
+
+ describe '#extensible?' do
+ context 'when entry has inheritance defined' do
+ it 'is extensible' do
+ entry = described_class.new(:test, test: { extends: 'something' })
+
+ expect(entry).to be_extensible
+ end
+ end
+
+ context 'when entry does not have inheritance specified' do
+ it 'is not extensible' do
+ entry = described_class.new(:test, test: { script: 'something' })
+
+ expect(entry).not_to be_extensible
+ end
+ end
+
+ context 'when entry value is not a hash' do
+ it 'is not extensible' do
+ entry = described_class.new(:test, test: 'something')
+
+ expect(entry).not_to be_extensible
+ end
+ end
+ end
+
+ describe '#extends_key' do
+ context 'when entry is extensible' do
+ it 'returns symbolized extends key value' do
+ entry = described_class.new(:test, test: { extends: 'something' })
+
+ expect(entry.extends_key).to eq :something
+ end
+ end
+
+ context 'when entry is not extensible' do
+ it 'returns nil' do
+ entry = described_class.new(:test, test: 'something')
+
+ expect(entry.extends_key).to be_nil
+ end
+ end
+ end
+
+ describe '#ancestors' do
+ let(:parent) do
+ described_class.new(:test, test: { extends: 'something' })
+ end
+
+ let(:child) do
+ described_class.new(:job, { job: { script: 'something' } }, parent)
+ end
+
+ it 'returns ancestor keys' do
+ expect(child.ancestors).to eq [:test]
+ end
+ end
+
+ describe '#base_hash!' do
+ subject { described_class.new(:test, hash) }
+
+ context 'when base hash is not extensible' do
+ let(:hash) do
+ {
+ template: { script: 'rspec' },
+ test: { extends: 'template' }
+ }
+ end
+
+ it 'returns unchanged base hash' do
+ expect(subject.base_hash!).to eq(script: 'rspec')
+ end
+ end
+
+ context 'when base hash is extensible too' do
+ let(:hash) do
+ {
+ first: { script: 'rspec' },
+ second: { extends: 'first' },
+ test: { extends: 'second' }
+ }
+ end
+
+ it 'extends the base hash first' do
+ expect(subject.base_hash!).to eq(extends: 'first', script: 'rspec')
+ end
+
+ it 'mutates original context' do
+ subject.base_hash!
+
+ expect(hash.fetch(:second)).to eq(extends: 'first', script: 'rspec')
+ end
+ end
+ end
+
+ describe '#extend!' do
+ subject { described_class.new(:test, hash) }
+
+ context 'when extending a non-hash value' do
+ let(:hash) do
+ {
+ first: 'my value',
+ test: { extends: 'first' }
+ }
+ end
+
+ it 'raises an error' do
+ expect { subject.extend! }
+ .to raise_error(described_class::InvalidExtensionError,
+ /invalid base hash/)
+ end
+ end
+
+ context 'when extending unknown key' do
+ let(:hash) do
+ { test: { extends: 'something' } }
+ end
+
+ it 'raises an error' do
+ expect { subject.extend! }
+ .to raise_error(described_class::InvalidExtensionError,
+ /unknown key/)
+ end
+ end
+
+ context 'when extending a hash correctly' do
+ let(:hash) do
+ {
+ first: { script: 'my value' },
+ second: { extends: 'first' },
+ test: { extends: 'second' }
+ }
+ end
+
+ let(:result) do
+ {
+ first: { script: 'my value' },
+ second: { extends: 'first', script: 'my value' },
+ test: { extends: 'second', script: 'my value' }
+ }
+ end
+
+ it 'returns extended part of the hash' do
+ expect(subject.extend!).to eq result[:test]
+ end
+
+ it 'mutates original context' do
+ subject.extend!
+
+ expect(hash).to eq result
+ end
+ end
+
+ context 'when hash is not extensible' do
+ let(:hash) do
+ {
+ first: { script: 'my value' },
+ second: { extends: 'first' },
+ test: { value: 'something' }
+ }
+ end
+
+ it 'returns original key value' do
+ expect(subject.extend!).to eq(value: 'something')
+ end
+
+ it 'does not mutate original context' do
+ original = hash.deep_dup
+
+ subject.extend!
+
+ expect(hash).to eq original
+ end
+ end
+
+ context 'when circular dependency gets detected' do
+ let(:hash) do
+ { test: { extends: 'test' } }
+ end
+
+ it 'raises an error' do
+ expect { subject.extend! }
+ .to raise_error(described_class::CircularDependencyError,
+ /circular dependency detected/)
+ end
+ end
+
+ context 'when nesting level is too deep' do
+ before do
+ stub_const("#{described_class}::MAX_NESTING_LEVELS", 0)
+ end
+
+ let(:hash) do
+ {
+ first: { script: 'my value' },
+ second: { extends: 'first' },
+ test: { extends: 'second' }
+ }
+ end
+
+ it 'raises an error' do
+ expect { subject.extend! }
+ .to raise_error(described_class::NestingTooDeepError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/extendable_spec.rb b/spec/lib/gitlab/ci/config/extendable_spec.rb
new file mode 100644
index 00000000000..90213f6603d
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/extendable_spec.rb
@@ -0,0 +1,228 @@
+require 'fast_spec_helper'
+
+describe Gitlab::Ci::Config::Extendable do
+ subject { described_class.new(hash) }
+
+ describe '#each' do
+ context 'when there is extendable entry in the hash' do
+ let(:test) do
+ { extends: 'something', only: %w[master] }
+ end
+
+ let(:hash) do
+ { something: { script: 'ls' }, test: test }
+ end
+
+ it 'yields control' do
+ expect { |b| subject.each(&b) }.to yield_control
+ end
+ end
+ end
+
+ describe '#to_hash' do
+ context 'when hash does not contain extensions' do
+ let(:hash) do
+ {
+ test: { script: 'test' },
+ production: {
+ script: 'deploy',
+ only: { variables: %w[$SOMETHING] }
+ }
+ }
+ end
+
+ it 'does not modify the hash' do
+ expect(subject.to_hash).to eq hash
+ end
+ end
+
+ context 'when hash has a single simple extension' do
+ let(:hash) do
+ {
+ something: {
+ script: 'deploy',
+ only: { variables: %w[$SOMETHING] }
+ },
+
+ test: {
+ extends: 'something',
+ script: 'ls',
+ only: { refs: %w[master] }
+ }
+ }
+ end
+
+ it 'extends a hash with a deep reverse merge' do
+ expect(subject.to_hash).to eq(
+ something: {
+ script: 'deploy',
+ only: { variables: %w[$SOMETHING] }
+ },
+
+ test: {
+ extends: 'something',
+ script: 'ls',
+ only: {
+ refs: %w[master],
+ variables: %w[$SOMETHING]
+ }
+ }
+ )
+ end
+ end
+
+ context 'when a hash uses recursive extensions' do
+ let(:hash) do
+ {
+ test: {
+ extends: 'something',
+ script: 'ls',
+ only: { refs: %w[master] }
+ },
+
+ build: {
+ extends: 'something',
+ stage: 'build'
+ },
+
+ deploy: {
+ stage: 'deploy',
+ extends: '.first'
+ },
+
+ something: {
+ extends: '.first',
+ script: 'exec',
+ only: { variables: %w[$SOMETHING] }
+ },
+
+ '.first': {
+ script: 'run',
+ only: { kubernetes: 'active' }
+ }
+ }
+ end
+
+ it 'extends a hash with a deep reverse merge' do
+ expect(subject.to_hash).to eq(
+ '.first': {
+ script: 'run',
+ only: { kubernetes: 'active' }
+ },
+
+ something: {
+ extends: '.first',
+ script: 'exec',
+ only: {
+ kubernetes: 'active',
+ variables: %w[$SOMETHING]
+ }
+ },
+
+ deploy: {
+ script: 'run',
+ stage: 'deploy',
+ only: { kubernetes: 'active' },
+ extends: '.first'
+ },
+
+ build: {
+ extends: 'something',
+ script: 'exec',
+ stage: 'build',
+ only: {
+ kubernetes: 'active',
+ variables: %w[$SOMETHING]
+ }
+ },
+
+ test: {
+ extends: 'something',
+ script: 'ls',
+ only: {
+ refs: %w[master],
+ variables: %w[$SOMETHING],
+ kubernetes: 'active'
+ }
+ }
+ )
+ end
+ end
+
+ context 'when a nested circular dependency is detected' do
+ let(:hash) do
+ {
+ test: {
+ extends: 'something',
+ script: 'ls',
+ only: { refs: %w[master] }
+ },
+
+ something: {
+ extends: '.first',
+ script: 'deploy',
+ only: { variables: %w[$SOMETHING] }
+ },
+
+ '.first': {
+ extends: 'something',
+ script: 'run',
+ only: { kubernetes: 'active' }
+ }
+ }
+ end
+
+ it 'raises an error about circular dependency' do
+ expect { subject.to_hash }
+ .to raise_error(described_class::Entry::CircularDependencyError)
+ end
+ end
+
+ context 'when a circular dependency to self is detected' do
+ let(:hash) do
+ {
+ test: {
+ extends: 'test',
+ script: 'ls',
+ only: { refs: %w[master] }
+ }
+ }
+ end
+
+ it 'raises an error about circular dependency' do
+ expect { subject.to_hash }
+ .to raise_error(described_class::Entry::CircularDependencyError)
+ end
+ end
+
+ context 'when invalid extends value is specified' do
+ let(:hash) do
+ { something: { extends: 1, script: 'ls' } }
+ end
+
+ it 'raises an error about invalid extension' do
+ expect { subject.to_hash }
+ .to raise_error(described_class::Entry::InvalidExtensionError)
+ end
+ end
+
+ context 'when extensible entry has non-hash inheritance defined' do
+ let(:hash) do
+ {
+ test: {
+ extends: 'something',
+ script: 'ls',
+ only: { refs: %w[master] }
+ },
+
+ something: 'some text'
+ }
+ end
+
+ it 'raises an error about invalid base' do
+ expect { subject.to_hash }
+ .to raise_error(described_class::Entry::InvalidExtensionError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 2e204da307d..5a78ce783dd 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -1,4 +1,6 @@
-require 'spec_helper'
+require 'fast_spec_helper'
+
+require_dependency 'active_model'
describe Gitlab::Ci::Config do
let(:config) do
@@ -42,6 +44,36 @@ describe Gitlab::Ci::Config do
end
end
+ context 'when using extendable hash' do
+ let(:yml) do
+ <<-EOS
+ image: ruby:2.2
+
+ rspec:
+ script: rspec
+
+ test:
+ extends: rspec
+ image: ruby:alpine
+ EOS
+ end
+
+ it 'correctly extends the hash' do
+ hash = {
+ image: 'ruby:2.2',
+ rspec: { script: 'rspec' },
+ test: {
+ extends: 'rspec',
+ image: 'ruby:alpine',
+ script: 'rspec'
+ }
+ }
+
+ expect(config).to be_valid
+ expect(config.to_hash).to eq hash
+ end
+ end
+
context 'when config is invalid' do
context 'when yml is incorrect' do
let(:yml) { '// invalid' }
@@ -49,7 +81,7 @@ describe Gitlab::Ci::Config do
describe '.new' do
it 'raises error' do
expect { config }.to raise_error(
- ::Gitlab::Ci::Config::Loader::FormatError,
+ described_class::ConfigError,
/Invalid configuration format/
)
end
@@ -75,5 +107,21 @@ describe Gitlab::Ci::Config do
end
end
end
+
+ context 'when invalid extended hash has been provided' do
+ let(:yml) do
+ <<-EOS
+ test:
+ extends: test
+ script: rspec
+ EOS
+ end
+
+ it 'raises an error' do
+ expect { config }.to raise_error(
+ described_class::ConfigError, /circular dependency detected/
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/junit_spec.rb b/spec/lib/gitlab/ci/parsers/junit_spec.rb
index f7ec86f5385..47497f06229 100644
--- a/spec/lib/gitlab/ci/parsers/junit_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/junit_spec.rb
@@ -1,4 +1,4 @@
-require 'spec_helper'
+require 'fast_spec_helper'
describe Gitlab::Ci::Parsers::Junit do
describe '#parse!' do
@@ -8,21 +8,35 @@ describe Gitlab::Ci::Parsers::Junit do
let(:test_cases) { flattened_test_cases(test_suite) }
context 'when data is JUnit style XML' do
- context 'when there are no test cases' do
+ context 'when there are no <testcase> elements in <testsuite>' do
let(:junit) do
<<-EOF.strip_heredoc
<testsuite></testsuite>
EOF
end
- it 'raises an error and does not add any test cases' do
- expect { subject }.to raise_error(described_class::JunitParserError)
+ it 'ignores the case' do
+ expect { subject }.not_to raise_error
+
+ expect(test_cases.count).to eq(0)
+ end
+ end
+
+ context 'when there are no <testcase> elements in <testsuites>' do
+ let(:junit) do
+ <<-EOF.strip_heredoc
+ <testsuites><testsuite /></testsuites>
+ EOF
+ end
+
+ it 'ignores the case' do
+ expect { subject }.not_to raise_error
expect(test_cases.count).to eq(0)
end
end
- context 'when there is a test case' do
+ context 'when there is only one <testcase> in <testsuite>' do
let(:junit) do
<<-EOF.strip_heredoc
<testsuite>
@@ -40,6 +54,46 @@ describe Gitlab::Ci::Parsers::Junit do
end
end
+ context 'when there is only one <testsuite> in <testsuites>' do
+ let(:junit) do
+ <<-EOF.strip_heredoc
+ <testsuites>
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
+ </testsuite>
+ </testsuites>
+ EOF
+ end
+
+ it 'parses XML and adds a test case to a suite' do
+ expect { subject }.not_to raise_error
+
+ expect(test_cases[0].classname).to eq('Calculator')
+ expect(test_cases[0].name).to eq('sumTest1')
+ expect(test_cases[0].execution_time).to eq(0.01)
+ end
+ end
+
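+ # PHPUnit output nests <testsuite> elements inside <testsuites>; the parser is expected to collect the nested test cases.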
+ context 'PHPUnit' do
+ let(:junit) do
+ <<-EOF.strip_heredoc
+ <testsuites>
+ <testsuite name="Project Test Suite" tests="1" assertions="1" failures="0" errors="0" time="1.376748">
+ <testsuite name="XXX\\FrontEnd\\WebBundle\\Tests\\Controller\\LogControllerTest" file="/Users/mcfedr/projects/xxx/server/tests/XXX/FrontEnd/WebBundle/Tests/Controller/LogControllerTest.php" tests="1" assertions="1" failures="0" errors="0" time="1.376748">
+ <testcase name="testIndexAction" class="XXX\\FrontEnd\\WebBundle\\Tests\\Controller\\LogControllerTest" file="/Users/mcfedr/projects/xxx/server/tests/XXX/FrontEnd/WebBundle/Tests/Controller/LogControllerTest.php" line="9" assertions="1" time="1.376748"/>
+ </testsuite>
+ </testsuite>
+ </testsuites>
+ EOF
+ end
+
+ it 'parses XML and adds a test case to a suite' do
+ expect { subject }.not_to raise_error
+
+ expect(test_cases.count).to eq(1)
+ end
+ end
+
context 'when there are two test cases' do
let(:junit) do
<<-EOF.strip_heredoc
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index e73cdc54a15..fcbdf71a4e9 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -562,6 +562,58 @@ module Gitlab
end
end
+ context 'when using `extends`' do
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+
+ subject { config_processor.builds.first }
+
+ context 'when using simple `extends`' do
+ let(:config) do
+ <<~YAML
+ .template:
+ script: test
+
+ rspec:
+ extends: .template
+ image: ruby:alpine
+ YAML
+ end
+
+ it 'correctly extends rspec job' do
+ expect(config_processor.builds).to be_one
+ expect(subject.dig(:commands)).to eq 'test'
+ expect(subject.dig(:options, :image, :name)).to eq 'ruby:alpine'
+ end
+ end
+
+ context 'when using recursive `extends`' do
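+ # rspec extends .test, which in turn extends .template, so the job should inherit keys from both templates.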
+ let(:config) do
+ <<~YAML
+ rspec:
+ extends: .test
+ script: rspec
+ when: always
+
+ .template:
+ before_script:
+ - bundle install
+
+ .test:
+ extends: .template
+ script: test
+ image: image:test
+ YAML
+ end
+
+ it 'correctly extends rspec job' do
+ expect(config_processor.builds).to be_one
+ expect(subject.dig(:commands)).to eq "bundle install\nrspec"
+ expect(subject.dig(:options, :image, :name)).to eq 'image:test'
+ expect(subject.dig(:when)).to eq 'always'
+ end
+ end
+ end
+
describe "When" do
%w(on_success on_failure always).each do |when_state|
it "returns #{when_state} when defined" do
@@ -1309,6 +1361,14 @@ module Gitlab
.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:only variables invalid expression syntax')
end
+
+ it 'returns errors if extended hash configuration is invalid' do
+ config = YAML.dump({ rspec: { extends: 'something', script: 'test' } })
+
+ expect { Gitlab::Ci::YamlProcessor.new(config) }
+ .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'rspec: unknown key in `extends`')
+ end
end
describe "Validate configuration templates" do
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index 154ab4b3856..1d75e8cb5da 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Email::Handler::CreateIssueHandler do
diff --git a/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
index 43c6280f251..ace3104f36f 100644
--- a/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Email::Handler::CreateMergeRequestHandler do
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index 950a7dd7d6c..b1f48c15c21 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Email::Handler::CreateNoteHandler do
diff --git a/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb b/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
index ce160e11de2..b8660b133ec 100644
--- a/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Email::Handler::UnsubscribeHandler do
diff --git a/spec/lib/gitlab/email/handler_spec.rb b/spec/lib/gitlab/email/handler_spec.rb
index cedbfcc0d18..c651765dc0f 100644
--- a/spec/lib/gitlab/email/handler_spec.rb
+++ b/spec/lib/gitlab/email/handler_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Email::Handler do
@@ -40,7 +42,7 @@ describe Gitlab::Email::Handler do
end
def ce_handlers
- @ce_handlers ||= Gitlab::Email::Handler::HANDLERS.reject do |handler|
+ @ce_handlers ||= Gitlab::Email::Handler.handlers.reject do |handler|
handler.name.start_with?('Gitlab::Email::Handler::EE::')
end
end
diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb
index 8e524f9b05a..9e351368b22 100644
--- a/spec/lib/gitlab/file_detector_spec.rb
+++ b/spec/lib/gitlab/file_detector_spec.rb
@@ -29,11 +29,15 @@ describe Gitlab::FileDetector do
end
it 'returns the type of a license file' do
- %w(LICENSE LICENCE COPYING).each do |file|
+ %w(LICENSE LICENCE COPYING UNLICENSE UNLICENCE).each do |file|
expect(described_class.type_of(file)).to eq(:license)
end
end
+ it 'returns nil for an UNCOPYING file' do
+ expect(described_class.type_of('UNCOPYING')).to be_nil
+ end
+
it 'returns the type of a version file' do
expect(described_class.type_of('VERSION')).to eq(:version)
end
diff --git a/spec/lib/gitlab/file_markdown_link_builder_spec.rb b/spec/lib/gitlab/file_markdown_link_builder_spec.rb
new file mode 100644
index 00000000000..feb2776c5d0
--- /dev/null
+++ b/spec/lib/gitlab/file_markdown_link_builder_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+require 'rails_helper'
+
+describe Gitlab::FileMarkdownLinkBuilder do
+ let(:custom_class) do
+ Class.new do
+ include Gitlab::FileMarkdownLinkBuilder
+ end.new
+ end
+
+ before do
+ allow(custom_class).to receive(:filename).and_return(filename)
+ end
+
+ describe 'markdown_link' do
+ let(:url) { "/uploads/#{filename}"}
+
+ before do
+ allow(custom_class).to receive(:secure_url).and_return(url)
+ end
+
+ context 'when file name has the character ]' do
+ let(:filename) { 'd]k.png' }
+
+ it 'escapes the character' do
+ expect(custom_class.markdown_link).to eq '![d\\]k](/uploads/d]k.png)'
+ end
+ end
+
+ context 'when file is an image or video' do
+ let(:filename) { 'dk.png' }
+
+ it 'returns preview markdown link' do
+ expect(custom_class.markdown_link).to eq '![dk](/uploads/dk.png)'
+ end
+ end
+
+ context 'when file is not an image or video' do
+ let(:filename) { 'dk.zip' }
+
+ it 'returns markdown link' do
+ expect(custom_class.markdown_link).to eq '[dk.zip](/uploads/dk.zip)'
+ end
+ end
+
+ context 'when file name is blank' do
+ let(:filename) { nil }
+
+ it 'returns nil' do
+ expect(custom_class.markdown_link).to eq nil
+ end
+ end
+ end
+
+ describe 'markdown_name' do
+ context 'when file is an image or video' do
+ let(:filename) { 'dk.png' }
+
+ it 'retrieves the name without the extension' do
+ expect(custom_class.markdown_name).to eq 'dk'
+ end
+ end
+
+ context 'when file is not an image or video' do
+ let(:filename) { 'dk.zip' }
+
+ it 'retrieves the name with the extension' do
+ expect(custom_class.markdown_name).to eq 'dk.zip'
+ end
+ end
+
+ context 'when file name is blank' do
+ let(:filename) { nil }
+
+ it 'returns nil' do
+ expect(custom_class.markdown_name).to eq nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/file_type_detection_spec.rb b/spec/lib/gitlab/file_type_detection_spec.rb
new file mode 100644
index 00000000000..5e9b8988cc8
--- /dev/null
+++ b/spec/lib/gitlab/file_type_detection_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+require 'rails_helper'
+
+describe Gitlab::FileTypeDetection do
+ def upload_fixture(filename)
+ fixture_file_upload(File.join('spec', 'fixtures', filename))
+ end
+
+ describe '#image_or_video?' do
+ context 'when class is an uploader' do
+ let(:uploader) do
+ example_uploader = Class.new(CarrierWave::Uploader::Base) do
+ include Gitlab::FileTypeDetection
+
+ storage :file
+ end
+
+ example_uploader.new
+ end
+
+ it 'returns true for an image file' do
+ uploader.store!(upload_fixture('dk.png'))
+
+ expect(uploader).to be_image_or_video
+ end
+
+ it 'returns true for a video file' do
+ uploader.store!(upload_fixture('video_sample.mp4'))
+
+ expect(uploader).to be_image_or_video
+ end
+
+ it 'returns false for other extensions' do
+ uploader.store!(upload_fixture('doc_sample.txt'))
+
+ expect(uploader).not_to be_image_or_video
+ end
+
+ it 'returns false if filename is blank' do
+ uploader.store!(upload_fixture('dk.png'))
+
+ allow(uploader).to receive(:filename).and_return(nil)
+
+ expect(uploader).not_to be_image_or_video
+ end
+ end
+
+ context 'when class is a regular class' do
+ let(:custom_class) do
+ custom_class = Class.new do
+ include Gitlab::FileTypeDetection
+ end
+
+ custom_class.new
+ end
+
+ it 'returns true for an image file' do
+ allow(custom_class).to receive(:filename).and_return('dk.png')
+
+ expect(custom_class).to be_image_or_video
+ end
+
+ it 'returns true for a video file' do
+ allow(custom_class).to receive(:filename).and_return('video_sample.mp4')
+
+ expect(custom_class).to be_image_or_video
+ end
+
+ it 'returns false for other extensions' do
+ allow(custom_class).to receive(:filename).and_return('doc_sample.txt')
+
+ expect(custom_class).not_to be_image_or_video
+ end
+
+ it 'returns false if filename is blank' do
+ allow(custom_class).to receive(:filename).and_return(nil)
+
+ expect(custom_class).not_to be_image_or_video
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 0a1e3eb83d3..579f175c4a8 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -416,6 +416,7 @@ ProjectHook:
- type
- service_id
- push_events
+- push_events_branch_filter
- issues_events
- merge_requests_events
- tag_push_events
diff --git a/spec/lib/gitlab/kubernetes/cluster_role_binding_spec.rb b/spec/lib/gitlab/kubernetes/cluster_role_binding_spec.rb
new file mode 100644
index 00000000000..4a669408025
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/cluster_role_binding_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::ClusterRoleBinding do
+ let(:cluster_role_binding) { described_class.new(name, cluster_role_name, subjects) }
+ let(:name) { 'cluster-role-binding-name' }
+ let(:cluster_role_name) { 'cluster-admin' }
+
+ let(:subjects) { [{ kind: 'ServiceAccount', name: 'sa', namespace: 'ns' }] }
+
+ describe '#generate' do
+ let(:role_ref) do
+ {
+ apiGroup: 'rbac.authorization.k8s.io',
+ kind: 'ClusterRole',
+ name: cluster_role_name
+ }
+ end
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ metadata: { name: name },
+ roleRef: role_ref,
+ subjects: subjects
+ )
+ end
+
+ subject { cluster_role_binding.generate }
+
+ it 'should build a Kubeclient Resource' do
+ is_expected.to eq(resource)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
index 341f71a3e49..25c3b37753d 100644
--- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
@@ -5,9 +5,18 @@ describe Gitlab::Kubernetes::Helm::Api do
let(:helm) { described_class.new(client) }
let(:gitlab_namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
let(:namespace) { Gitlab::Kubernetes::Namespace.new(gitlab_namespace, client) }
- let(:application) { create(:clusters_applications_prometheus) }
-
- let(:command) { application.install_command }
+ let(:application_name) { 'app-name' }
+ let(:rbac) { false }
+ let(:files) { {} }
+
+ let(:command) do
+ Gitlab::Kubernetes::Helm::InstallCommand.new(
+ name: application_name,
+ chart: 'chart-name',
+ rbac: rbac,
+ files: files
+ )
+ end
subject { helm }
@@ -28,6 +37,8 @@ describe Gitlab::Kubernetes::Helm::Api do
before do
allow(client).to receive(:create_pod).and_return(nil)
allow(client).to receive(:create_config_map).and_return(nil)
+ allow(client).to receive(:create_service_account).and_return(nil)
+ allow(client).to receive(:create_cluster_role_binding).and_return(nil)
allow(namespace).to receive(:ensure_exists!).once
end
@@ -39,7 +50,7 @@ describe Gitlab::Kubernetes::Helm::Api do
end
context 'with a ConfigMap' do
- let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application.name, application.files).generate }
+ let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application_name, files).generate }
it 'creates a ConfigMap on kubeclient' do
expect(client).to receive(:create_config_map).with(resource).once
@@ -47,6 +58,96 @@ describe Gitlab::Kubernetes::Helm::Api do
subject.install(command)
end
end
+
+ context 'without a service account' do
+ it 'does not create a service account on kubeclient' do
+ expect(client).not_to receive(:create_service_account)
+ expect(client).not_to receive(:create_cluster_role_binding)
+
+ subject.install(command)
+ end
+ end
+
+ context 'with a service account' do
+ let(:command) { Gitlab::Kubernetes::Helm::InitCommand.new(name: application_name, files: files, rbac: rbac) }
+
+ context 'rbac-enabled cluster' do
+ let(:rbac) { true }
+
+ let(:service_account_resource) do
+ Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' })
+ end
+
+ let(:cluster_role_binding_resource) do
+ Kubeclient::Resource.new(
+ metadata: { name: 'tiller-admin' },
+ roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' },
+ subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }]
+ )
+ end
+
+ context 'service account and cluster role binding do not exist' do
+ before do
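+ # Both lookups return 404, so the service account and the cluster role binding should be created from scratch.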
+ expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil))
+ expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil))
+ end
+
+ it 'creates a service account, followed by the cluster role binding, on kubeclient' do
+ expect(client).to receive(:create_service_account).with(service_account_resource).once.ordered
+ expect(client).to receive(:create_cluster_role_binding).with(cluster_role_binding_resource).once.ordered
+
+ subject.install(command)
+ end
+ end
+
+ context 'service account already exists' do
+ before do
+ expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_return(service_account_resource)
+ expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil))
+ end
+
+ it 'updates the service account, followed by creating the cluster role binding' do
+ expect(client).to receive(:update_service_account).with(service_account_resource).once.ordered
+ expect(client).to receive(:create_cluster_role_binding).with(cluster_role_binding_resource).once.ordered
+
+ subject.install(command)
+ end
+ end
+
+ context 'service account and cluster role binding already exist' do
+ before do
+ expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_return(service_account_resource)
+ expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_return(cluster_role_binding_resource)
+ end
+
+ it 'updates the service account, followed by updating the cluster role binding' do
+ expect(client).to receive(:update_service_account).with(service_account_resource).once.ordered
+ expect(client).to receive(:update_cluster_role_binding).with(cluster_role_binding_resource).once.ordered
+
+ subject.install(command)
+ end
+ end
+
+ context 'a non-404 error is thrown' do
+ before do
+ expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ end
+
+ it 'raises an error' do
+ expect { subject.install(command) }.to raise_error(Kubeclient::HttpError)
+ end
+ end
+ end
+
+ context 'legacy abac cluster' do
+ it 'does not create a service account on kubeclient' do
+ expect(client).not_to receive(:create_service_account)
+ expect(client).not_to receive(:create_cluster_role_binding)
+
+ subject.install(command)
+ end
+ end
+ end
end
describe '#status' do
diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
index d50616e95e8..aacae78be43 100644
--- a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
@@ -2,14 +2,24 @@ require 'spec_helper'
describe Gitlab::Kubernetes::Helm::BaseCommand do
let(:application) { create(:clusters_applications_helm) }
+ let(:rbac) { false }
+
let(:test_class) do
Class.new do
include Gitlab::Kubernetes::Helm::BaseCommand
+ def initialize(rbac)
+ @rbac = rbac
+ end
+
def name
"test-class-name"
end
+ def rbac?
+ @rbac
+ end
+
def files
{
some: 'value'
@@ -19,7 +29,7 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do
end
let(:base_command) do
- test_class.new
+ test_class.new(rbac)
end
subject { base_command }
@@ -34,6 +44,14 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do
it 'should returns a kubeclient resoure with pod content for application' do
is_expected.to be_an_instance_of ::Kubeclient::Resource
end
+
+ context 'when rbac is true' do
+ let(:rbac) { true }
+
+ it 'also returns a kubeclient resource' do
+ is_expected.to be_an_instance_of ::Kubeclient::Resource
+ end
+ end
end
describe '#pod_name' do
diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
index dcbc046cf00..72dc1817936 100644
--- a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
@@ -2,9 +2,135 @@ require 'spec_helper'
describe Gitlab::Kubernetes::Helm::InitCommand do
let(:application) { create(:clusters_applications_helm) }
- let(:commands) { 'helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem >/dev/null' }
+ let(:rbac) { false }
+ let(:files) { {} }
+ let(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac) }
- subject { described_class.new(name: application.name, files: {}) }
+ let(:commands) do
+ <<~EOS
+ helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem >/dev/null
+ EOS
+ end
+
+ subject { init_command }
it_behaves_like 'helm commands'
+
+ context 'on an rbac-enabled cluster' do
+ let(:rbac) { true }
+
+ it_behaves_like 'helm commands' do
+ let(:commands) do
+ <<~EOS
+ helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem --service-account tiller >/dev/null
+ EOS
+ end
+ end
+ end
+
+ describe '#rbac?' do
+ subject { init_command.rbac? }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#config_map_resource' do
+ let(:metadata) do
+ {
+ name: 'values-content-configuration-helm',
+ namespace: 'gitlab-managed-apps',
+ labels: { name: 'values-content-configuration-helm' }
+ }
+ end
+
+ let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: files) }
+
+ subject { init_command.config_map_resource }
+
+ it 'returns a KubeClient resource with config map content for the application' do
+ is_expected.to eq(resource)
+ end
+ end
+
+ describe '#pod_resource' do
+ subject { init_command.pod_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a pod that uses the tiller serviceAccountName' do
+ expect(subject.spec.serviceAccountName).to eq('tiller')
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates a pod that uses the default serviceAccountName' do
+ expect(subject.spec.serviceAccountName).to be_nil
+ end
+ end
+ end
+
+ describe '#service_account_resource' do
+ let(:resource) do
+ Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' })
+ end
+
+ subject { init_command.service_account_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a Kubeclient resource for the tiller ServiceAccount' do
+ is_expected.to eq(resource)
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#cluster_role_binding_resource' do
+ let(:resource) do
+ Kubeclient::Resource.new(
+ metadata: { name: 'tiller-admin' },
+ roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' },
+ subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }]
+ )
+ end
+
+ subject { init_command.cluster_role_binding_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a Kubeclient resource for the ClusterRoleBinding for tiller' do
+ is_expected.to eq(resource)
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
index 982e2f41043..f28941ce58f 100644
--- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
@@ -3,14 +3,17 @@ require 'rails_helper'
describe Gitlab::Kubernetes::Helm::InstallCommand do
let(:files) { { 'ca.pem': 'some file content' } }
let(:repository) { 'https://repository.example.com' }
+ let(:rbac) { false }
let(:version) { '1.2.3' }
let(:install_command) do
described_class.new(
name: 'app-name',
chart: 'chart-name',
+ rbac: rbac,
files: files,
- version: version, repository: repository
+ version: version,
+ repository: repository
)
end
@@ -21,19 +24,76 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
<<~EOS
helm init --client-only >/dev/null
helm repo add app-name https://repository.example.com
- helm install chart-name --name app-name --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm install chart-name
+ --name app-name
+ --tls
+ --tls-ca-cert /data/helm/app-name/config/ca.pem
+ --tls-cert /data/helm/app-name/config/cert.pem
+ --tls-key /data/helm/app-name/config/key.pem
+ --version 1.2.3
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml >/dev/null
EOS
end
end
+ context 'when rbac is true' do
+ let(:rbac) { true }
+
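+ # With rbac enabled, the install command additionally passes --set rbac.create=true,rbac.enabled=true to the chart.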
+ it_behaves_like 'helm commands' do
+ let(:commands) do
+ <<~EOS
+ helm init --client-only >/dev/null
+ helm repo add app-name https://repository.example.com
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm install chart-name
+ --name app-name
+ --tls
+ --tls-ca-cert /data/helm/app-name/config/ca.pem
+ --tls-cert /data/helm/app-name/config/cert.pem
+ --tls-key /data/helm/app-name/config/key.pem
+ --version 1.2.3
+ --set rbac.create\\=true,rbac.enabled\\=true
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml >/dev/null
+ EOS
+ end
+ end
+ end
+
context 'when there is no repository' do
let(:repository) { nil }
it_behaves_like 'helm commands' do
let(:commands) do
<<~EOS
- helm init --client-only >/dev/null
- helm install chart-name --name app-name --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
+ helm init --client-only >/dev/null
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm install chart-name
+ --name app-name
+ --tls
+ --tls-ca-cert /data/helm/app-name/config/ca.pem
+ --tls-cert /data/helm/app-name/config/cert.pem
+ --tls-key /data/helm/app-name/config/key.pem
+ --version 1.2.3
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml >/dev/null
EOS
end
end
@@ -45,9 +105,19 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
it_behaves_like 'helm commands' do
let(:commands) do
<<~EOS
- helm init --client-only >/dev/null
- helm repo add app-name https://repository.example.com
- helm install chart-name --name app-name --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
+ helm init --client-only >/dev/null
+ helm repo add app-name https://repository.example.com
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm install chart-name
+ --name app-name
+ --version 1.2.3
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml >/dev/null
EOS
end
end
@@ -59,14 +129,63 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
it_behaves_like 'helm commands' do
let(:commands) do
<<~EOS
- helm init --client-only >/dev/null
- helm repo add app-name https://repository.example.com
- helm install chart-name --name app-name --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
+ helm init --client-only >/dev/null
+ helm repo add app-name https://repository.example.com
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm install chart-name
+ --name app-name
+ --tls
+ --tls-ca-cert /data/helm/app-name/config/ca.pem
+ --tls-cert /data/helm/app-name/config/cert.pem
+ --tls-key /data/helm/app-name/config/key.pem
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml >/dev/null
EOS
end
end
end
+ describe '#rbac?' do
+ subject { install_command.rbac? }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#pod_resource' do
+ subject { install_command.pod_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a pod that uses the tiller serviceAccountName' do
+ expect(subject.spec.serviceAccountName).to eq('tiller')
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates a pod that uses the default serviceAccountName' do
+ expect(subject.spec.serviceAccountName).to be_nil
+ end
+ end
+ end
+
describe '#config_map_resource' do
let(:metadata) do
{
@@ -84,4 +203,20 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
is_expected.to eq(resource)
end
end
+
+ describe '#service_account_resource' do
+ subject { install_command.service_account_resource }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
+
+ describe '#cluster_role_binding_resource' do
+ subject { install_command.cluster_role_binding_resource }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
index ec64193c0b2..b333b334f36 100644
--- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
@@ -5,8 +5,9 @@ describe Gitlab::Kubernetes::Helm::Pod do
let(:app) { create(:clusters_applications_prometheus) }
let(:command) { app.install_command }
let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
+ let(:service_account_name) { nil }
- subject { described_class.new(command, namespace) }
+ subject { described_class.new(command, namespace, service_account_name: service_account_name) }
context 'with a command' do
it 'should generate a Kubeclient::Resource' do
@@ -58,6 +59,20 @@ describe Gitlab::Kubernetes::Helm::Pod do
expect(volume.configMap['items'].first['key']).to eq(:'values.yaml')
expect(volume.configMap['items'].first['path']).to eq(:'values.yaml')
end
+
+ it 'should have no serviceAccountName' do
+ spec = subject.generate.spec
+ expect(spec.serviceAccountName).to be_nil
+ end
+
+ context 'with a service_account_name' do
+ let(:service_account_name) { 'sa' }
+
+ it 'should use the serviceAccountName provided' do
+ spec = subject.generate.spec
+ expect(spec.serviceAccountName).to eq(service_account_name)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
new file mode 100644
index 00000000000..9146729d139
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -0,0 +1,247 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::KubeClient do
+ include KubernetesHelpers
+
+ let(:api_url) { 'https://kubernetes.example.com/prefix' }
+ let(:api_groups) { ['api', 'apis/rbac.authorization.k8s.io'] }
+ let(:api_version) { 'v1' }
+ let(:kubeclient_options) { { auth_options: { bearer_token: 'xyz' } } }
+
+ let(:client) { described_class.new(api_url, api_groups, api_version, kubeclient_options) }
+
+ before do
+ stub_kubeclient_discover(api_url)
+ end
+
+ describe '#hashed_clients' do
+ subject { client.hashed_clients }
+
+ it 'has keys from api groups' do
+ expect(subject.keys).to match_array api_groups
+ end
+
+ it 'has values of Kubeclient::Client' do
+ expect(subject.values).to all(be_an_instance_of Kubeclient::Client)
+ end
+ end
+
+ describe '#clients' do
+ subject { client.clients }
+
+ it 'is not empty' do
+ is_expected.to be_present
+ end
+
+ it 'is an array of Kubeclient::Client objects' do
+ is_expected.to all(be_an_instance_of Kubeclient::Client)
+ end
+
+ it 'has each API group url' do
+ expected_urls = api_groups.map { |group| "#{api_url}/#{group}" }
+
+ expect(subject.map(&:api_endpoint).map(&:to_s)).to match_array(expected_urls)
+ end
+
+ it 'has the kubeclient options' do
+ subject.each do |client|
+ expect(client.auth_options).to eq({ bearer_token: 'xyz' })
+ end
+ end
+
+ it 'has the api_version' do
+ subject.each do |client|
+ expect(client.instance_variable_get(:@api_version)).to eq('v1')
+ end
+ end
+ end
+
+ describe '#core_client' do
+ subject { client.core_client }
+
+ it 'is a Kubeclient::Client' do
+ is_expected.to be_an_instance_of Kubeclient::Client
+ end
+
+ it 'has the core API endpoint' do
+ expect(subject.api_endpoint.to_s).to match(%r{\/api\Z})
+ end
+ end
+
+ describe '#rbac_client' do
+ subject { client.rbac_client }
+
+ it 'is a Kubeclient::Client' do
+ is_expected.to be_an_instance_of Kubeclient::Client
+ end
+
+ it 'has the RBAC API group endpoint' do
+ expect(subject.api_endpoint.to_s).to match(%r{\/apis\/rbac.authorization.k8s.io\Z})
+ end
+ end
+
+ describe '#extensions_client' do
+ subject { client.extensions_client }
+
+ let(:api_groups) { ['apis/extensions'] }
+
+ it 'is a Kubeclient::Client' do
+ is_expected.to be_an_instance_of Kubeclient::Client
+ end
+
+ it 'has the extensions API group endpoint' do
+ expect(subject.api_endpoint.to_s).to match(%r{\/apis\/extensions\Z})
+ end
+ end
+
+ describe '#discover!' do
+ it 'makes a discovery request for each API group' do
+ client.discover!
+
+ api_groups.each do |api_group|
+ discovery_url = api_url + '/' + api_group + '/v1'
+ expect(WebMock).to have_requested(:get, discovery_url).once
+ end
+ end
+ end
+
+ describe 'core API' do
+ let(:core_client) { client.core_client }
+
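+ # Each of the core (v1) API methods below should be delegated to the core client.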
+ [
+ :get_pods,
+ :get_secrets,
+ :get_config_map,
+ :get_pod,
+ :get_namespace,
+ :get_service,
+ :get_service_account,
+ :delete_pod,
+ :create_config_map,
+ :create_namespace,
+ :create_pod,
+ :create_service_account,
+ :update_config_map,
+ :update_service_account
+ ].each do |method|
+ describe "##{method}" do
+ it 'delegates to the core client' do
+ expect(client).to delegate_method(method).to(:core_client)
+ end
+
+ it 'responds to the method' do
+ expect(client).to respond_to method
+ end
+ end
+ end
+ end
+
+ describe 'rbac API group' do
+ let(:rbac_client) { client.rbac_client }
+
+ [
+ :create_cluster_role_binding,
+ :get_cluster_role_binding,
+ :update_cluster_role_binding
+ ].each do |method|
+ describe "##{method}" do
+ it 'delegates to the rbac client' do
+ expect(client).to delegate_method(method).to(:rbac_client)
+ end
+
+ it 'responds to the method' do
+ expect(client).to respond_to method
+ end
+
+ context 'no rbac client' do
+ let(:api_groups) { ['api'] }
+
+ it 'throws an error' do
+ expect { client.public_send(method) }.to raise_error(Module::DelegationError)
+ end
+ end
+ end
+ end
+ end
+
+ describe 'extensions API group' do
+ let(:api_groups) { ['apis/extensions'] }
+ let(:api_version) { 'v1beta1' }
+ let(:extensions_client) { client.extensions_client }
+
+ describe '#get_deployments' do
+ it 'delegates to the extensions client' do
+ expect(client).to delegate_method(:get_deployments).to(:extensions_client)
+ end
+
+ it 'responds to the method' do
+ expect(client).to respond_to :get_deployments
+ end
+
+ context 'no extensions client' do
+ let(:api_groups) { ['api'] }
+ let(:api_version) { 'v1' }
+
+ it 'throws an error' do
+ expect { client.get_deployments }.to raise_error(Module::DelegationError)
+ end
+ end
+ end
+ end
+
+ describe 'non-entity methods' do
+ it 'does not proxy for non-entity methods' do
+ expect(client.clients.first).to respond_to :proxy_url
+
+ expect(client).not_to respond_to :proxy_url
+ end
+
+ it 'throws an error' do
+ expect { client.proxy_url }.to raise_error(NoMethodError)
+ end
+ end
+
+ describe '#get_pod_log' do
+ let(:core_client) { client.core_client }
+
+ it 'is delegated to the core client' do
+ expect(client).to delegate_method(:get_pod_log).to(:core_client)
+ end
+
+ context 'when no core client' do
+ let(:api_groups) { ['apis/extensions'] }
+
+ it 'throws an error' do
+ expect { client.get_pod_log('pod-name') }.to raise_error(Module::DelegationError)
+ end
+ end
+ end
+
+ describe '#watch_pod_log' do
+ let(:core_client) { client.core_client }
+
+ it 'is delegated to the core client' do
+ expect(client).to delegate_method(:watch_pod_log).to(:core_client)
+ end
+
+ context 'when no core client' do
+ let(:api_groups) { ['apis/extensions'] }
+
+ it 'throws an error' do
+ expect { client.watch_pod_log('pod-name') }.to raise_error(Module::DelegationError)
+ end
+ end
+ end
+
+ describe 'methods that do not exist on any client' do
+ it 'throws an error' do
+ expect { client.non_existent_method }.to raise_error(NoMethodError)
+ end
+
+ it 'returns false for respond_to' do
+ expect(client.respond_to?(:non_existent_method)).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/service_account_spec.rb b/spec/lib/gitlab/kubernetes/service_account_spec.rb
new file mode 100644
index 00000000000..8da9e932dc3
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/service_account_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::ServiceAccount do
+ let(:name) { 'a_service_account' }
+ let(:namespace_name) { 'a_namespace' }
+ let(:service_account) { described_class.new(name, namespace_name) }
+
+ it { expect(service_account.name).to eq(name) }
+ it { expect(service_account.namespace_name).to eq(namespace_name) }
+
+ describe '#generate' do
+ let(:resource) do
+ ::Kubeclient::Resource.new(metadata: { name: name, namespace: namespace_name })
+ end
+
+ subject { service_account.generate }
+
+ it 'should build a Kubeclient Resource' do
+ is_expected.to eq(resource)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index 4e7bd433a9c..ee6d6fc961f 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -42,6 +42,65 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do
subscriber.sql(event)
end
+
+ context 'events are internal to Rails or irrelevant' do
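+ # SCHEMA lookups and transaction BEGIN/COMMIT statements should not be counted towards SQL metrics.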
+ let(:schema_event) do
+ double(
+ :event,
+ name: 'sql.active_record',
+ payload: {
+ sql: "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass",
+ name: 'SCHEMA',
+ connection_id: 135,
+ statement_name: nil,
+ binds: []
+ },
+ duration: 0.7
+ )
+ end
+
+ let(:begin_event) do
+ double(
+ :event,
+ name: 'sql.active_record',
+ payload: {
+ sql: "BEGIN",
+ name: nil,
+ connection_id: 231,
+ statement_name: nil,
+ binds: []
+ },
+ duration: 1.1
+ )
+ end
+
+ let(:commit_event) do
+ double(
+ :event,
+ name: 'sql.active_record',
+ payload: {
+ sql: "COMMIT",
+ name: nil,
+ connection_id: 212,
+ statement_name: nil,
+ binds: []
+ },
+ duration: 1.6
+ )
+ end
+
+ it 'skips schema/begin/commit sql commands' do
+ expect(subscriber).to receive(:current_transaction)
+ .at_least(:once)
+ .and_return(transaction)
+
+ expect(transaction).not_to receive(:increment)
+
+ subscriber.sql(schema_event)
+ subscriber.sql(begin_event)
+ subscriber.sql(commit_event)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index de6dd2a9fea..1ec1fe10744 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -166,4 +166,20 @@ describe Gitlab::UsageData do
expect(subject[:recorded_at]).to be_a(Time)
end
end
+
+ describe '#count' do
+ let(:relation) { double(:relation) }
+
+ it 'returns the count when counting succeeds' do
+ allow(relation).to receive(:count).and_return(1)
+
+ expect(described_class.count(relation)).to eq(1)
+ end
+
+ it 'returns the fallback value when counting fails' do
+ allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect(described_class.count(relation, fallback: 15)).to eq(15)
+ end
+ end
end
diff --git a/spec/lib/gitlab/user_extractor_spec.rb b/spec/lib/gitlab/user_extractor_spec.rb
new file mode 100644
index 00000000000..fcc05ab3a0c
--- /dev/null
+++ b/spec/lib/gitlab/user_extractor_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::UserExtractor do
+ let(:text) do
+ <<~TXT
+ This is a long text that mentions some users.
+ @user-1, @user-2 and user@gitlab.org take a walk in the park.
+ There they meet @user-4 that was out with other-user@gitlab.org.
+ @user-1 thought it was late, so went home straight away
+ TXT
+ end
+ subject(:extractor) { described_class.new(text) }
+
+ describe '#users' do
+ it 'returns an empty relation when nil was passed' do
+ extractor = described_class.new(nil)
+
+ expect(extractor.users).to be_empty
+ expect(extractor.users).to be_a(ActiveRecord::Relation)
+ end
+
+ it 'returns the user case-insensitively when matching usernames' do
+ user = create(:user, username: "USER-4")
+
+ expect(extractor.users).to include(user)
+ end
+
+ it 'returns users by primary email' do
+ user = create(:user, email: 'user@gitlab.org')
+
+ expect(extractor.users).to include(user)
+ end
+
+ it 'returns users by secondary email' do
+ user = create(:email, email: 'other-user@gitlab.org').user
+
+ expect(extractor.users).to include(user)
+ end
+ end
+
+ describe '#matches' do
+ it 'includes all mentioned email addresses' do
+ expect(extractor.matches[:emails]).to contain_exactly('user@gitlab.org', 'other-user@gitlab.org')
+ end
+
+ it 'includes all mentioned usernames' do
+ expect(extractor.matches[:usernames]).to contain_exactly('user-1', 'user-2', 'user-4')
+ end
+ end
+
+ describe '#references' do
+ it 'includes all user-references once' do
+ expect(extractor.references).to contain_exactly('user-1', 'user-2', 'user@gitlab.org', 'user-4', 'other-user@gitlab.org')
+ end
+ end
+end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index e0569218d78..9c308cc1be9 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -61,6 +61,8 @@ describe ObjectStorage::DirectUpload do
expect(subject[:GetURL]).to start_with(storage_url)
expect(subject[:StoreURL]).to start_with(storage_url)
expect(subject[:DeleteURL]).to start_with(storage_url)
+ expect(subject[:CustomPutHeaders]).to be_truthy
+ expect(subject[:PutHeaders]).to eq({})
end
end
diff --git a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
new file mode 100644
index 00000000000..df82672f254
--- /dev/null
+++ b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
@@ -0,0 +1,73 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20180816161409_migrate_legacy_artifacts_to_job_artifacts.rb')
+
+describe MigrateLegacyArtifactsToJobArtifacts, :migration, :sidekiq do
+ let(:migration_class) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts }
+ let(:migration_name) { migration_class.to_s.demodulize }
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:pipelines) { table(:ci_pipelines) }
+ let(:jobs) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
+ let(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
+ let(:archive_file_type) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::ARCHIVE_FILE_TYPE }
+ let(:metadata_file_type) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::METADATA_FILE_TYPE }
+ let(:local_store) { ::ObjectStorage::Store::LOCAL }
+ let(:remote_store) { ::ObjectStorage::Store::REMOTE }
+ let(:legacy_location) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::LEGACY_PATH_FILE_LOCATION }
+
+ context 'when legacy artifacts exist' do
+ before do
+ jobs.create!(id: 1, commit_id: pipeline.id, project_id: project.id, status: :success, artifacts_file: 'archive.zip')
+ jobs.create!(id: 2, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_metadata: 'metadata.gz')
+ jobs.create!(id: 3, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_file: 'archive.zip', artifacts_metadata: 'metadata.gz')
+ jobs.create!(id: 4, commit_id: pipeline.id, project_id: project.id, status: :running)
+ jobs.create!(id: 5, commit_id: pipeline.id, project_id: project.id, status: :success, artifacts_file: 'archive.zip', artifacts_file_store: remote_store, artifacts_metadata: 'metadata.gz')
+ jobs.create!(id: 6, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_file: 'archive.zip', artifacts_metadata: 'metadata.gz')
+ end
+
+ it 'schedules a background migration' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(migration_name).to be_scheduled_delayed_migration(5.minutes, 1, 6)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 1
+ end
+ end
+ end
+
+ it 'migrates legacy artifacts to ci_job_artifacts table' do
+ migrate!
+
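+ # Rows below list: project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location.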
+ expect(job_artifacts.order(:job_id, :file_type).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
+ .to eq([[project.id, 1, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 3, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 3, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location],
+ [project.id, 5, archive_file_type, remote_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 5, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location],
+ [project.id, 6, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 6, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location]])
+ end
+ end
+
+ context 'when legacy artifacts do not exist' do
+ before do
+ jobs.create!(id: 1, commit_id: pipeline.id, project_id: project.id, status: :success)
+ jobs.create!(id: 2, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_metadata: 'metadata.gz')
+ end
+
+ it 'does not schedule background migrations' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq 0
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb
index e5b2bdc8a4e..2c37cd20ecc 100644
--- a/spec/models/clusters/applications/helm_spec.rb
+++ b/spec/models/clusters/applications/helm_spec.rb
@@ -47,5 +47,19 @@ describe Clusters::Applications::Helm do
cert = OpenSSL::X509::Certificate.new(subject.files[:'cert.pem'])
expect(cert.not_after).to be > 999.years.from_now
end
+
+ describe 'rbac' do
+ context 'non rbac cluster' do
+ it { expect(subject).not_to be_rbac }
+ end
+
+ context 'rbac cluster' do
+ before do
+ helm.cluster.platform_kubernetes.rbac!
+ end
+
+ it { expect(subject).to be_rbac }
+ end
+ end
end
end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index 21f75ced8c3..c55953c8d22 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -88,9 +88,18 @@ describe Clusters::Applications::Ingress do
expect(subject.name).to eq('ingress')
expect(subject.chart).to eq('stable/nginx-ingress')
expect(subject.version).to eq('0.23.0')
+ expect(subject).not_to be_rbac
expect(subject.files).to eq(ingress.files)
end
+ context 'on an rbac-enabled cluster' do
+ before do
+ ingress.cluster.platform_kubernetes.rbac!
+ end
+
+ it { is_expected.to be_rbac }
+ end
+
context 'application failed to install previously' do
let(:ingress) { create(:clusters_applications_ingress, :errored, version: 'nginx') }
diff --git a/spec/models/clusters/applications/jupyter_spec.rb b/spec/models/clusters/applications/jupyter_spec.rb
index 027b732681b..591a01d78a9 100644
--- a/spec/models/clusters/applications/jupyter_spec.rb
+++ b/spec/models/clusters/applications/jupyter_spec.rb
@@ -51,10 +51,19 @@ describe Clusters::Applications::Jupyter do
expect(subject.name).to eq('jupyter')
expect(subject.chart).to eq('jupyter/jupyterhub')
expect(subject.version).to eq('v0.6')
+ expect(subject).not_to be_rbac
expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/')
expect(subject.files).to eq(jupyter.files)
end
+ context 'on an rbac-enabled cluster' do
+ before do
+ jupyter.cluster.platform_kubernetes.rbac!
+ end
+
+ it { is_expected.to be_rbac }
+ end
+
context 'application failed to install previously' do
let(:jupyter) { create(:clusters_applications_jupyter, :errored, version: '0.0.1') }
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 26b75c75e1d..f34b4ece8db 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -1,6 +1,8 @@
require 'rails_helper'
describe Clusters::Applications::Prometheus do
+ include KubernetesHelpers
+
include_examples 'cluster application core specs', :clusters_applications_prometheus
include_examples 'cluster application status specs', :cluster_application_prometheus
@@ -107,26 +109,14 @@ describe Clusters::Applications::Prometheus do
end
context 'cluster has kubeclient' do
- let(:kubernetes_url) { 'http://example.com' }
- let(:k8s_discover_response) do
- {
- resources: [
- {
- name: 'service',
- kind: 'Service'
- }
- ]
- }
- end
-
- let(:kube_client) { Kubeclient::Client.new(kubernetes_url) }
+ let(:kubernetes_url) { subject.cluster.platform_kubernetes.api_url }
+ let(:kube_client) { subject.cluster.kubeclient.core_client }
- let(:cluster) { create(:cluster) }
- subject { create(:clusters_applications_prometheus, cluster: cluster) }
+ subject { create(:clusters_applications_prometheus) }
before do
- allow(kube_client.rest_client).to receive(:get).and_return(k8s_discover_response.to_json)
- allow(subject.cluster).to receive(:kubeclient).and_return(kube_client)
+ subject.cluster.platform_kubernetes.namespace = 'a-namespace'
+ stub_kubeclient_discover(subject.cluster.platform_kubernetes.api_url)
end
it 'creates proxy prometheus rest client' do
@@ -134,7 +124,7 @@ describe Clusters::Applications::Prometheus do
end
it 'creates proper url' do
- expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80/proxy')
+ expect(subject.prometheus_client.url).to eq("#{kubernetes_url}/api/v1/namespaces/gitlab-managed-apps/services/prometheus-prometheus-server:80/proxy")
end
it 'copies options and headers from kube client to proxy client' do
@@ -164,9 +154,18 @@ describe Clusters::Applications::Prometheus do
expect(subject.name).to eq('prometheus')
expect(subject.chart).to eq('stable/prometheus')
expect(subject.version).to eq('6.7.3')
+ expect(subject).not_to be_rbac
expect(subject.files).to eq(prometheus.files)
end
+ context 'on an rbac-enabled cluster' do
+ before do
+ prometheus.cluster.platform_kubernetes.rbac!
+ end
+
+ it { is_expected.to be_rbac }
+ end
+
context 'application failed to install previously' do
let(:prometheus) { create(:clusters_applications_prometheus, :errored, version: '2.0.0') }
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
index d84f125e246..eda8d519f60 100644
--- a/spec/models/clusters/applications/runner_spec.rb
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -46,10 +46,19 @@ describe Clusters::Applications::Runner do
expect(subject.name).to eq('runner')
expect(subject.chart).to eq('runner/gitlab-runner')
expect(subject.version).to eq('0.1.31')
+ expect(subject).not_to be_rbac
expect(subject.repository).to eq('https://charts.gitlab.io')
expect(subject.files).to eq(gitlab_runner.files)
end
+ context 'on an rbac enabled cluster' do
+ before do
+ gitlab_runner.cluster.platform_kubernetes.rbac!
+ end
+
+ it { is_expected.to be_rbac }
+ end
+
context 'application failed to install previously' do
let(:gitlab_runner) { create(:clusters_applications_runner, :errored, runner: ci_runner, version: '0.1.13') }
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 6f66515b45f..2727191eb9b 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -13,6 +13,10 @@ describe Clusters::Cluster do
it { is_expected.to delegate_method(:status_reason).to(:provider) }
it { is_expected.to delegate_method(:status_name).to(:provider) }
it { is_expected.to delegate_method(:on_creation?).to(:provider) }
+ it { is_expected.to delegate_method(:active?).to(:platform_kubernetes).with_prefix }
+ it { is_expected.to delegate_method(:rbac?).to(:platform_kubernetes).with_prefix }
+ it { is_expected.to delegate_method(:installed?).to(:application_helm).with_prefix }
+ it { is_expected.to delegate_method(:installed?).to(:application_ingress).with_prefix }
it { is_expected.to respond_to :project }
describe '.enabled' do
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index ab7f89f9bf4..66198d5ee2b 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -92,6 +92,30 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
end
end
+ describe '#kubeclient' do
+ subject { kubernetes.kubeclient }
+
+ let(:kubernetes) { build(:cluster_platform_kubernetes, :configured, namespace: 'a-namespace') }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::KubeClient) }
+ end
+
+ describe '#rbac?' do
+ subject { kubernetes.rbac? }
+
+ let(:kubernetes) { build(:cluster_platform_kubernetes, :configured) }
+
+ context 'when authorization type is rbac' do
+ let(:kubernetes) { build(:cluster_platform_kubernetes, :rbac_enabled, :configured) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when authorization type is nil' do
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#actual_namespace' do
subject { kubernetes.actual_namespace }
diff --git a/spec/models/concerns/avatarable_spec.rb b/spec/models/concerns/avatarable_spec.rb
index 76f734079b7..7d617cb7b19 100644
--- a/spec/models/concerns/avatarable_spec.rb
+++ b/spec/models/concerns/avatarable_spec.rb
@@ -12,6 +12,26 @@ describe Avatarable do
stub_config_setting(relative_url_root: relative_url_root)
end
+ describe '#update' do
+ let(:validator) { project._validators[:avatar].detect { |v| v.is_a?(FileSizeValidator) } }
+
+ context 'when avatar changed' do
+ it 'validates the file size' do
+ expect(validator).to receive(:validate_each).and_call_original
+
+ project.update(avatar: 'uploads/avatar.png')
+ end
+ end
+
+ context 'when avatar was not changed' do
+ it 'skips validation of file size' do
+ expect(validator).not_to receive(:validate_each)
+
+ project.update(name: 'Hello world')
+ end
+ end
+ end
+
describe '#avatar_path' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/models/concerns/case_sensitivity_spec.rb b/spec/models/concerns/case_sensitivity_spec.rb
index 5c0dfaeb4d3..1bf6c9b3404 100644
--- a/spec/models/concerns/case_sensitivity_spec.rb
+++ b/spec/models/concerns/case_sensitivity_spec.rb
@@ -3,186 +3,50 @@ require 'spec_helper'
describe CaseSensitivity do
describe '.iwhere' do
let(:connection) { ActiveRecord::Base.connection }
- let(:model) { Class.new { include CaseSensitivity } }
-
- describe 'using PostgreSQL' do
- before do
- allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
- allow(Gitlab::Database).to receive(:mysql?).and_return(false)
- end
-
- describe 'with a single column/value pair' do
- it 'returns the criteria for a column and a value' do
- criteria = double(:criteria)
-
- expect(connection).to receive(:quote_table_name)
- .with(:foo)
- .and_return('"foo"')
-
- expect(model).to receive(:where)
- .with(%q{LOWER("foo") = LOWER(:value)}, value: 'bar')
- .and_return(criteria)
-
- expect(model.iwhere(foo: 'bar')).to eq(criteria)
- end
-
- it 'returns the criteria for a column with a table, and a value' do
- criteria = double(:criteria)
-
- expect(connection).to receive(:quote_table_name)
- .with(:'foo.bar')
- .and_return('"foo"."bar"')
-
- expect(model).to receive(:where)
- .with(%q{LOWER("foo"."bar") = LOWER(:value)}, value: 'bar')
- .and_return(criteria)
-
- expect(model.iwhere('foo.bar'.to_sym => 'bar')).to eq(criteria)
- end
- end
-
- describe 'with multiple column/value pairs' do
- it 'returns the criteria for a column and a value' do
- initial = double(:criteria)
- final = double(:criteria)
-
- expect(connection).to receive(:quote_table_name)
- .with(:foo)
- .and_return('"foo"')
-
- expect(connection).to receive(:quote_table_name)
- .with(:bar)
- .and_return('"bar"')
-
- expect(model).to receive(:where)
- .with(%q{LOWER("foo") = LOWER(:value)}, value: 'bar')
- .and_return(initial)
-
- expect(initial).to receive(:where)
- .with(%q{LOWER("bar") = LOWER(:value)}, value: 'baz')
- .and_return(final)
-
- got = model.iwhere(foo: 'bar', bar: 'baz')
-
- expect(got).to eq(final)
- end
-
- it 'returns the criteria for a column with a table, and a value' do
- initial = double(:criteria)
- final = double(:criteria)
-
- expect(connection).to receive(:quote_table_name)
- .with(:'foo.bar')
- .and_return('"foo"."bar"')
-
- expect(connection).to receive(:quote_table_name)
- .with(:'foo.baz')
- .and_return('"foo"."baz"')
-
- expect(model).to receive(:where)
- .with(%q{LOWER("foo"."bar") = LOWER(:value)}, value: 'bar')
- .and_return(initial)
-
- expect(initial).to receive(:where)
- .with(%q{LOWER("foo"."baz") = LOWER(:value)}, value: 'baz')
- .and_return(final)
-
- got = model.iwhere('foo.bar'.to_sym => 'bar',
- 'foo.baz'.to_sym => 'baz')
-
- expect(got).to eq(final)
- end
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ include CaseSensitivity
+ self.table_name = 'namespaces'
end
end
- describe 'using MySQL' do
- before do
- allow(Gitlab::Database).to receive(:postgresql?).and_return(false)
- allow(Gitlab::Database).to receive(:mysql?).and_return(true)
- end
-
- describe 'with a single column/value pair' do
- it 'returns the criteria for a column and a value' do
- criteria = double(:criteria)
-
- expect(connection).to receive(:quote_table_name)
- .with(:foo)
- .and_return('`foo`')
-
- expect(model).to receive(:where)
- .with(%q{`foo` = :value}, value: 'bar')
- .and_return(criteria)
+ let!(:model_1) { model.create(path: 'mOdEl-1', name: 'mOdEl 1') }
+ let!(:model_2) { model.create(path: 'mOdEl-2', name: 'mOdEl 2') }
- expect(model.iwhere(foo: 'bar')).to eq(criteria)
- end
+ it 'finds a single instance by a single attribute regardless of case' do
+ expect(model.iwhere(path: 'MODEL-1')).to contain_exactly(model_1)
+ end
- it 'returns the criteria for a column with a table, and a value' do
- criteria = double(:criteria)
+ it 'finds multiple instances by a single attribute regardless of case' do
+ expect(model.iwhere(path: %w(MODEL-1 model-2))).to contain_exactly(model_1, model_2)
+ end
- expect(connection).to receive(:quote_table_name)
- .with(:'foo.bar')
- .and_return('`foo`.`bar`')
+ it 'finds instances by multiple attributes' do
+ expect(model.iwhere(path: %w(MODEL-1 model-2), name: 'model 1'))
+ .to contain_exactly(model_1)
+ end
- expect(model).to receive(:where)
- .with(%q{`foo`.`bar` = :value}, value: 'bar')
- .and_return(criteria)
+ # Using `mysql` & `postgresql` metadata-tags here because both adapters build
+ # the query slightly differently
+ context 'for MySQL', :mysql do
+ it 'builds a simple query' do
+ query = model.iwhere(path: %w(MODEL-1 model-2), name: 'model 1').to_sql
+ expected_query = <<~QRY.strip
+ SELECT `namespaces`.* FROM `namespaces` WHERE (`namespaces`.`path` IN ('MODEL-1', 'model-2')) AND (`namespaces`.`name` = 'model 1')
+ QRY
- expect(model.iwhere('foo.bar'.to_sym => 'bar'))
- .to eq(criteria)
- end
+ expect(query).to eq(expected_query)
end
+ end
- describe 'with multiple column/value pairs' do
- it 'returns the criteria for a column and a value' do
- initial = double(:criteria)
- final = double(:criteria)
-
- expect(connection).to receive(:quote_table_name)
- .with(:foo)
- .and_return('`foo`')
-
- expect(connection).to receive(:quote_table_name)
- .with(:bar)
- .and_return('`bar`')
-
- expect(model).to receive(:where)
- .with(%q{`foo` = :value}, value: 'bar')
- .and_return(initial)
-
- expect(initial).to receive(:where)
- .with(%q{`bar` = :value}, value: 'baz')
- .and_return(final)
-
- got = model.iwhere(foo: 'bar', bar: 'baz')
-
- expect(got).to eq(final)
- end
-
- it 'returns the criteria for a column with a table, and a value' do
- initial = double(:criteria)
- final = double(:criteria)
-
- expect(connection).to receive(:quote_table_name)
- .with(:'foo.bar')
- .and_return('`foo`.`bar`')
-
- expect(connection).to receive(:quote_table_name)
- .with(:'foo.baz')
- .and_return('`foo`.`baz`')
-
- expect(model).to receive(:where)
- .with(%q{`foo`.`bar` = :value}, value: 'bar')
- .and_return(initial)
-
- expect(initial).to receive(:where)
- .with(%q{`foo`.`baz` = :value}, value: 'baz')
- .and_return(final)
-
- got = model.iwhere('foo.bar'.to_sym => 'bar',
- 'foo.baz'.to_sym => 'baz')
+ context 'for PostgreSQL', :postgresql do
+ it 'builds a query using LOWER' do
+ query = model.iwhere(path: %w(MODEL-1 model-2), name: 'model 1').to_sql
+ expected_query = <<~QRY.strip
+ SELECT \"namespaces\".* FROM \"namespaces\" WHERE (LOWER(\"namespaces\".\"path\") IN (LOWER('MODEL-1'), LOWER('model-2'))) AND (LOWER(\"namespaces\".\"name\") = LOWER('model 1'))
+ QRY
- expect(got).to eq(final)
- end
+ expect(query).to eq(expected_query)
end
end
end
diff --git a/spec/models/concerns/triggerable_hooks_spec.rb b/spec/models/concerns/triggerable_hooks_spec.rb
index 621d2d38eae..265abd6bd72 100644
--- a/spec/models/concerns/triggerable_hooks_spec.rb
+++ b/spec/models/concerns/triggerable_hooks_spec.rb
@@ -40,4 +40,28 @@ RSpec.describe TriggerableHooks do
end
end
end
+
+ describe '.select_active' do
+ it 'returns hooks that match the active filter' do
+ TestableHook.create!(url: 'http://example1.com', push_events: true)
+ TestableHook.create!(url: 'http://example2.com', push_events: true)
+ filter1 = double(:filter1)
+ filter2 = double(:filter2)
+ allow(ActiveHookFilter).to receive(:new).exactly(2).times.and_return(filter1, filter2)
+ expect(filter1).to receive(:matches?).and_return(true)
+ expect(filter2).to receive(:matches?).and_return(false)
+
+ hooks = TestableHook.push_hooks.order_id_asc
+ expect(hooks.select_active(:push_hooks, {})).to eq [hooks.first]
+ end
+
+ it 'returns empty list if no hooks match the active filter' do
+ TestableHook.create!(url: 'http://example1.com', push_events: true)
+ filter = double(:filter)
+ allow(ActiveHookFilter).to receive(:new).and_return(filter)
+ expect(filter).to receive(:matches?).and_return(false)
+
+ expect(TestableHook.push_hooks.select_active(:push_hooks, {})).to eq []
+ end
+ end
end
diff --git a/spec/models/hooks/active_hook_filter_spec.rb b/spec/models/hooks/active_hook_filter_spec.rb
new file mode 100644
index 00000000000..df7edda2213
--- /dev/null
+++ b/spec/models/hooks/active_hook_filter_spec.rb
@@ -0,0 +1,68 @@
+require 'spec_helper'
+
+describe ActiveHookFilter do
+ subject(:filter) { described_class.new(hook) }
+
+ describe '#matches?' do
+ context 'for push event hooks' do
+ let(:hook) do
+ create(:project_hook, push_events: true, push_events_branch_filter: branch_filter)
+ end
+
+ context 'branch filter is specified' do
+ let(:branch_filter) { 'master' }
+
+ it 'returns true if branch matches' do
+ expect(filter.matches?(:push_hooks, { ref: 'refs/heads/master' })).to be true
+ end
+
+ it 'returns false if branch does not match' do
+ expect(filter.matches?(:push_hooks, { ref: 'refs/heads/my_branch' })).to be false
+ end
+
+ it 'returns false if ref is nil' do
+ expect(filter.matches?(:push_hooks, {})).to be false
+ end
+
+ context 'branch filter contains wildcard' do
+ let(:branch_filter) { 'features/*' }
+
+ it 'returns true if branch matches' do
+ expect(filter.matches?(:push_hooks, { ref: 'refs/heads/features/my-branch' })).to be true
+ expect(filter.matches?(:push_hooks, { ref: 'refs/heads/features/my-branch/something' })).to be true
+ end
+
+ it 'returns false if branch does not match' do
+ expect(filter.matches?(:push_hooks, { ref: 'refs/heads/master' })).to be false
+ end
+ end
+ end
+
+ context 'branch filter is not specified' do
+ let(:branch_filter) { nil }
+
+ it 'returns true' do
+ expect(filter.matches?(:push_hooks, { ref: 'refs/heads/master' })).to be true
+ end
+ end
+
+ context 'branch filter is empty string' do
+ let(:branch_filter) { '' }
+
+ it 'acts as if the branch is not specified' do
+ expect(filter.matches?(:push_hooks, { ref: 'refs/heads/master' })).to be true
+ end
+ end
+ end
+
+ context 'for non-push-events hooks' do
+ let(:hook) do
+ create(:project_hook, issues_events: true, push_events: false, push_events_branch_filter: '')
+ end
+
+ it 'returns true as branch filters are not yet supported for these' do
+ expect(filter.matches?(:issues_events, { ref: 'refs/heads/master' })).to be true
+ end
+ end
+ end
+end
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index ea6d6e53ef5..a4181631f01 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -35,6 +35,26 @@ describe WebHook do
it { is_expected.not_to allow_values("foo\nbar", "foo\r\nbar").for(:token) }
end
+
+ describe 'push_events_branch_filter' do
+ it { is_expected.to allow_values("good_branch_name", "another/good-branch_name").for(:push_events_branch_filter) }
+ it { is_expected.to allow_values("").for(:push_events_branch_filter) }
+ it { is_expected.not_to allow_values("bad branch name", "bad~branchname").for(:push_events_branch_filter) }
+
+ it 'strips leading and trailing whitespace' do
+ hook.push_events_branch_filter = ' branch '
+ hook.save
+
+ expect(hook.push_events_branch_filter).to eq('branch')
+ end
+
+ it 'stores a whitespace-only value as empty' do
+ hook.push_events_branch_filter = ' '
+ hook.save
+
+ expect(hook.push_events_branch_filter).to eq('')
+ end
+ end
end
describe 'execute' do
diff --git a/spec/models/project_services/chat_notification_service_spec.rb b/spec/models/project_services/chat_notification_service_spec.rb
index 3aa1039d8bf..46713df77da 100644
--- a/spec/models/project_services/chat_notification_service_spec.rb
+++ b/spec/models/project_services/chat_notification_service_spec.rb
@@ -26,4 +26,54 @@ describe ChatNotificationService do
end
end
end
+
+ describe '#execute' do
+ let(:chat_service) { described_class.new }
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:webhook_url) { 'https://example.gitlab.com/' }
+
+ before do
+ allow(chat_service).to receive_messages(
+ project: project,
+ project_id: project.id,
+ service_hook: true,
+ webhook: webhook_url
+ )
+
+ WebMock.stub_request(:post, webhook_url)
+
+ subject.active = true
+ end
+
+ context 'with a repository' do
+ it 'returns true' do
+ subject.project = project
+ data = Gitlab::DataBuilder::Push.build_sample(project, user)
+
+ expect(Slack::Notifier).to receive(:new)
+ .with(webhook_url, {})
+ .and_return(
+ double(:slack_service).as_null_object
+ )
+
+ expect(chat_service.execute(data)).to be true
+ end
+ end
+
+ context 'with an empty repository' do
+ it 'returns true' do
+ subject.project = create(:project, :empty_repo)
+ data = Gitlab::DataBuilder::Push.build_sample(subject.project, user)
+
+ expect(Slack::Notifier).to receive(:new)
+ .with(webhook_url, {})
+ .and_return(
+ double(:slack_service).as_null_object
+ )
+
+ expect(chat_service.execute(data)).to be true
+ end
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 7cfffbde42f..264632dba4b 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -3734,21 +3734,45 @@ describe Project do
end
describe '#execute_hooks' do
- it 'executes the projects hooks with the specified scope' do
- hook1 = create(:project_hook, merge_requests_events: true, tag_push_events: false)
- hook2 = create(:project_hook, merge_requests_events: false, tag_push_events: true)
- project = create(:project, hooks: [hook1, hook2])
+ let(:data) { { ref: 'refs/heads/master', data: 'data' } }
+ it 'executes active projects hooks with the specified scope' do
+ hook = create(:project_hook, merge_requests_events: false, push_events: true)
+ expect(ProjectHook).to receive(:select_active)
+ .with(:push_hooks, data)
+ .and_return([hook])
+ project = create(:project, hooks: [hook])
expect_any_instance_of(ProjectHook).to receive(:async_execute).once
- project.execute_hooks({}, :tag_push_hooks)
+ project.execute_hooks(data, :push_hooks)
+ end
+
+ it 'does not execute project hooks that do not match the specified scope' do
+ hook = create(:project_hook, merge_requests_events: true, push_events: false)
+ project = create(:project, hooks: [hook])
+
+ expect_any_instance_of(ProjectHook).not_to receive(:async_execute).once
+
+ project.execute_hooks(data, :push_hooks)
+ end
+
+ it 'does not execute project hooks which are not active' do
+ hook = create(:project_hook, push_events: true)
+ expect(ProjectHook).to receive(:select_active)
+ .with(:push_hooks, data)
+ .and_return([])
+ project = create(:project, hooks: [hook])
+
+ expect_any_instance_of(ProjectHook).not_to receive(:async_execute).once
+
+ project.execute_hooks(data, :push_hooks)
end
it 'executes the system hooks with the specified scope' do
- expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with({ data: 'data' }, :merge_request_hooks)
+ expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with(data, :merge_request_hooks)
project = build(:project)
- project.execute_hooks({ data: 'data' }, :merge_request_hooks)
+ project.execute_hooks(data, :merge_request_hooks)
end
it 'executes the system hooks when inside a transaction' do
@@ -3763,7 +3787,7 @@ describe Project do
# actually get to the `after_commit` hook that queues these jobs.
expect do
project.transaction do
- project.execute_hooks({ data: 'data' }, :merge_request_hooks)
+ project.execute_hooks(data, :merge_request_hooks)
end
end.not_to raise_error # Sidekiq::Worker::EnqueueFromTransactionError
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index fd99acb3bb2..2a7aff39240 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -405,6 +405,23 @@ describe User do
end
end
end
+
+ describe '.by_username' do
+ it 'finds users regardless of the case passed' do
+ user = create(:user, username: 'CaMeLcAsEd')
+ user2 = create(:user, username: 'UPPERCASE')
+
+ expect(described_class.by_username(%w(CAMELCASED uppercase)))
+ .to contain_exactly(user, user2)
+ end
+
+ it 'finds a single user regardless of the case passed' do
+ user = create(:user, username: 'CaMeLcAsEd')
+
+ expect(described_class.by_username('CAMELCASED'))
+ .to contain_exactly(user)
+ end
+ end
end
describe "Respond to" do
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 793b724bfca..93e85b3a6fa 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -112,6 +112,7 @@ describe IssuePolicy do
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project, assignees: [assignee], author: author) }
let(:issue_no_assignee) { create(:issue, project: project) }
+ let(:issue_locked) { create(:issue, project: project, discussion_locked: true, author: author, assignees: [assignee]) }
before do
project.add_guest(guest)
@@ -124,36 +125,49 @@ describe IssuePolicy do
it 'allows guests to read issues' do
expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid)
- expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue)
+ expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue)
expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
- expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
+ expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue)
+
+ expect(permissions(guest, issue_locked)).to be_allowed(:read_issue, :read_issue_iid)
+ expect(permissions(guest, issue_locked)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue)
end
- it 'allows reporters to read, update, and admin issues' do
- expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
- expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
+ it 'allows reporters to read, update, reopen, and admin issues' do
+ expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue)
+ expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue)
+ expect(permissions(reporter, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
+ expect(permissions(reporter, issue_locked)).to be_disallowed(:reopen_issue)
end
- it 'allows reporters from group links to read, update, and admin issues' do
- expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
- expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
+ it 'allows reporters from group links to read, update, reopen and admin issues' do
+ expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue)
+ expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue)
+ expect(permissions(reporter_from_group_link, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, issue_locked)).to be_disallowed(:reopen_issue)
end
- it 'allows issue authors to read and update their issues' do
- expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ it 'allows issue authors to read, reopen and update their issues' do
+ expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :reopen_issue)
expect(permissions(author, issue)).to be_disallowed(:admin_issue)
expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
- expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
+ expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue)
+
+ expect(permissions(author, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(author, issue_locked)).to be_disallowed(:admin_issue, :reopen_issue)
end
- it 'allows issue assignees to read and update their issues' do
- expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ it 'allows issue assignees to read, reopen and update their issues' do
+ expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :reopen_issue)
expect(permissions(assignee, issue)).to be_disallowed(:admin_issue)
expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
- expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
+ expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue)
+
+ expect(permissions(assignee, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(assignee, issue_locked)).to be_disallowed(:admin_issue, :reopen_issue)
end
context 'with confidential issues' do
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 01085dbcb49..d9fb27e101e 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -159,39 +159,76 @@ describe ProjectPresenter do
end
end
+ context 'statistics anchors (empty repo)' do
+ let(:project) { create(:project, :empty_repo) }
+ let(:presenter) { described_class.new(project, current_user: user) }
+
+ describe '#files_anchor_data' do
+ it 'returns files data' do
+ expect(presenter.files_anchor_data).to have_attributes(enabled: true,
+ label: 'Files (0 Bytes)',
+ link: nil)
+ end
+ end
+
+ describe '#commits_anchor_data' do
+ it 'returns commits data' do
+ expect(presenter.commits_anchor_data).to have_attributes(enabled: true,
+ label: 'Commits (0)',
+ link: nil)
+ end
+ end
+
+ describe '#branches_anchor_data' do
+ it 'returns branches data' do
+ expect(presenter.branches_anchor_data).to have_attributes(enabled: true,
+ label: "Branches (0)",
+ link: nil)
+ end
+ end
+
+ describe '#tags_anchor_data' do
+ it 'returns tags data' do
+ expect(presenter.tags_anchor_data).to have_attributes(enabled: true,
+ label: "Tags (0)",
+ link: nil)
+ end
+ end
+ end
+
context 'statistics anchors' do
let(:project) { create(:project, :repository) }
let(:presenter) { described_class.new(project, current_user: user) }
describe '#files_anchor_data' do
it 'returns files data' do
- expect(presenter.files_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Files (0 Bytes)',
- link: presenter.project_tree_path(project)))
+ expect(presenter.files_anchor_data).to have_attributes(enabled: true,
+ label: 'Files (0 Bytes)',
+ link: presenter.project_tree_path(project))
end
end
describe '#commits_anchor_data' do
it 'returns commits data' do
- expect(presenter.commits_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Commits (0)',
- link: presenter.project_commits_path(project, project.repository.root_ref)))
+ expect(presenter.commits_anchor_data).to have_attributes(enabled: true,
+ label: 'Commits (0)',
+ link: presenter.project_commits_path(project, project.repository.root_ref))
end
end
describe '#branches_anchor_data' do
it 'returns branches data' do
- expect(presenter.branches_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: "Branches (#{project.repository.branches.size})",
- link: presenter.project_branches_path(project)))
+ expect(presenter.branches_anchor_data).to have_attributes(enabled: true,
+ label: "Branches (#{project.repository.branches.size})",
+ link: presenter.project_branches_path(project))
end
end
describe '#tags_anchor_data' do
it 'returns tags data' do
- expect(presenter.tags_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: "Tags (#{project.repository.tags.size})",
- link: presenter.project_tags_path(project)))
+ expect(presenter.tags_anchor_data).to have_attributes(enabled: true,
+ label: "Tags (#{project.repository.tags.size})",
+ link: presenter.project_tags_path(project))
end
end
@@ -199,10 +236,10 @@ describe ProjectPresenter do
it 'returns new file data if user can push' do
project.add_developer(user)
- expect(presenter.new_file_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: "New file",
- link: presenter.project_new_blob_path(project, 'master'),
- class_modifier: 'new'))
+ expect(presenter.new_file_anchor_data).to have_attributes(enabled: false,
+ label: "New file",
+ link: presenter.project_new_blob_path(project, 'master'),
+ class_modifier: 'new')
end
it 'returns nil if user cannot push' do
@@ -227,9 +264,9 @@ describe ProjectPresenter do
project.add_developer(user)
allow(project.repository).to receive(:readme).and_return(nil)
- expect(presenter.readme_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: 'Add Readme',
- link: presenter.add_readme_path))
+ expect(presenter.readme_anchor_data).to have_attributes(enabled: false,
+ label: 'Add Readme',
+ link: presenter.add_readme_path)
end
end
@@ -237,9 +274,9 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:readme).and_return(double(name: 'readme'))
- expect(presenter.readme_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Readme',
- link: presenter.readme_path))
+ expect(presenter.readme_anchor_data).to have_attributes(enabled: true,
+ label: 'Readme',
+ link: presenter.readme_path)
end
end
end
@@ -250,9 +287,9 @@ describe ProjectPresenter do
project.add_developer(user)
allow(project.repository).to receive(:changelog).and_return(nil)
- expect(presenter.changelog_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: 'Add Changelog',
- link: presenter.add_changelog_path))
+ expect(presenter.changelog_anchor_data).to have_attributes(enabled: false,
+ label: 'Add Changelog',
+ link: presenter.add_changelog_path)
end
end
@@ -260,9 +297,9 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:changelog).and_return(double(name: 'foo'))
- expect(presenter.changelog_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Changelog',
- link: presenter.changelog_path))
+ expect(presenter.changelog_anchor_data).to have_attributes(enabled: true,
+ label: 'Changelog',
+ link: presenter.changelog_path)
end
end
end
@@ -273,9 +310,9 @@ describe ProjectPresenter do
project.add_developer(user)
allow(project.repository).to receive(:license_blob).and_return(nil)
- expect(presenter.license_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: 'Add License',
- link: presenter.add_license_path))
+ expect(presenter.license_anchor_data).to have_attributes(enabled: false,
+ label: 'Add license',
+ link: presenter.add_license_path)
end
end
@@ -283,9 +320,9 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:license_blob).and_return(double(name: 'foo'))
- expect(presenter.license_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: presenter.license_short_name,
- link: presenter.license_path))
+ expect(presenter.license_anchor_data).to have_attributes(enabled: true,
+ label: presenter.license_short_name,
+ link: presenter.license_path)
end
end
end
@@ -296,9 +333,9 @@ describe ProjectPresenter do
project.add_developer(user)
allow(project.repository).to receive(:contribution_guide).and_return(nil)
- expect(presenter.contribution_guide_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: 'Add Contribution guide',
- link: presenter.add_contribution_guide_path))
+ expect(presenter.contribution_guide_anchor_data).to have_attributes(enabled: false,
+ label: 'Add Contribution guide',
+ link: presenter.add_contribution_guide_path)
end
end
@@ -306,9 +343,9 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:contribution_guide).and_return(double(name: 'foo'))
- expect(presenter.contribution_guide_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Contribution guide',
- link: presenter.contribution_guide_path))
+ expect(presenter.contribution_guide_anchor_data).to have_attributes(enabled: true,
+ label: 'Contribution guide',
+ link: presenter.contribution_guide_path)
end
end
end
@@ -318,9 +355,9 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project).to receive(:auto_devops_enabled?).and_return(true)
- expect(presenter.autodevops_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Auto DevOps enabled',
- link: nil))
+ expect(presenter.autodevops_anchor_data).to have_attributes(enabled: true,
+ label: 'Auto DevOps enabled',
+ link: nil)
end
end
@@ -330,9 +367,9 @@ describe ProjectPresenter do
allow(project).to receive(:auto_devops_enabled?).and_return(false)
allow(project.repository).to receive(:gitlab_ci_yml).and_return(nil)
- expect(presenter.autodevops_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: 'Enable Auto DevOps',
- link: presenter.project_settings_ci_cd_path(project, anchor: 'autodevops-settings')))
+ expect(presenter.autodevops_anchor_data).to have_attributes(enabled: false,
+ label: 'Enable Auto DevOps',
+ link: presenter.project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
end
end
end
@@ -343,9 +380,9 @@ describe ProjectPresenter do
project.add_maintainer(user)
cluster = create(:cluster, projects: [project])
- expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Kubernetes configured',
- link: presenter.project_cluster_path(project, cluster)))
+ expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(enabled: true,
+ label: 'Kubernetes configured',
+ link: presenter.project_cluster_path(project, cluster))
end
it 'returns link to clusters page if more than one exists' do
@@ -353,17 +390,17 @@ describe ProjectPresenter do
create(:cluster, :production_environment, projects: [project])
create(:cluster, projects: [project])
- expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: true,
- label: 'Kubernetes configured',
- link: presenter.project_clusters_path(project)))
+ expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(enabled: true,
+ label: 'Kubernetes configured',
+ link: presenter.project_clusters_path(project))
end
it 'returns link to create a cluster if no cluster exists' do
project.add_maintainer(user)
- expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: 'Add Kubernetes cluster',
- link: presenter.new_project_cluster_path(project)))
+ expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(enabled: false,
+ label: 'Add Kubernetes cluster',
+ link: presenter.new_project_cluster_path(project))
end
end
@@ -380,9 +417,9 @@ describe ProjectPresenter do
allow(project.repository).to receive(:koding_yml).and_return(nil)
allow(Gitlab::CurrentSettings).to receive(:koding_enabled?).and_return(true)
- expect(presenter.koding_anchor_data).to eq(OpenStruct.new(enabled: false,
- label: 'Set up Koding',
- link: presenter.add_koding_stack_path))
+ expect(presenter.koding_anchor_data).to have_attributes(enabled: false,
+ label: 'Set up Koding',
+ link: presenter.add_koding_stack_path)
end
it 'returns nil if user cannot push' do
diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb
index 85c93f35c20..6890f46c724 100644
--- a/spec/requests/api/internal_spec.rb
+++ b/spec/requests/api/internal_spec.rb
@@ -381,7 +381,7 @@ describe API::Internal do
it do
pull(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(401)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
@@ -391,13 +391,61 @@ describe API::Internal do
it do
push(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(401)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
end
end
+ context "custom action" do
+ let(:access_checker) { double(Gitlab::GitAccess) }
+ let(:message) { 'CustomActionError message' }
+ let(:payload) do
+ {
+ 'action' => 'geo_proxy_to_primary',
+ 'data' => {
+ 'api_endpoints' => %w{geo/proxy_git_push_ssh/info_refs geo/proxy_git_push_ssh/push},
+ 'gl_username' => 'testuser',
+ 'primary_repo' => 'http://localhost:3000/testuser/repo.git'
+ }
+ }
+ end
+
+ let(:custom_action_result) { Gitlab::GitAccessResult::CustomAction.new(payload, message) }
+
+ before do
+ project.add_guest(user)
+ expect(Gitlab::GitAccess).to receive(:new).with(
+ key,
+ project,
+ 'ssh',
+ {
+ authentication_abilities: [:read_project, :download_code, :push_code],
+ namespace_path: project.namespace.name,
+ project_path: project.path,
+ redirected_path: nil
+ }
+ ).and_return(access_checker)
+ expect(access_checker).to receive(:check).with(
+ 'git-receive-pack',
+ 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
+ ).and_return(custom_action_result)
+ end
+
+ context "git push" do
+ it do
+ push(key, project)
+
+ expect(response).to have_gitlab_http_status(300)
+ expect(json_response['status']).to be_truthy
+ expect(json_response['message']).to eql(message)
+ expect(json_response['payload']).to eql(payload)
+ expect(user.reload.last_activity_on).to be_nil
+ end
+ end
+ end
+
context "blocked user" do
let(:personal_project) { create(:project, namespace: user.namespace) }
@@ -409,7 +457,7 @@ describe API::Internal do
it do
pull(key, personal_project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(401)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
@@ -419,7 +467,7 @@ describe API::Internal do
it do
push(key, personal_project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(401)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
@@ -445,7 +493,7 @@ describe API::Internal do
it do
push(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(401)
expect(json_response["status"]).to be_falsey
end
end
@@ -477,7 +525,7 @@ describe API::Internal do
it do
archive(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(404)
expect(json_response["status"]).to be_falsey
end
end
@@ -489,7 +537,7 @@ describe API::Internal do
pull(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(404)
expect(json_response["status"]).to be_falsey
end
end
@@ -498,7 +546,7 @@ describe API::Internal do
it do
pull(OpenStruct.new(id: 0), project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(404)
expect(json_response["status"]).to be_falsey
end
end
@@ -511,7 +559,7 @@ describe API::Internal do
it 'rejects the SSH push' do
push(key, project)
- expect(response.status).to eq(200)
+ expect(response.status).to eq(401)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over SSH is not allowed'
end
@@ -519,7 +567,7 @@ describe API::Internal do
it 'rejects the SSH pull' do
pull(key, project)
- expect(response.status).to eq(200)
+ expect(response.status).to eq(401)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over SSH is not allowed'
end
@@ -533,7 +581,7 @@ describe API::Internal do
it 'rejects the HTTP push' do
push(key, project, 'http')
- expect(response.status).to eq(200)
+ expect(response.status).to eq(401)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over HTTP is not allowed'
end
@@ -541,7 +589,7 @@ describe API::Internal do
it 'rejects the HTTP pull' do
pull(key, project, 'http')
- expect(response.status).to eq(200)
+ expect(response.status).to eq(401)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq 'Git access over HTTP is not allowed'
end
@@ -571,14 +619,14 @@ describe API::Internal do
it 'rejects the push' do
push(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(404)
expect(json_response['status']).to be_falsy
end
it 'rejects the SSH pull' do
pull(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(404)
expect(json_response['status']).to be_falsy
end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 4de834bf93a..e987eee6e91 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -353,6 +353,15 @@ describe API::MergeRequests do
end
end
+ it 'returns the number of commits behind the target branch when include_diverged_commits_count is present' do
+ allow_any_instance_of(merge_request.class).to receive(:diverged_commits_count).and_return(1)
+
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), include_diverged_commits_count: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['diverged_commits_count']).to eq(1)
+ end
+
it "returns a 404 error if merge_request_iid not found" do
get api("/projects/#{project.id}/merge_requests/999", user)
expect(response).to have_gitlab_http_status(404)
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index d3f81cc038d..87997a48dc9 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -9,7 +9,8 @@ describe API::ProjectHooks, 'ProjectHooks' do
:all_events_enabled,
project: project,
url: 'http://example.com',
- enable_ssl_verification: true)
+ enable_ssl_verification: true,
+ push_events_branch_filter: 'master')
end
before do
@@ -38,6 +39,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response.first['pipeline_events']).to eq(true)
expect(json_response.first['wiki_page_events']).to eq(true)
expect(json_response.first['enable_ssl_verification']).to eq(true)
+ expect(json_response.first['push_events_branch_filter']).to eq('master')
end
end
@@ -90,7 +92,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect do
post api("/projects/#{project.id}/hooks", user),
url: "http://example.com", issues_events: true, confidential_issues_events: true, wiki_page_events: true,
- job_events: true
+ job_events: true, push_events_branch_filter: 'some-feature-branch'
end.to change {project.hooks.count}.by(1)
expect(response).to have_gitlab_http_status(201)
@@ -106,6 +108,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['pipeline_events']).to eq(false)
expect(json_response['wiki_page_events']).to eq(true)
expect(json_response['enable_ssl_verification']).to eq(true)
+ expect(json_response['push_events_branch_filter']).to eq('some-feature-branch')
expect(json_response).not_to include('token')
end
@@ -132,7 +135,12 @@ describe API::ProjectHooks, 'ProjectHooks' do
end
it "returns a 422 error if url not valid" do
- post api("/projects/#{project.id}/hooks", user), "url" => "ftp://example.com"
+ post api("/projects/#{project.id}/hooks", user), url: "ftp://example.com"
+ expect(response).to have_gitlab_http_status(422)
+ end
+
+ it "returns a 422 error if branch filter is not valid" do
+ post api("/projects/#{project.id}/hooks", user), url: "http://example.com", push_events_branch_filter: '~badbranchname/'
expect(response).to have_gitlab_http_status(422)
end
end
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index 489cb001b82..c40d01e1a14 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -139,6 +139,27 @@ describe API::Wikis do
end
end
+ shared_examples_for 'uploads wiki attachment' do
+ it 'pushes attachment to the wiki repository' do
+ allow(SecureRandom).to receive(:hex).and_return('fixed_hex')
+
+ post(api(url, user), payload)
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response).to eq result_hash.deep_stringify_keys
+ end
+
+ it 'responds with validation error on empty file' do
+ payload.delete(:file)
+
+ post(api(url, user), payload)
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response.size).to eq(1)
+ expect(json_response['error']).to eq('file is missing')
+ end
+ end
+
describe 'GET /projects/:id/wikis' do
let(:url) { "/projects/#{project.id}/wikis" }
@@ -698,4 +719,107 @@ describe API::Wikis do
include_examples '204 No Content'
end
end
+
+ describe 'POST /projects/:id/wikis/attachments' do
+ let(:payload) { { file: fixture_file_upload('spec/fixtures/dk.png') } }
+ let(:url) { "/projects/#{project.id}/wikis/attachments" }
+ let(:file_path) { "#{Wikis::CreateAttachmentService::ATTACHMENT_PATH}/fixed_hex/dk.png" }
+ let(:result_hash) do
+ {
+ file_name: 'dk.png',
+ file_path: file_path,
+ branch: 'master',
+ link: {
+ url: file_path,
+ markdown: "![dk](#{file_path})"
+ }
+ }
+ end
+
+ context 'when wiki is disabled' do
+ let(:project) { create(:project, :wiki_disabled, :wiki_repo) }
+
+ context 'when user is guest' do
+ before do
+ post(api(url), payload)
+ end
+
+ include_examples '404 Project Not Found'
+ end
+
+ context 'when user is developer' do
+ before do
+ project.add_developer(user)
+ post(api(url, user), payload)
+ end
+
+ include_examples '403 Forbidden'
+ end
+
+ context 'when user is maintainer' do
+ before do
+ project.add_maintainer(user)
+ post(api(url, user), payload)
+ end
+
+ include_examples '403 Forbidden'
+ end
+ end
+
+ context 'when wiki is available only for team members' do
+ let(:project) { create(:project, :wiki_private, :wiki_repo) }
+
+ context 'when user is guest' do
+ before do
+ post(api(url), payload)
+ end
+
+ include_examples '404 Project Not Found'
+ end
+
+ context 'when user is developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ include_examples 'uploads wiki attachment'
+ end
+
+ context 'when user is maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ include_examples 'uploads wiki attachment'
+ end
+ end
+
+ context 'when wiki is available for everyone with access' do
+ let(:project) { create(:project, :wiki_repo) }
+
+ context 'when user is guest' do
+ before do
+ post(api(url), payload)
+ end
+
+ include_examples '404 Project Not Found'
+ end
+
+ context 'when user is developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ include_examples 'uploads wiki attachment'
+ end
+
+ context 'when user is maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ include_examples 'uploads wiki attachment'
+ end
+ end
+ end
end
diff --git a/spec/rubocop/cop/line_break_around_conditional_block_spec.rb b/spec/rubocop/cop/line_break_around_conditional_block_spec.rb
index 03eeffe6483..892b393c307 100644
--- a/spec/rubocop/cop/line_break_around_conditional_block_spec.rb
+++ b/spec/rubocop/cop/line_break_around_conditional_block_spec.rb
@@ -328,6 +328,22 @@ describe RuboCop::Cop::LineBreakAroundConditionalBlock do
expect(cop.offenses).to be_empty
end
+ it "doesn't flag violation for #{conditional} preceded by a rescue" do
+ source = <<~RUBY
+ def a_method
+ do_something
+ rescue
+ #{conditional} condition
+ do_something
+ end
+ end
+ RUBY
+
+ inspect_source(source)
+
+ expect(cop.offenses).to be_empty
+ end
+
it "doesn't flag violation for #{conditional} followed by a rescue" do
source = <<~RUBY
def a_method
diff --git a/spec/serializers/diff_file_entity_spec.rb b/spec/serializers/diff_file_entity_spec.rb
index 00b2146dc86..3d90ce44dfb 100644
--- a/spec/serializers/diff_file_entity_spec.rb
+++ b/spec/serializers/diff_file_entity_spec.rb
@@ -67,4 +67,21 @@ describe DiffFileEntity do
end
end
end
+
+ context '#parallel_diff_lines' do
+ it 'exposes parallel diff lines correctly' do
+ response = subject
+
+ lines = response[:parallel_diff_lines]
+
+ # make sure at least one line is present for each side
+ expect(lines.map { |line| line[:right] }.compact).to be_present
+ expect(lines.map { |line| line[:left] }.compact).to be_present
+ # make sure all lines are in correct format
+ lines.each do |parallel_line|
+ expect(parallel_line[:left].as_json).to match_schema('entities/diff_line') if parallel_line[:left]
+ expect(parallel_line[:right].as_json).to match_schema('entities/diff_line') if parallel_line[:right]
+ end
+ end
+ end
end
diff --git a/spec/serializers/diff_line_serializer_spec.rb b/spec/serializers/diff_line_serializer_spec.rb
new file mode 100644
index 00000000000..6dd8abd0579
--- /dev/null
+++ b/spec/serializers/diff_line_serializer_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe DiffLineSerializer do
+ let(:line) { Gitlab::Diff::Line.new('hello world', 'new', 1, nil, 1) }
+ let(:serializer) { described_class.new.represent(line) }
+
+ describe '#to_json' do
+ subject { serializer.to_json }
+
+ it 'matches the schema' do
+ expect(subject).to match_schema('entities/diff_line')
+ end
+
+ context 'when lines are parallel' do
+ let(:right_line) { Gitlab::Diff::Line.new('right line', 'new', 1, nil, 1) }
+ let(:left_line) { Gitlab::Diff::Line.new('left line', 'match', 1, nil, 1) }
+ let(:parallel_line) { [{ right: right_line, left: left_line }] }
+ let(:serializer) { described_class.new.represent(parallel_line, {}, DiffLineParallelEntity) }
+
+ it 'matches the schema' do
+ expect(subject).to match_schema('entities/diff_line_parallel')
+ end
+ end
+ end
+end
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index 507909d9231..b69977c812a 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -101,4 +101,11 @@ describe PreviewMarkdownService do
expect(result[:markdown_engine]).to eq :common_mark
end
+
+ it 'honors the legacy_render parameter' do
+ service = described_class.new(project, user, { legacy_render: '1' })
+ result = service.execute
+
+ expect(result[:markdown_engine]).to eq :redcarpet
+ end
end
diff --git a/spec/services/wikis/create_attachment_service_spec.rb b/spec/services/wikis/create_attachment_service_spec.rb
new file mode 100644
index 00000000000..3f4da873ce4
--- /dev/null
+++ b/spec/services/wikis/create_attachment_service_spec.rb
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Wikis::CreateAttachmentService do
+ let(:project) { create(:project, :wiki_repo) }
+ let(:user) { create(:user) }
+ let(:file_name) { 'filename.txt' }
+ let(:file_path_regex) { %r{#{described_class::ATTACHMENT_PATH}/\h{32}/#{file_name}} }
+
+ let(:file_opts) do
+ {
+ file_name: file_name,
+ file_content: 'Content of attachment'
+ }
+ end
+ let(:opts) { file_opts }
+
+ subject(:service) { described_class.new(project, user, opts) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe 'initialization' do
+ context 'author commit info' do
+ it 'does not raise error if user is nil' do
+ service = described_class.new(project, nil, opts)
+
+ expect(service.instance_variable_get(:@author_email)).to be_nil
+ expect(service.instance_variable_get(:@author_name)).to be_nil
+ end
+
+ it 'fills file_path from the repository uploads folder' do
+ expect(service.instance_variable_get(:@file_path)).to match(file_path_regex)
+ end
+
+ context 'when no author info provided' do
+ it 'fills author_email and author_name from current_user info' do
+ expect(service.instance_variable_get(:@author_email)).to eq user.email
+ expect(service.instance_variable_get(:@author_name)).to eq user.name
+ end
+ end
+
+ context 'when author info provided' do
+ let(:author_email) { 'author_email' }
+ let(:author_name) { 'author_name' }
+ let(:opts) { file_opts.merge(author_email: author_email, author_name: author_name) }
+
+ it 'fills author_email and author_name from params' do
+ expect(service.instance_variable_get(:@author_email)).to eq author_email
+ expect(service.instance_variable_get(:@author_name)).to eq author_name
+ end
+ end
+ end
+
+ context 'commit message' do
+ context 'when no commit message provided' do
+ it 'sets a default commit message' do
+ expect(service.instance_variable_get(:@commit_message)).to eq "Upload attachment #{opts[:file_name]}"
+ end
+ end
+
+ context 'when commit message provided' do
+ let(:commit_message) { 'whatever' }
+ let(:opts) { file_opts.merge(commit_message: commit_message) }
+
+ it 'uses the commit message from params' do
+ expect(service.instance_variable_get(:@commit_message)).to eq commit_message
+ end
+ end
+ end
+
+ context 'branch name' do
+ context 'when no branch provided' do
+ it 'sets the branch from the wiki default_branch' do
+ expect(service.instance_variable_get(:@branch_name)).to eq project.wiki.default_branch
+ end
+ end
+
+ context 'when branch provided' do
+ let(:branch_name) { 'whatever' }
+ let(:opts) { file_opts.merge(branch_name: branch_name) }
+
+ it 'uses the branch name from params' do
+ expect(service.instance_variable_get(:@branch_name)).to eq branch_name
+ end
+ end
+ end
+ end
+
+ describe 'validations' do
+ context 'when file_name' do
+ context 'is not present' do
+ let(:file_name) { nil }
+
+ it 'returns error' do
+ result = service.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq 'The file name cannot be empty'
+ end
+ end
+
+ context 'length' do
+ context 'is bigger than 255' do
+ let(:file_name) { "#{'0' * 256}.jpg" }
+
+ it 'truncates file name' do
+ result = service.execute
+
+ expect(result[:status]).to eq :success
+ expect(result[:result][:file_name].length).to eq 255
+ expect(result[:result][:file_name]).to match(/0{251}\.jpg/)
+ end
+ end
+
+ context 'is less than or equal to 255' do
+ let(:file_name) { '0' * 255 }
+
+ it 'does not return error' do
+ result = service.execute
+
+ expect(result[:status]).to eq :success
+ end
+ end
+ end
+ end
+
+ context 'when user' do
+ shared_examples 'wiki attachment user validations' do
+ it 'returns error' do
+ result = described_class.new(project, user2, opts).execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq 'You are not allowed to push to the wiki'
+ end
+ end
+
+ context 'does not have permission' do
+ let(:user2) { create(:user) }
+
+ it_behaves_like 'wiki attachment user validations'
+ end
+
+ context 'is nil' do
+ let(:user2) { nil }
+
+ it_behaves_like 'wiki attachment user validations'
+ end
+ end
+ end
+
+ describe '#execute' do
+ let(:wiki) { project.wiki }
+ subject(:service_execute) { service.execute[:result] }
+
+ context 'creates branch if it does not exist' do
+ let(:branch_name) { 'new_branch' }
+ let(:opts) { file_opts.merge(branch_name: branch_name) }
+
+ it do
+ expect(wiki.repository.branches).to be_empty
+ expect { service.execute }.to change { wiki.repository.branches.count }.by(1)
+ expect(wiki.repository.branches.first.name).to eq branch_name
+ end
+ end
+
+ it 'adds file to the repository' do
+ expect(wiki.repository.ls_files('HEAD')).to be_empty
+
+ service.execute
+
+ files = wiki.repository.ls_files('HEAD')
+ expect(files.count).to eq 1
+ expect(files.first).to match(file_path_regex)
+ end
+
+ context 'returns' do
+ before do
+ allow(SecureRandom).to receive(:hex).and_return('fixed_hex')
+
+ service_execute
+ end
+
+ it 'returns the file name' do
+ expect(service_execute[:file_name]).to eq file_name
+ end
+
+ it 'returns the path where file was stored' do
+ expect(service_execute[:file_path]).to eq 'uploads/fixed_hex/filename.txt'
+ end
+
+ it 'returns the branch where the file was pushed' do
+ expect(service_execute[:branch]).to eq wiki.default_branch
+ end
+
+ it 'returns the commit id' do
+ expect(service_execute[:commit]).not_to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 683a64504a1..994a2aaef90 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -16,6 +16,7 @@ module KubernetesHelpers
def stub_kubeclient_discover(api_url)
WebMock.stub_request(:get, api_url + '/api/v1').to_return(kube_response(kube_v1_discovery_body))
WebMock.stub_request(:get, api_url + '/apis/extensions/v1beta1').to_return(kube_response(kube_v1beta1_discovery_body))
+ WebMock.stub_request(:get, api_url + '/apis/rbac.authorization.k8s.io/v1').to_return(kube_response(kube_v1_rbac_authorization_discovery_body))
end
def stub_kubeclient_pods(response = nil)
@@ -66,7 +67,8 @@ module KubernetesHelpers
"resources" => [
{ "name" => "pods", "namespaced" => true, "kind" => "Pod" },
{ "name" => "deployments", "namespaced" => true, "kind" => "Deployment" },
- { "name" => "secrets", "namespaced" => true, "kind" => "Secret" }
+ { "name" => "secrets", "namespaced" => true, "kind" => "Secret" },
+ { "name" => "services", "namespaced" => true, "kind" => "Service" }
]
}
end
@@ -77,7 +79,20 @@ module KubernetesHelpers
"resources" => [
{ "name" => "pods", "namespaced" => true, "kind" => "Pod" },
{ "name" => "deployments", "namespaced" => true, "kind" => "Deployment" },
- { "name" => "secrets", "namespaced" => true, "kind" => "Secret" }
+ { "name" => "secrets", "namespaced" => true, "kind" => "Secret" },
+ { "name" => "services", "namespaced" => true, "kind" => "Service" }
+ ]
+ }
+ end
+
+ def kube_v1_rbac_authorization_discovery_body
+ {
+ "kind" => "APIResourceList",
+ "resources" => [
+ { "name" => "clusterrolebindings", "namespaced" => false, "kind" => "ClusterRoleBinding" },
+ { "name" => "clusterroles", "namespaced" => false, "kind" => "ClusterRole" },
+ { "name" => "rolebindings", "namespaced" => true, "kind" => "RoleBinding" },
+ { "name" => "roles", "namespaced" => true, "kind" => "Role" }
]
}
end
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index c54a871b157..4061a8d1bc9 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -4,8 +4,8 @@ module StubFeatureFlags
# @param [Hash] features where key is feature name and value is boolean whether enabled or not
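+ # Illustrative usage (a sketch, not part of the original change; `some_feature`
+ # is a hypothetical flag name). With `any_args`, the stub also matches calls
+ # that pass extra arguments such as an actor:
+ #
+ #   stub_feature_flags(some_feature: false)
+ #   Feature.enabled?(:some_feature)          # => false
+ #   Feature.enabled?(:some_feature, project) # => false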
def stub_feature_flags(features)
features.each do |feature_name, enabled|
- allow(Feature).to receive(:enabled?).with(feature_name) { enabled }
- allow(Feature).to receive(:enabled?).with(feature_name.to_s) { enabled }
+ allow(Feature).to receive(:enabled?).with(feature_name, any_args) { enabled }
+ allow(Feature).to receive(:enabled?).with(feature_name.to_s, any_args) { enabled }
end
end
end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 21103771d1f..3f8e3ae5190 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -52,7 +52,8 @@ module TestEnv
'add_images_and_changes' => '010d106',
'update-gitlab-shell-v-6-0-1' => '2f61d70',
'update-gitlab-shell-v-6-0-3' => 'de78448',
- '2-mb-file' => 'bf12d25'
+ '2-mb-file' => 'bf12d25',
+ 'with-codeowners' => '219560e'
}.freeze
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
diff --git a/spec/support/shared_examples/wiki_file_attachments_examples.rb b/spec/support/shared_examples/wiki_file_attachments_examples.rb
new file mode 100644
index 00000000000..b6fb2a66b0e
--- /dev/null
+++ b/spec/support/shared_examples/wiki_file_attachments_examples.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# project
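+#
+# A minimal usage sketch (illustrative only; the spec name and setup below are
+# assumptions, not part of this change). A :js feature spec that defines
+# `project`, signs in, and opens a wiki Markdown form can include the examples:
+#
+#   describe 'User creates wiki page', :js do
+#     let(:project) { create(:project) }
+#
+#     # ... sign in and visit the wiki edit form ...
+#
+#     it_behaves_like 'wiki file attachments'
+#   end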
+
+shared_examples 'wiki file attachments' do
+ include DropzoneHelper
+
+ context 'uploading attachments', :js do
+ let(:wiki) { project.wiki }
+
+ def attach_with_dropzone(wait = false)
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, wait)
+ end
+
+ context 'before uploading' do
+ it 'shows "Attach a file" button' do
+ expect(page).to have_button('Attach a file')
+ expect(page).not_to have_selector('.uploading-progress-container', visible: true)
+ end
+ end
+
+ context 'uploading is in progress' do
+ it 'cancels uploading when clicking the "Cancel" button' do
+ slow_requests do
+ attach_with_dropzone
+
+ click_button 'Cancel'
+ end
+
+ expect(page).to have_button('Attach a file')
+ expect(page).not_to have_button('Cancel')
+ expect(page).not_to have_selector('.uploading-progress-container', visible: true)
+ end
+
+ it 'shows "Attaching a file" message on uploading 1 file' do
+ slow_requests do
+ attach_with_dropzone
+
+ expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching a file -')
+ end
+ end
+ end
+
+ context 'uploading is complete' do
+ it 'shows "Attach a file" button on uploading complete' do
+ attach_with_dropzone
+ wait_for_requests
+
+ expect(page).to have_button('Attach a file')
+ expect(page).not_to have_selector('.uploading-progress-container', visible: true)
+ end
+
+ it 'adds the markdown link to the page' do
+ fill_in(:wiki_content, with: '')
+ attach_with_dropzone(true)
+ wait_for_requests
+
+ expect(page.find('#wiki_content').value)
+ .to match(%r{\!\[dk\]\(uploads/\h{32}/dk\.png\)$})
+ end
+
+ it 'makes the links point to the wiki root URL' do
+ attach_with_dropzone(true)
+ wait_for_requests
+
+ find('.js-md-preview-button').click
+ file_path = page.find('input[name="files[]"]', visible: :hidden).value
+ link = page.find('a.no-attachment-icon')['href']
+ img_link = page.find('a.no-attachment-icon img')['src']
+
+ expect(link).to eq img_link
+ expect(URI.parse(link).path).to eq File.join(wiki.wiki_base_path, file_path)
+ end
+
+ it 'adds the file to the wiki repository' do
+ expect do
+ attach_with_dropzone(true)
+ wait_for_requests
+ end.to change { wiki.repository.ls_files('HEAD').count }.by(1)
+
+ file_path = page.find('input[name="files[]"]', visible: :hidden).value
+
+ expect(wiki.find_file(file_path, 'HEAD').path).not_to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 3ad5fe7e3b3..061432f082a 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -40,6 +40,53 @@ describe JobArtifactUploader do
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
+ describe '#dynamic_segment' do
+ let(:uploaded_content) { File.binread(Rails.root + 'spec/fixtures/ci_build_artifacts.zip') }
+ let(:model) { uploader.model }
+
+ shared_examples_for 'Read file from legacy path' do
+ it 'store_path returns the legacy path' do
+ expect(model.file.store_path).to eq(File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.job_id.to_s, 'ci_build_artifacts.zip'))
+ end
+
+ it 'has exactly the same content' do
+ expect(::File.binread(model.file.path)).to eq(uploaded_content)
+ end
+ end
+
+ shared_examples_for 'Read file from hashed path' do
+ it 'store_path returns the hashed path' do
+ expect(model.file.store_path).to eq(File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, creation_date, model.job_id.to_s, model.id.to_s, 'ci_build_artifacts.zip'))
+ end
+
+ it 'has exactly the same content' do
+ expect(::File.binread(model.file.path)).to eq(uploaded_content)
+ end
+ end
+
+ context 'when a job artifact is stored in legacy_path' do
+ let(:job_artifact) { create(:ci_job_artifact, :legacy_archive) }
+
+ it_behaves_like 'Read file from legacy path'
+ end
+
+ context 'when the artifact file is stored in hashed_path' do
+ let(:job_artifact) { create(:ci_job_artifact, :archive) }
+ let(:disk_hash) { Digest::SHA2.hexdigest(model.project_id.to_s) }
+ let(:creation_date) { model.created_at.utc.strftime('%Y_%m_%d') }
+
+ it_behaves_like 'Read file from hashed path'
+
+ context 'when file_location column is empty' do
+ before do
+ job_artifact.update_column(:file_location, nil)
+ end
+
+ it_behaves_like 'Read file from hashed path'
+ end
+ end
+ end
+
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload('spec/fixtures/trace/sample_trace'))
diff --git a/spec/uploaders/uploader_helper_spec.rb b/spec/uploaders/uploader_helper_spec.rb
index 33da93cc9d0..fd6712d4645 100644
--- a/spec/uploaders/uploader_helper_spec.rb
+++ b/spec/uploaders/uploader_helper_spec.rb
@@ -11,27 +11,10 @@ describe UploaderHelper do
example_uploader.new
end
- def upload_fixture(filename)
- fixture_file_upload(File.join('spec', 'fixtures', filename))
- end
-
- describe '#image_or_video?' do
- it 'returns true for an image file' do
- uploader.store!(upload_fixture('dk.png'))
-
- expect(uploader).to be_image_or_video
- end
-
- it 'it returns true for a video file' do
- uploader.store!(upload_fixture('video_sample.mp4'))
-
- expect(uploader).to be_image_or_video
- end
-
- it 'returns false for other extensions' do
- uploader.store!(upload_fixture('doc_sample.txt'))
-
- expect(uploader).not_to be_image_or_video
+ describe '#extension_match?' do
+ it 'returns false if the file does not exist' do
+ expect(uploader.file).to be_nil
+ expect(uploader.send(:extension_match?, 'jpg')).to eq false
end
end
end
diff --git a/spec/validators/branch_filter_validator_spec.rb b/spec/validators/branch_filter_validator_spec.rb
new file mode 100644
index 00000000000..3be54827431
--- /dev/null
+++ b/spec/validators/branch_filter_validator_spec.rb
@@ -0,0 +1,42 @@
+require 'spec_helper'
+
+describe BranchFilterValidator do
+ let(:validator) { described_class.new(attributes: [:push_events_branch_filter]) }
+ let(:hook) { build(:project_hook) }
+
+ describe '#validates_each' do
+ it 'allows valid branch names' do
+ validator.validate_each(hook, :push_events_branch_filter, "good_branch_name")
+ validator.validate_each(hook, :push_events_branch_filter, "another/good_branch_name")
+ expect(hook.errors.empty?).to be true
+ end
+
+ it 'disallows bad branch names' do
+ validator.validate_each(hook, :push_events_branch_filter, "bad branch~name")
+ expect(hook.errors[:push_events_branch_filter].empty?).to be false
+ end
+
+ it 'allows wildcards' do
+ validator.validate_each(hook, :push_events_branch_filter, "features/*")
+ validator.validate_each(hook, :push_events_branch_filter, "features/*/bla")
+ validator.validate_each(hook, :push_events_branch_filter, "*-stable")
+ expect(hook.errors.empty?).to be true
+ end
+
+ it 'strips surrounding whitespace' do
+ filter = ' master '
+ validator.validate_each(hook, :push_events_branch_filter, filter)
+
+ expect(filter).to eq 'master'
+ end
+
+ # Branch names can be quite long, but in practice they rarely exceed 255 characters,
+ # so 4000 characters should be enough room for a list of branch names. We can raise
+ # the limit if that ever proves too small.
+ it 'limits length to 4000 chars' do
+ filter = 'a' * 4001
+ validator.validate_each(hook, :push_events_branch_filter, filter)
+
+ expect(hook.errors[:push_events_branch_filter].empty?).to be false
+ end
+ end
+end
diff --git a/spec/views/projects/_home_panel.html.haml_spec.rb b/spec/views/projects/_home_panel.html.haml_spec.rb
index b56940a9613..fc1fe5739c3 100644
--- a/spec/views/projects/_home_panel.html.haml_spec.rb
+++ b/spec/views/projects/_home_panel.html.haml_spec.rb
@@ -9,6 +9,7 @@ describe 'projects/_home_panel' do
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:can?).with(user, :read_project, project).and_return(false)
+ allow(project).to receive(:license_anchor_data).and_return(false)
end
context 'when user is signed in' do
@@ -63,6 +64,7 @@ describe 'projects/_home_panel' do
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:can?).with(user, :read_project, project).and_return(false)
+ allow(project).to receive(:license_anchor_data).and_return(false)
end
context 'has no badges' do
@@ -71,8 +73,7 @@ describe 'projects/_home_panel' do
it 'should not render any badge' do
render
- expect(rendered).to have_selector('.project-badges')
- expect(rendered).not_to have_selector('.project-badges > a')
+ expect(rendered).not_to have_selector('.project-badges')
end
end
@@ -118,6 +119,7 @@ describe 'projects/_home_panel' do
assign(:project, project)
allow(view).to receive(:current_user).and_return(user)
+ allow(project).to receive(:license_anchor_data).and_return(false)
end
context 'user can read project' do