author    | Martin Wortschack <mwortschack@gitlab.com> | 2018-09-11 08:43:10 +0200
committer | Martin Wortschack <mwortschack@gitlab.com> | 2018-09-11 08:43:10 +0200
commit    | a2f7936c74e6c9c4eed7b1c5641f0c43d4ff1375 (patch)
tree      | 82b1ea27fe2a9a1be884467e10d9bde688ce97d1 /spec
parent    | bfd2181310097f5c0a92a43ffc2b7b9540b96ad3 (diff)
parent    | 2f990e3408d00cad473d8dcf8a4e49155cc3cc33 (diff)
download  | gitlab-ce-a2f7936c74e6c9c4eed7b1c5641f0c43d4ff1375.tar.gz
merge master and resolve conflicts
Diffstat (limited to 'spec')
153 files changed, 3719 insertions(+), 1285 deletions(-)
diff --git a/spec/bin/changelog_spec.rb b/spec/bin/changelog_spec.rb index 9dc4edf97d1..c59add88a82 100644 --- a/spec/bin/changelog_spec.rb +++ b/spec/bin/changelog_spec.rb @@ -95,6 +95,7 @@ describe 'bin/changelog' do it 'shows error message and exits the program' do allow($stdin).to receive(:getc).and_return(type) + expect do expect { described_class.read_type }.to raise_error( ChangelogHelpers::Abort, diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb index 9d10d725ff3..10e1bfc30f9 100644 --- a/spec/controllers/admin/application_settings_controller_spec.rb +++ b/spec/controllers/admin/application_settings_controller_spec.rb @@ -78,5 +78,12 @@ describe Admin::ApplicationSettingsController do expect(response).to redirect_to(admin_application_settings_path) expect(ApplicationSetting.current.restricted_visibility_levels).to be_empty end + + it 'updates the receive_max_input_size setting' do + put :update, application_setting: { receive_max_input_size: "1024" } + + expect(response).to redirect_to(admin_application_settings_path) + expect(ApplicationSetting.current.receive_max_input_size).to eq(1024) + end end end diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb index ae49490f31c..65d6cd1a295 100644 --- a/spec/controllers/groups_controller_spec.rb +++ b/spec/controllers/groups_controller_spec.rb @@ -38,14 +38,6 @@ describe GroupsController do project end - context 'as html' do - it 'assigns whether or not a group has children' do - get :show, id: group.to_param - - expect(assigns(:has_children)).to be_truthy - end - end - context 'as atom' do it 'assigns events for all the projects in the group' do create(:event, project: project) diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb index ca7d30fec83..751919f9501 100644 --- a/spec/controllers/projects/jobs_controller_spec.rb +++ b/spec/controllers/projects/jobs_controller_spec.rb @@ -166,8 +166,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do expect(response).to match_response_schema('job/job_details') expect(json_response['artifact']['download_path']).to match(%r{artifacts/download}) expect(json_response['artifact']['browse_path']).to match(%r{artifacts/browse}) - expect(json_response['artifact']).not_to have_key(:expired) - expect(json_response['artifact']).not_to have_key(:expired_at) + expect(json_response['artifact']).not_to have_key('expired') + expect(json_response['artifact']).not_to have_key('expired_at') end end @@ -177,8 +177,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do it 'exposes needed information' do expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('job/job_details') - expect(json_response['artifact']).not_to have_key(:download_path) - expect(json_response['artifact']).not_to have_key(:browse_path) + expect(json_response['artifact']).not_to have_key('download_path') + expect(json_response['artifact']).not_to have_key('browse_path') expect(json_response['artifact']['expired']).to eq(true) expect(json_response['artifact']['expire_at']).not_to be_empty end diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb index 1458113b90c..81badaac76b 100644 --- a/spec/controllers/projects/notes_controller_spec.rb +++ b/spec/controllers/projects/notes_controller_spec.rb @@ -154,7 
+154,7 @@ describe Projects::NotesController do get :index, request_params expect(parsed_response[:notes].count).to eq(1) - expect(note_json[:id]).to eq(note.id) + expect(note_json[:id]).to eq(note.id.to_s) end it 'does not result in N+1 queries' do diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb index 17769a14def..d11e42b411b 100644 --- a/spec/controllers/projects/registry/repositories_controller_spec.rb +++ b/spec/controllers/projects/registry/repositories_controller_spec.rb @@ -86,9 +86,10 @@ describe Projects::Registry::RepositoriesController do stub_container_registry_tags(repository: :any, tags: []) end - it 'deletes a repository' do - expect { delete_repository(repository) }.to change { ContainerRepository.all.count }.by(-1) + it 'schedules a job to delete a repository' do + expect(DeleteContainerRepositoryWorker).to receive(:perform_async).with(user.id, repository.id) + delete_repository(repository) expect(response).to have_gitlab_http_status(:no_content) end end diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb index 325ee53aafb..9802e4d5b1e 100644 --- a/spec/controllers/projects/uploads_controller_spec.rb +++ b/spec/controllers/projects/uploads_controller_spec.rb @@ -18,6 +18,20 @@ describe Projects::UploadsController do end end + context "when exception occurs" do + before do + allow(FileUploader).to receive(:workhorse_authorize).and_raise(SocketError.new) + sign_in(create(:user)) + end + + it "responds with status internal_server_error" do + post_authorize + + expect(response).to have_gitlab_http_status(500) + expect(response.body).to eq('Error uploading file') + end + end + def post_authorize(verified: true) request.headers.merge!(workhorse_internal_api_request_header) if verified diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb index c3a66477b6a..3bc9cbe64c5 100644 --- a/spec/controllers/projects_controller_spec.rb +++ b/spec/controllers/projects_controller_spec.rb @@ -803,37 +803,7 @@ describe ProjectsController do project.add_maintainer(user) end - context 'object storage disabled' do - before do - stub_feature_flags(import_export_object_storage: false) - end - - context 'when project export is enabled' do - it 'returns 302' do - get :download_export, namespace_id: project.namespace, id: project - - expect(response).to have_gitlab_http_status(302) - end - end - - context 'when project export is disabled' do - before do - stub_application_setting(project_export_enabled?: false) - end - - it 'returns 404' do - get :download_export, namespace_id: project.namespace, id: project - - expect(response).to have_gitlab_http_status(404) - end - end - end - context 'object storage enabled' do - before do - stub_feature_flags(import_export_object_storage: true) - end - context 'when project export is enabled' do it 'returns 302' do get :download_export, namespace_id: project.namespace, id: project diff --git a/spec/db/development/import_common_metrics_spec.rb b/spec/db/development/import_common_metrics_spec.rb new file mode 100644 index 00000000000..25061ef0887 --- /dev/null +++ b/spec/db/development/import_common_metrics_spec.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Import metrics on development seed' do + subject { load Rails.root.join('db', 'fixtures', 'development', '99_common_metrics.rb') } + + it "imports all 
prometheus metrics" do + expect(PrometheusMetric.common).to be_empty + + subject + + expect(PrometheusMetric.common).not_to be_empty + end +end diff --git a/spec/db/importers/common_metrics_importer_spec.rb b/spec/db/importers/common_metrics_importer_spec.rb new file mode 100644 index 00000000000..16b59e1dfe8 --- /dev/null +++ b/spec/db/importers/common_metrics_importer_spec.rb @@ -0,0 +1,121 @@ +# frozen_string_literal: true + +require 'rails_helper' +require Rails.root.join("db", "importers", "common_metrics_importer.rb") + +describe Importers::PrometheusMetric do + it 'group enum equals ::PrometheusMetric' do + expect(described_class.groups).to eq(::PrometheusMetric.groups) + end + + it 'GROUP_TITLES equals ::PrometheusMetric' do + expect(described_class::GROUP_TITLES).to eq(::PrometheusMetric::GROUP_TITLES) + end +end + +describe Importers::CommonMetricsImporter do + subject { described_class.new } + + context "does import common_metrics.yml" do + let(:groups) { subject.content } + let(:metrics) { groups.map { |group| group['metrics'] }.flatten } + let(:queries) { metrics.map { |group| group['queries'] }.flatten } + let(:query_ids) { queries.map { |query| query['id'] } } + + before do + subject.execute + end + + it "has the same amount of groups" do + expect(PrometheusMetric.common.group(:group).count.count).to eq(groups.count) + end + + it "has the same amount of metrics" do + expect(PrometheusMetric.common.group(:group, :title).count.count).to eq(metrics.count) + end + + it "has the same amount of queries" do + expect(PrometheusMetric.common.count).to eq(queries.count) + end + + it "does not have duplicate IDs" do + expect(query_ids).to eq(query_ids.uniq) + end + + it "imports all IDs" do + expect(PrometheusMetric.common.pluck(:identifier)).to contain_exactly(*query_ids) + end + end + + context 'does import properly all fields' do + let(:query_identifier) { 'response-metric' } + let(:group) do + { + group: 'Response metrics (NGINX Ingress)', + metrics: [{ + title: "Throughput", + y_label: "Requests / Sec", + queries: [{ + id: query_identifier, + query_range: 'my-query', + unit: 'my-unit', + label: 'status code' + }] + }] + } + end + + before do + expect(subject).to receive(:content) { [group.deep_stringify_keys] } + end + + shared_examples 'stores metric' do + let(:metric) { PrometheusMetric.find_by(identifier: query_identifier) } + + it 'with all data' do + expect(metric.group).to eq('nginx_ingress') + expect(metric.title).to eq('Throughput') + expect(metric.y_label).to eq('Requests / Sec') + expect(metric.unit).to eq('my-unit') + expect(metric.legend).to eq('status code') + expect(metric.query).to eq('my-query') + end + end + + context 'if ID is missing' do + let(:query_identifier) { } + + it 'raises exception' do + expect { subject.execute }.to raise_error(described_class::MissingQueryId) + end + end + + context 'for existing common metric with different ID' do + let!(:existing_metric) { create(:prometheus_metric, :common, identifier: 'my-existing-metric') } + + before do + subject.execute + end + + it_behaves_like 'stores metric' do + it 'and existing metric is not changed' do + expect(metric).not_to eq(existing_metric) + end + end + end + + context 'when metric with ID exists ' do + let!(:existing_metric) { create(:prometheus_metric, :common, identifier: 'response-metric') } + + before do + subject.execute + end + + it_behaves_like 'stores metric' do + it 'and existing metric is changed' do + expect(metric).to eq(existing_metric) + end + end + end + end +end diff --git 
a/spec/db/production/import_common_metrics_spec.rb b/spec/db/production/import_common_metrics_spec.rb new file mode 100644 index 00000000000..1e4ff818a86 --- /dev/null +++ b/spec/db/production/import_common_metrics_spec.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Import metrics on production seed' do + subject { load Rails.root.join('db', 'fixtures', 'production', '999_common_metrics.rb') } + + it "imports all prometheus metrics" do + expect(PrometheusMetric.common).to be_empty + + subject + + expect(PrometheusMetric.common).not_to be_empty + end +end diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb index a6ff226fa75..9fef424e425 100644 --- a/spec/factories/ci/pipelines.rb +++ b/spec/factories/ci/pipelines.rb @@ -77,6 +77,14 @@ FactoryBot.define do pipeline.builds << build(:ci_build, :test_reports, pipeline: pipeline, project: pipeline.project) end end + + trait :auto_devops_source do + config_source { Ci::Pipeline.config_sources[:auto_devops_source] } + end + + trait :repository_source do + config_source { Ci::Pipeline.config_sources[:repository_source] } + end end end end diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb index 17e457c04a5..80801eb1082 100644 --- a/spec/factories/projects.rb +++ b/spec/factories/projects.rb @@ -103,27 +103,11 @@ FactoryBot.define do end trait :with_export do - before(:create) do |_project, _evaluator| - allow(Feature).to receive(:enabled?).with(:import_export_object_storage) { false } - allow(Feature).to receive(:enabled?).with('import_export_object_storage') { false } - end - after(:create) do |project, _evaluator| ProjectExportWorker.new.perform(project.creator.id, project.id) end end - trait :with_object_export do - before(:create) do |_project, _evaluator| - allow(Feature).to receive(:enabled?).with(:import_export_object_storage) { true } - allow(Feature).to receive(:enabled?).with('import_export_object_storage') { true } - end - - after(:create) do |project, evaluator| - ProjectExportWorker.new.perform(project.creator.id, project.id) - end - end - trait :broken_storage do after(:create) do |project| project.update_column(:repository_storage, 'broken') @@ -260,6 +244,10 @@ FactoryBot.define do trait(:repository_enabled) { repository_access_level ProjectFeature::ENABLED } trait(:repository_disabled) { repository_access_level ProjectFeature::DISABLED } trait(:repository_private) { repository_access_level ProjectFeature::PRIVATE } + + trait :auto_devops do + association :auto_devops, factory: :project_auto_devops + end end # Project with empty repository diff --git a/spec/factories/prometheus_metrics.rb b/spec/factories/prometheus_metrics.rb new file mode 100644 index 00000000000..c56644bfb96 --- /dev/null +++ b/spec/factories/prometheus_metrics.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :prometheus_metric, class: PrometheusMetric do + title 'title' + query 'avg(metric)' + y_label 'y_label' + unit 'm/s' + group :business + project + legend 'legend' + + trait :common do + common true + project nil + end + end +end diff --git a/spec/factories/resource_label_events.rb b/spec/factories/resource_label_events.rb index a67ad78c098..739ba901052 100644 --- a/spec/factories/resource_label_events.rb +++ b/spec/factories/resource_label_events.rb @@ -2,9 +2,12 @@ FactoryBot.define do factory :resource_label_event do - user { issue.project.creator } action :add label - issue + user { issuable&.author || create(:user) } + + after(:build) 
do |event, evaluator| + event.issue = create(:issue) unless event.issuable + end end end diff --git a/spec/features/commits/user_uses_slash_commands_spec.rb b/spec/features/commits/user_uses_quick_actions_spec.rb index 9a4b7bd2444..9a4b7bd2444 100644 --- a/spec/features/commits/user_uses_slash_commands_spec.rb +++ b/spec/features/commits/user_uses_quick_actions_spec.rb diff --git a/spec/features/explore/new_menu_spec.rb b/spec/features/explore/new_menu_spec.rb index 0a88988ea09..11f05b6d220 100644 --- a/spec/features/explore/new_menu_spec.rb +++ b/spec/features/explore/new_menu_spec.rb @@ -20,7 +20,7 @@ describe 'Top Plus Menu', :js do click_topmenuitem("New project") - expect(page).to have_content('Project path') + expect(page).to have_content('Project URL') expect(page).to have_content('Project name') end @@ -92,7 +92,7 @@ describe 'Top Plus Menu', :js do find('.header-new-group-project a').click end - expect(page).to have_content('Project path') + expect(page).to have_content('Project URL') expect(page).to have_content('Project name') end end diff --git a/spec/features/instance_statistics/cohorts_spec.rb b/spec/features/instance_statistics/cohorts_spec.rb index 81fc5eff980..40e65515ceb 100644 --- a/spec/features/instance_statistics/cohorts_spec.rb +++ b/spec/features/instance_statistics/cohorts_spec.rb @@ -3,6 +3,8 @@ require 'rails_helper' describe 'Cohorts page' do before do sign_in(create(:admin)) + + stub_application_setting(usage_ping_enabled: true) end it 'See users count per month' do diff --git a/spec/features/issues/resource_label_events_spec.rb b/spec/features/issues/resource_label_events_spec.rb new file mode 100644 index 00000000000..40c452c991a --- /dev/null +++ b/spec/features/issues/resource_label_events_spec.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +require 'rails_helper' + +describe 'List issue resource label events', :js do + let(:user) { create(:user) } + let(:project) { create(:project, :public) } + let(:issue) { create(:issue, project: project, author: user) } + let!(:label) { create(:label, project: project, title: 'foo') } + + context 'when user displays the issue' do + let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue, note: 'some note') } + let!(:event) { create(:resource_label_event, user: user, issue: issue, label: label) } + + before do + visit project_issue_path(project, issue) + wait_for_requests + end + + it 'shows both notes and resource label events' do + page.within('#notes') do + expect(find("#note_#{note.id}")).to have_content 'some note' + expect(find("#note_#{event.discussion_id}")).to have_content 'added foo label' + end + end + end + + context 'when user adds label to the issue' do + def toggle_labels(labels) + page.within '.labels' do + click_link 'Edit' + wait_for_requests + + labels.each { |label| click_link label } + + click_link 'Edit' + wait_for_requests + end + end + + before do + create(:label, project: project, title: 'bar') + project.add_developer(user) + + sign_in(user) + visit project_issue_path(project, issue) + wait_for_requests + end + + it 'shows add note for newly added labels' do + toggle_labels(%w(foo bar)) + visit project_issue_path(project, issue) + wait_for_requests + + page.within('#notes') do + expect(page).to have_content 'added bar foo labels' + end + end + end +end diff --git a/spec/features/issues/user_uses_slash_commands_spec.rb b/spec/features/issues/user_uses_quick_actions_spec.rb index 5926e442f24..5926e442f24 100644 --- 
a/spec/features/issues/user_uses_slash_commands_spec.rb +++ b/spec/features/issues/user_uses_quick_actions_spec.rb diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb index 77261f9375c..b6ed3686de2 100644 --- a/spec/features/merge_request/user_posts_diff_notes_spec.rb +++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb @@ -186,11 +186,8 @@ describe 'Merge request > User posts diff notes', :js do describe 'posting a note' do it 'adds as discussion' do - expect(page).to have_css('.js-temp-notes-holder', count: 2) - should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]'), asset_form_reset: false) expect(page).to have_css('.notes_holder .note.note-discussion', count: 1) - expect(page).to have_css('.js-temp-notes-holder', count: 1) expect(page).to have_button('Reply...') end end @@ -267,7 +264,7 @@ describe 'Merge request > User posts diff notes', :js do def assert_comment_persistence(line_holder, asset_form_reset:) notes_holder_saved = line_holder.find(:xpath, notes_holder_input_xpath) - expect(notes_holder_saved[:class]).not_to include(notes_holder_input_class) + expect(notes_holder_saved[:class]).not_to include('note-edit-form') expect(notes_holder_saved).to have_content test_note_comment assert_form_is_reset if asset_form_reset @@ -281,6 +278,6 @@ describe 'Merge request > User posts diff notes', :js do end def assert_form_is_reset - expect(page).to have_no_css('.js-temp-notes-holder') + expect(page).to have_no_css('.note-edit-form') end end diff --git a/spec/features/merge_request/user_uses_slash_commands_spec.rb b/spec/features/merge_request/user_uses_quick_actions_spec.rb index b81478a481f..b81478a481f 100644 --- a/spec/features/merge_request/user_uses_slash_commands_spec.rb +++ b/spec/features/merge_request/user_uses_quick_actions_spec.rb diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb index eb281cd2122..8a418356541 100644 --- a/spec/features/projects/import_export/export_file_spec.rb +++ b/spec/features/projects/import_export/export_file_spec.rb @@ -25,7 +25,6 @@ describe 'Import/Export - project export integration test', :js do before do allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) - stub_feature_flags(import_export_object_storage: false) end after do diff --git a/spec/features/projects/import_export/import_file_object_storage_spec.rb b/spec/features/projects/import_export/import_file_object_storage_spec.rb deleted file mode 100644 index 0d364543916..00000000000 --- a/spec/features/projects/import_export/import_file_object_storage_spec.rb +++ /dev/null @@ -1,103 +0,0 @@ -require 'spec_helper' - -describe 'Import/Export - project import integration test', :js do - include Select2Helper - - let(:user) { create(:user) } - let(:file) { File.join(Rails.root, 'spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') } - let(:export_path) { "#{Dir.tmpdir}/import_file_spec" } - - before do - stub_feature_flags(import_export_object_storage: true) - stub_uploads_object_storage(FileUploader) - allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) - gitlab_sign_in(user) - end - - after do - FileUtils.rm_rf(export_path, secure: true) - end - - context 'when selecting the namespace' do - let(:user) { create(:admin) } - let!(:namespace) { user.namespace } - let(:project_path) { 
'test-project-path' + SecureRandom.hex } - - context 'prefilled the path' do - it 'user imports an exported project successfully' do - visit new_project_path - - select2(namespace.id, from: '#project_namespace_id') - fill_in :project_path, with: project_path, visible: true - click_import_project_tab - click_link 'GitLab export' - - expect(page).to have_content('Import an exported GitLab project') - expect(URI.parse(current_url).query).to eq("namespace_id=#{namespace.id}&path=#{project_path}") - - attach_file('file', file) - click_on 'Import project' - - expect(Project.count).to eq(1) - - project = Project.last - expect(project).not_to be_nil - expect(project.description).to eq("Foo Bar") - expect(project.issues).not_to be_empty - expect(project.merge_requests).not_to be_empty - expect(project_hook_exists?(project)).to be true - expect(wiki_exists?(project)).to be true - expect(project.import_state.status).to eq('finished') - end - end - - context 'path is not prefilled' do - it 'user imports an exported project successfully' do - visit new_project_path - click_import_project_tab - click_link 'GitLab export' - - fill_in :path, with: 'test-project-path', visible: true - attach_file('file', file) - - expect { click_on 'Import project' }.to change { Project.count }.by(1) - - project = Project.last - expect(project).not_to be_nil - expect(page).to have_content("Project 'test-project-path' is being imported") - end - end - end - - it 'invalid project' do - project = create(:project, namespace: user.namespace) - - visit new_project_path - - select2(user.namespace.id, from: '#project_namespace_id') - fill_in :project_path, with: project.name, visible: true - click_import_project_tab - click_link 'GitLab export' - attach_file('file', file) - click_on 'Import project' - - page.within('.flash-container') do - expect(page).to have_content('Project could not be imported') - end - end - - def wiki_exists?(project) - wiki = ProjectWiki.new(project) - wiki.repository.exists? && !wiki.repository.empty? - end - - def project_hook_exists?(project) - Gitlab::GitalyClient::StorageSettings.allow_disk_access do - Gitlab::Git::Hook.new('post-receive', project.repository.raw_repository).exists? 
- end - end - - def click_import_project_tab - find('#import-project-tab').click - end -end diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb index 2d86115de12..6cd5810325f 100644 --- a/spec/features/projects/import_export/import_file_spec.rb +++ b/spec/features/projects/import_export/import_file_spec.rb @@ -8,7 +8,7 @@ describe 'Import/Export - project import integration test', :js do let(:export_path) { "#{Dir.tmpdir}/import_file_spec" } before do - stub_feature_flags(import_export_object_storage: false) + stub_uploads_object_storage(FileUploader) allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) gitlab_sign_in(user) end @@ -20,6 +20,7 @@ describe 'Import/Export - project import integration test', :js do context 'when selecting the namespace' do let(:user) { create(:admin) } let!(:namespace) { user.namespace } + let(:project_name) { 'Test Project Name' + SecureRandom.hex } let(:project_path) { 'test-project-path' + SecureRandom.hex } context 'prefilled the path' do @@ -27,13 +28,13 @@ describe 'Import/Export - project import integration test', :js do visit new_project_path select2(namespace.id, from: '#project_namespace_id') + fill_in :project_name, with: project_name, visible: true fill_in :project_path, with: project_path, visible: true click_import_project_tab click_link 'GitLab export' expect(page).to have_content('Import an exported GitLab project') - expect(URI.parse(current_url).query).to eq("namespace_id=#{namespace.id}&path=#{project_path}") - expect(Gitlab::ImportExport).to receive(:import_upload_path).with(filename: /\A\h{32}\z/).and_call_original + expect(URI.parse(current_url).query).to eq("namespace_id=#{namespace.id}&name=#{ERB::Util.url_encode(project_name)}&path=#{project_path}") attach_file('file', file) click_on 'Import project' @@ -57,6 +58,7 @@ describe 'Import/Export - project import integration test', :js do click_import_project_tab click_link 'GitLab export' + fill_in :name, with: 'Test Project Name', visible: true fill_in :path, with: 'test-project-path', visible: true attach_file('file', file) @@ -75,7 +77,8 @@ describe 'Import/Export - project import integration test', :js do visit new_project_path select2(user.namespace.id, from: '#project_namespace_id') - fill_in :project_path, with: project.name, visible: true + fill_in :project_name, with: project.name, visible: true + fill_in :project_path, with: project.path, visible: true click_import_project_tab click_link 'GitLab export' attach_file('file', file) diff --git a/spec/features/projects/import_export/namespace_export_file_spec.rb b/spec/features/projects/import_export/namespace_export_file_spec.rb deleted file mode 100644 index 9bb8a2063b5..00000000000 --- a/spec/features/projects/import_export/namespace_export_file_spec.rb +++ /dev/null @@ -1,68 +0,0 @@ -require 'spec_helper' - -describe 'Import/Export - Namespace export file cleanup', :js do - let(:export_path) { Dir.mktmpdir('namespace_export_file_spec') } - - before do - allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) - stub_feature_flags(import_export_object_storage: false) - end - - after do - FileUtils.rm_rf(export_path, secure: true) - end - - shared_examples_for 'handling project exports on namespace change' do - let!(:old_export_path) { project.export_path } - - before do - sign_in(create(:admin)) - - setup_export_project - end - - context 'moving the namespace' do - it 'removes the export file' 
do - expect(File).to exist(old_export_path) - - project.namespace.update!(path: build(:namespace).path) - - expect(File).not_to exist(old_export_path) - end - end - - context 'deleting the namespace' do - it 'removes the export file' do - expect(File).to exist(old_export_path) - - project.namespace.destroy - - expect(File).not_to exist(old_export_path) - end - end - end - - describe 'legacy storage' do - let(:project) { create(:project, :legacy_storage) } - - it_behaves_like 'handling project exports on namespace change' - end - - describe 'hashed storage' do - let(:project) { create(:project) } - - it_behaves_like 'handling project exports on namespace change' - end - - def setup_export_project - visit edit_project_path(project) - - expect(page).to have_content('Export project') - - find(:link, 'Export project').send_keys(:return) - - visit edit_project_path(project) - - expect(page).to have_content('Download export') - end -end diff --git a/spec/features/projects/import_export/test_project_export.tar.gz b/spec/features/projects/import_export/test_project_export.tar.gz Binary files differindex 3b5df47e0b6..730e586b278 100644 --- a/spec/features/projects/import_export/test_project_export.tar.gz +++ b/spec/features/projects/import_export/test_project_export.tar.gz diff --git a/spec/features/projects/members/share_with_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb index c6d85e5d22f..0fb3eb20b5b 100644 --- a/spec/features/projects/members/share_with_group_spec.rb +++ b/spec/features/projects/members/invite_group_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe 'Project > Members > Share with Group', :js do +describe 'Project > Members > Invite group', :js do include Select2Helper include ActionView::Helpers::DateHelper @@ -8,17 +8,16 @@ describe 'Project > Members > Share with Group', :js do describe 'Share with group lock' do shared_examples 'the project can be shared with groups' do - it 'the "Share with group" tab exists' do + it 'the "Invite group" tab exists' do visit project_settings_members_path(project) - expect(page).to have_selector('#share-with-group-tab') + expect(page).to have_selector('#invite-group-tab') end end shared_examples 'the project cannot be shared with groups' do - it 'the "Share with group" tab does not exist' do + it 'the "Invite group" tab does not exist' do visit project_settings_members_path(project) - expect(page).to have_selector('#add-member-tab') - expect(page).not_to have_selector('#share-with-group-tab') + expect(page).not_to have_selector('#invite-group-tab') end end @@ -37,7 +36,7 @@ describe 'Project > Members > Share with Group', :js do it 'the project can be shared with another group' do visit project_settings_members_path(project) - click_on 'share-with-group-tab' + click_on 'invite-group-tab' select2 group_to_share_with.id, from: '#link_group_id' page.find('body').click @@ -117,12 +116,12 @@ describe 'Project > Members > Share with Group', :js do visit project_settings_members_path(project) - click_on 'share-with-group-tab' + click_on 'invite-group-tab' select2 group.id, from: '#link_group_id' fill_in 'expires_at_groups', with: (Time.now + 4.5.days).strftime('%Y-%m-%d') - click_on 'share-with-group-tab' + click_on 'invite-group-tab' find('.btn-create').click end @@ -150,7 +149,7 @@ describe 'Project > Members > Share with Group', :js do visit project_settings_members_path(project) - click_link 'Share with group' + click_link 'Invite group' find('.ajax-groups-select.select2-container') @@ -183,7 +182,7 @@ describe 'Project 
> Members > Share with Group', :js do it 'the groups dropdown does not show ancestors', :nested_groups do visit project_settings_members_path(project) - click_on 'share-with-group-tab' + click_on 'invite-group-tab' click_link 'Search for a group' page.within '.select2-drop' do diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb index bbe08ff83ff..0acd5059385 100644 --- a/spec/features/projects/new_project_spec.rb +++ b/spec/features/projects/new_project_spec.rb @@ -12,8 +12,9 @@ describe 'New project' do it 'shows "New project" page', :js do visit new_project_path - expect(page).to have_content('Project path') expect(page).to have_content('Project name') + expect(page).to have_content('Project URL') + expect(page).to have_content('Project slug') find('#import-project-tab').click @@ -187,7 +188,7 @@ describe 'New project' do collision_project = create(:project, name: 'test-name-collision', namespace: user.namespace) fill_in 'project_import_url', with: collision_project.http_url_to_repo - fill_in 'project_path', with: collision_project.path + fill_in 'project_name', with: collision_project.name click_on 'Create project' diff --git a/spec/features/projects/settings/user_manages_group_links_spec.rb b/spec/features/projects/settings/user_manages_group_links_spec.rb index 2f1824d7849..676659b90c3 100644 --- a/spec/features/projects/settings/user_manages_group_links_spec.rb +++ b/spec/features/projects/settings/user_manages_group_links_spec.rb @@ -26,13 +26,13 @@ describe 'Projects > Settings > User manages group links' do end end - it 'shares a project with a group', :js do - click_link('Share with group') + it 'invites a group to a project', :js do + click_link('Invite group') select2(group_market.id, from: '#link_group_id') select('Maintainer', from: 'link_group_access') - click_button('Share') + click_button('Invite') page.within('.project-members-groups') do expect(page).to have_content('Market') diff --git a/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb new file mode 100644 index 00000000000..baf715000a3 --- /dev/null +++ b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Project > Show > User interacts with auto devops implicitly enabled banner' do + let(:project) { create(:project, :repository) } + let(:user) { create(:user) } + + before do + project.add_user(user, role) + sign_in(user) + end + + context 'when user does not have maintainer access' do + let(:role) { :developer } + + context 'when AutoDevOps is implicitly enabled' do + it 'does not display AutoDevOps implicitly enabled banner' do + expect(page).not_to have_css('.auto-devops-implicitly-enabled-banner') + end + end + end + + context 'when user has mantainer access' do + let(:role) { :maintainer } + + context 'when AutoDevOps is implicitly enabled' do + before do + stub_application_setting(auto_devops_enabled: true) + + visit project_path(project) + end + + it 'display AutoDevOps implicitly enabled banner' do + expect(page).to have_css('.auto-devops-implicitly-enabled-banner') + end + + context 'when user dismisses the banner', :js do + it 'does not display AutoDevOps implicitly enabled banner' do + find('.hide-auto-devops-implicitly-enabled-banner').click + wait_for_requests + visit project_path(project) + + expect(page).not_to 
have_css('.auto-devops-implicitly-enabled-banner') + end + end + end + + context 'when AutoDevOps is not implicitly enabled' do + before do + stub_application_setting(auto_devops_enabled: false) + + visit project_path(project) + end + + it 'does not display AutoDevOps implicitly enabled banner' do + expect(page).not_to have_css('.auto-devops-implicitly-enabled-banner') + end + end + end +end diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb index 8ae036cd29f..45e81e1c040 100644 --- a/spec/features/projects/tree/tree_show_spec.rb +++ b/spec/features/projects/tree/tree_show_spec.rb @@ -4,16 +4,30 @@ describe 'Projects tree', :js do let(:user) { create(:user) } let(:project) { create(:project, :repository) } + # This commit has a known state on the master branch of gitlab-test + let(:test_sha) { '7975be0116940bf2ad4321f79d02a55c5f7779aa' } + before do project.add_maintainer(user) sign_in(user) end it 'renders tree table without errors' do - visit project_tree_path(project, 'master') + visit project_tree_path(project, test_sha) + wait_for_requests + + expect(page).to have_selector('.tree-item') + expect(page).to have_content('add tests for .gitattributes custom highlighting') + expect(page).not_to have_selector('.flash-alert') + expect(page).not_to have_selector('.label-lfs', text: 'LFS') + end + + it 'renders tree table for a subtree without errors' do + visit project_tree_path(project, File.join(test_sha, 'files')) wait_for_requests expect(page).to have_selector('.tree-item') + expect(page).to have_content('add spaces in whitespace file') expect(page).not_to have_selector('.label-lfs', text: 'LFS') expect(page).not_to have_selector('.flash-alert') end diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb index 83d18996f4e..8d7e2883b2a 100644 --- a/spec/features/projects/user_creates_project_spec.rb +++ b/spec/features/projects/user_creates_project_spec.rb @@ -11,7 +11,7 @@ describe 'User creates a project', :js do it 'creates a new project' do visit(new_project_path) - fill_in(:project_path, with: 'Empty') + fill_in(:project_name, with: 'Empty') page.within('#content-body') do click_button('Create project') @@ -37,6 +37,7 @@ describe 'User creates a project', :js do it 'creates a new project' do visit(new_project_path) + fill_in :project_name, with: 'A Subgroup Project' fill_in :project_path, with: 'a-subgroup-project' page.find('.js-select-namespace').click @@ -46,7 +47,7 @@ describe 'User creates a project', :js do click_button('Create project') end - expect(page).to have_content("Project 'a-subgroup-project' was successfully created") + expect(page).to have_content("Project 'A Subgroup Project' was successfully created") project = Project.last diff --git a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb index 5007794cd77..18ccd31f3d0 100644 --- a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb +++ b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb @@ -13,7 +13,7 @@ describe 'User deletes wiki page', :js do it 'deletes a page' do click_on('Edit') click_on('Delete') - find('.js-modal-primary-action').click + find('.modal-footer .btn-danger').click expect(page).to have_content('Page was successfully deleted') end diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb index 22e3a99072f..8e310f38a8c 100644 --- a/spec/features/projects_spec.rb +++ 
b/spec/features/projects_spec.rb @@ -16,7 +16,7 @@ describe 'Project' do it "allows creation from templates", :js do find('#create-from-template-tab').click find("label[for=#{template.name}]").click - fill_in("project_path", with: template.name) + fill_in("project_name", with: template.name) page.within '#content-body' do click_button "Create project" diff --git a/spec/features/usage_stats_consent_spec.rb b/spec/features/usage_stats_consent_spec.rb new file mode 100644 index 00000000000..dd8f3179895 --- /dev/null +++ b/spec/features/usage_stats_consent_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Usage stats consent' do + context 'when signed in' do + let(:user) { create(:admin, created_at: 8.days.ago) } + let(:message) { 'To help improve GitLab, we would like to periodically collect usage information.' } + + before do + allow(user).to receive(:has_current_license?).and_return false + + gitlab_sign_in(user) + end + + it 'hides the banner permanently when sets usage stats' do + visit root_dashboard_path + + expect(page).to have_content(message) + + click_link 'Send usage data' + + expect(page).not_to have_content(message) + expect(page).to have_content('Application settings saved successfully') + + gitlab_sign_out + gitlab_sign_in(user) + visit root_dashboard_path + + expect(page).not_to have_content(message) + end + + it 'shows banner on next session if user did not set usage stats' do + visit root_dashboard_path + + expect(page).to have_content(message) + + gitlab_sign_out + gitlab_sign_in(user) + visit root_dashboard_path + + expect(page).to have_content(message) + end + end +end diff --git a/spec/finders/contributed_projects_finder_spec.rb b/spec/finders/contributed_projects_finder_spec.rb index 9155a8d6fe9..81fb4e3561c 100644 --- a/spec/finders/contributed_projects_finder_spec.rb +++ b/spec/finders/contributed_projects_finder_spec.rb @@ -8,6 +8,7 @@ describe ContributedProjectsFinder do let!(:public_project) { create(:project, :public) } let!(:private_project) { create(:project, :private) } + let!(:internal_project) { create(:project, :internal) } before do private_project.add_maintainer(source_user) @@ -16,17 +17,18 @@ describe ContributedProjectsFinder do create(:push_event, project: public_project, author: source_user) create(:push_event, project: private_project, author: source_user) + create(:push_event, project: internal_project, author: source_user) end - describe 'without a current user' do + describe 'activity without a current user' do subject { finder.execute } - it { is_expected.to eq([public_project]) } + it { is_expected.to match_array([public_project]) } end - describe 'with a current user' do + describe 'activity with a current user' do subject { finder.execute(current_user) } - it { is_expected.to eq([private_project, public_project]) } + it { is_expected.to match_array([private_project, internal_project, public_project]) } end end diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb index 796d40cb625..c64abdc3619 100644 --- a/spec/finders/group_descendants_finder_spec.rb +++ b/spec/finders/group_descendants_finder_spec.rb @@ -108,6 +108,15 @@ describe GroupDescendantsFinder do end end end + + it 'does not include projects shared with the group' do + project = create(:project, namespace: group) + other_project = create(:project) + other_project.project_group_links.create(group: group, + group_access: ProjectGroupLink::MASTER) + + expect(finder.execute).to 
contain_exactly(project) + end end context 'with nested groups', :nested_groups do diff --git a/spec/finders/user_recent_events_finder_spec.rb b/spec/finders/user_recent_events_finder_spec.rb index 58470f4c84d..c5fcd68eb4c 100644 --- a/spec/finders/user_recent_events_finder_spec.rb +++ b/spec/finders/user_recent_events_finder_spec.rb @@ -13,49 +13,25 @@ describe UserRecentEventsFinder do subject(:finder) { described_class.new(current_user, project_owner) } describe '#execute' do - context 'current user does not have access to projects' do - it 'returns public and internal events' do - records = finder.execute - - expect(records).to include(public_event, internal_event) - expect(records).not_to include(private_event) + context 'when profile is public' do + it 'returns all the events' do + expect(finder.execute).to include(private_event, internal_event, public_event) end end - context 'when current user has access to the projects' do - before do - private_project.add_developer(current_user) - internal_project.add_developer(current_user) - public_project.add_developer(current_user) - end - - context 'when profile is public' do - it 'returns all the events' do - expect(finder.execute).to include(private_event, internal_event, public_event) - end - end - - context 'when profile is private' do - it 'returns no event' do - allow(Ability).to receive(:allowed?).and_call_original - allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, project_owner).and_return(false) - expect(finder.execute).to be_empty - end - end + context 'when profile is private' do + it 'returns no event' do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, project_owner).and_return(false) - it 'does not include the events if the user cannot read cross project' do - expect(Ability).to receive(:allowed?).and_call_original - expect(Ability).to receive(:allowed?).with(current_user, :read_cross_project) { false } expect(finder.execute).to be_empty end end - context 'when current user is anonymous' do - let(:current_user) { nil } - - it 'returns public events only' do - expect(finder.execute).to eq([public_event]) - end + it 'does not include the events if the user cannot read cross project' do + expect(Ability).to receive(:allowed?).and_call_original + expect(Ability).to receive(:allowed?).with(current_user, :read_cross_project) { false } + expect(finder.execute).to be_empty end end end diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb index f76ed4bfda4..4af98bc3678 100644 --- a/spec/helpers/issuables_helper_spec.rb +++ b/spec/helpers/issuables_helper_spec.rb @@ -184,7 +184,7 @@ describe IssuablesHelper do issuableRef: "##{issue.iid}", markdownPreviewPath: "/#{@project.full_path}/preview_markdown", markdownDocsPath: '/help/user/markdown', - markdownVersion: 11, + markdownVersion: CacheMarkdownField::CACHE_COMMONMARK_VERSION, issuableTemplates: [], projectPath: @project.path, projectNamespace: @project.namespace.path, diff --git a/spec/javascripts/boards/mock_data.js b/spec/javascripts/boards/mock_data.js index 81f1a97112f..f380ef450db 100644 --- a/spec/javascripts/boards/mock_data.js +++ b/spec/javascripts/boards/mock_data.js @@ -27,7 +27,7 @@ export const listObjDuplicate = { export const BoardsMockData = { GET: { - '/test/-/boards/1/lists/300/issues?id=300&page=1&=': { + '/test/-/boards/1/lists/300/issues?id=300&page=1': { issues: [ { title: 'Testing', diff --git 
a/spec/javascripts/boards/utils/query_data_spec.js b/spec/javascripts/boards/utils/query_data_spec.js deleted file mode 100644 index 922215ffc1d..00000000000 --- a/spec/javascripts/boards/utils/query_data_spec.js +++ /dev/null @@ -1,27 +0,0 @@ -import queryData from '~/boards/utils/query_data'; - -describe('queryData', () => { - it('parses path for label with trailing +', () => { - expect( - queryData('label_name[]=label%2B', {}), - ).toEqual({ - label_name: ['label+'], - }); - }); - - it('parses path for milestone with trailing +', () => { - expect( - queryData('milestone_title=A%2B', {}), - ).toEqual({ - milestone_title: 'A+', - }); - }); - - it('parses path for search terms with spaces', () => { - expect( - queryData('search=two+words', {}), - ).toEqual({ - search: 'two words', - }); - }); -}); diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js index 44a38f7ca82..845fef23db6 100644 --- a/spec/javascripts/diffs/components/diff_file_spec.js +++ b/spec/javascripts/diffs/components/diff_file_spec.js @@ -51,7 +51,9 @@ describe('DiffFile', () => { }); it('should have collapsed text and link', done => { - vm.file.collapsed = true; + vm.file.renderIt = true; + vm.file.collapsed = false; + vm.file.highlightedDiffLines = null; vm.$nextTick(() => { expect(vm.$el.innerText).toContain('This diff is collapsed'); diff --git a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js b/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js index a1a37b342b7..663c0680845 100644 --- a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js +++ b/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js @@ -6,61 +6,61 @@ import discussionsMockData from '../mock_data/diff_discussions'; import diffFileMockData from '../mock_data/diff_file'; describe('DiffLineGutterContent', () => { - const getDiscussionsMockData = () => [Object.assign({}, discussionsMockData)]; const getDiffFileMock = () => Object.assign({}, diffFileMockData); const createComponent = (options = {}) => { const cmp = Vue.extend(DiffLineGutterContent); const props = Object.assign({}, options); + props.line = { + lineCode: 'LC_42', + type: 'new', + oldLine: null, + newLine: 1, + discussions: [], + text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', + richText: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', + metaData: null, + }; props.fileHash = getDiffFileMock().fileHash; props.contextLinesPath = '/context/lines/path'; return createComponentWithStore(cmp, store, props).$mount(); }; - const setDiscussions = component => { - component.$store.dispatch('setInitialNotes', getDiscussionsMockData()); - }; - - const resetDiscussions = component => { - component.$store.dispatch('setInitialNotes', []); - }; describe('computed', () => { describe('lineHref', () => { it('should prepend # to lineCode', () => { const lineCode = 'LC_42'; - const component = createComponent({ lineCode }); + const component = createComponent(); expect(component.lineHref).toEqual(`#${lineCode}`); }); it('should return # if there is no lineCode', () => { - const component = createComponent({ lineCode: null }); + const component = createComponent(); + component.line.lineCode = ''; expect(component.lineHref).toEqual('#'); }); }); describe('discussions, hasDiscussions, shouldShowAvatarsOnGutter', () => { it('should return empty array when there is no discussion', () => { - const component = createComponent({ lineCode: 'LC_42' 
}); - expect(component.discussions).toEqual([]); + const component = createComponent(); expect(component.hasDiscussions).toEqual(false); expect(component.shouldShowAvatarsOnGutter).toEqual(false); }); it('should return discussions for the given lineCode', () => { - const { lineCode } = getDiffFileMock().highlightedDiffLines[1]; - const component = createComponent({ - lineCode, + const cmp = Vue.extend(DiffLineGutterContent); + const props = { + line: getDiffFileMock().highlightedDiffLines[1], + fileHash: getDiffFileMock().fileHash, showCommentButton: true, - discussions: getDiscussionsMockData(), - }); + contextLinesPath: '/context/lines/path', + }; + props.line.discussions = [Object.assign({}, discussionsMockData)]; + const component = createComponentWithStore(cmp, store, props).$mount(); - setDiscussions(component); - - expect(component.discussions).toEqual(getDiscussionsMockData()); expect(component.hasDiscussions).toEqual(true); expect(component.shouldShowAvatarsOnGutter).toEqual(true); - - resetDiscussions(component); }); }); }); @@ -104,9 +104,7 @@ describe('DiffLineGutterContent', () => { lineCode: getDiffFileMock().highlightedDiffLines[1].lineCode, }); - setDiscussions(component); expect(component.$el.querySelector('.diff-comment-avatar-holders')).toBeDefined(); - resetDiscussions(component); }); }); }); diff --git a/spec/javascripts/diffs/components/parallel_diff_view_spec.js b/spec/javascripts/diffs/components/parallel_diff_view_spec.js index 165e4b69b6c..091e01868d3 100644 --- a/spec/javascripts/diffs/components/parallel_diff_view_spec.js +++ b/spec/javascripts/diffs/components/parallel_diff_view_spec.js @@ -18,11 +18,11 @@ describe('ParallelDiffView', () => { }).$mount(); }); - describe('computed', () => { - describe('parallelDiffLines', () => { + describe('assigned', () => { + describe('diffLines', () => { it('should normalize lines for empty cells', () => { - expect(component.parallelDiffLines[0].left.type).toEqual(constants.EMPTY_CELL_TYPE); - expect(component.parallelDiffLines[1].left.type).toEqual(constants.EMPTY_CELL_TYPE); + expect(component.diffLines[0].left.type).toEqual(constants.EMPTY_CELL_TYPE); + expect(component.diffLines[1].left.type).toEqual(constants.EMPTY_CELL_TYPE); }); }); }); diff --git a/spec/javascripts/diffs/mock_data/diff_discussions.js b/spec/javascripts/diffs/mock_data/diff_discussions.js index 41d0dfd8939..b29a22da7c2 100644 --- a/spec/javascripts/diffs/mock_data/diff_discussions.js +++ b/spec/javascripts/diffs/mock_data/diff_discussions.js @@ -16,7 +16,7 @@ export default { expanded: true, notes: [ { - id: 1749, + id: '1749', type: 'DiffNote', attachment: null, author: { @@ -68,7 +68,7 @@ export default { '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20', }, { - id: 1753, + id: '1753', type: 'DiffNote', attachment: null, author: { @@ -120,7 +120,7 @@ export default { '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20', }, { - id: 1754, + id: '1754', type: 'DiffNote', attachment: null, author: { @@ -162,7 +162,7 @@ export default { '/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20', }, { - id: 1755, + id: '1755', type: 'DiffNote', attachment: null, author: { @@ -204,7 +204,7 @@ export default { 
'/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20', }, { - id: 1756, + id: '1756', type: 'DiffNote', attachment: null, author: { diff --git a/spec/javascripts/diffs/mock_data/diff_file.js b/spec/javascripts/diffs/mock_data/diff_file.js index cce36ecc91f..372b8f066cf 100644 --- a/spec/javascripts/diffs/mock_data/diff_file.js +++ b/spec/javascripts/diffs/mock_data/diff_file.js @@ -49,6 +49,7 @@ export default { type: 'new', oldLine: null, newLine: 1, + discussions: [], text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', richText: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', metaData: null, @@ -58,6 +59,7 @@ export default { type: 'new', oldLine: null, newLine: 2, + discussions: [], text: '+<span id="LC2" class="line" lang="plaintext"></span>\n', richText: '+<span id="LC2" class="line" lang="plaintext"></span>\n', metaData: null, @@ -67,6 +69,7 @@ export default { type: null, oldLine: 1, newLine: 3, + discussions: [], text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', richText: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', metaData: null, @@ -76,6 +79,7 @@ export default { type: null, oldLine: 2, newLine: 4, + discussions: [], text: ' <span id="LC4" class="line" lang="plaintext"></span>\n', richText: ' <span id="LC4" class="line" lang="plaintext"></span>\n', metaData: null, @@ -85,6 +89,7 @@ export default { type: null, oldLine: 3, newLine: 5, + discussions: [], text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', richText: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', metaData: null, @@ -94,6 +99,7 @@ export default { type: 'match', oldLine: null, newLine: null, + discussions: [], text: '', richText: '', metaData: { @@ -112,6 +118,7 @@ export default { type: 'new', oldLine: null, newLine: 1, + discussions: [], text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', richText: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', metaData: null, @@ -126,6 +133,7 @@ export default { type: 'new', oldLine: null, newLine: 2, + discussions: [], text: '+<span id="LC2" class="line" lang="plaintext"></span>\n', richText: '<span id="LC2" class="line" lang="plaintext"></span>\n', metaData: null, @@ -137,6 +145,7 @@ export default { type: null, oldLine: 1, newLine: 3, + discussions: [], text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', richText: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', metaData: null, @@ -146,6 +155,7 @@ export default { type: null, oldLine: 1, newLine: 3, + discussions: [], text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', richText: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', metaData: null, @@ -157,6 +167,7 @@ export default { type: null, oldLine: 2, newLine: 4, + discussions: [], text: ' <span id="LC4" class="line" lang="plaintext"></span>\n', richText: '<span id="LC4" class="line" lang="plaintext"></span>\n', metaData: null, @@ -166,6 +177,7 @@ export default { type: null, oldLine: 2, newLine: 4, + discussions: [], text: ' <span id="LC4" class="line" lang="plaintext"></span>\n', richText: '<span id="LC4" class="line" lang="plaintext"></span>\n', metaData: null, @@ -177,6 +189,7 @@ export default { type: null, oldLine: 3, newLine: 5, + discussions: [], text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', richText: '<span id="LC5" 
class="line" lang="plaintext">v6.7.0</span>\n', metaData: null, @@ -186,6 +199,7 @@ export default { type: null, oldLine: 3, newLine: 5, + discussions: [], text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', richText: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', metaData: null, @@ -197,6 +211,7 @@ export default { type: 'match', oldLine: null, newLine: null, + discussions: [], text: '', richText: '', metaData: { @@ -209,6 +224,7 @@ export default { type: 'match', oldLine: null, newLine: null, + discussions: [], text: '', richText: '', metaData: { diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js index c1560dac1a0..cfb8f862598 100644 --- a/spec/javascripts/diffs/store/actions_spec.js +++ b/spec/javascripts/diffs/store/actions_spec.js @@ -7,10 +7,30 @@ import { } from '~/diffs/constants'; import * as actions from '~/diffs/store/actions'; import * as types from '~/diffs/store/mutation_types'; +import { reduceDiscussionsToLineCodes } from '~/notes/stores/utils'; import axios from '~/lib/utils/axios_utils'; import testAction from '../../helpers/vuex_action_helper'; describe('DiffsStoreActions', () => { + const originalMethods = { + requestAnimationFrame: global.requestAnimationFrame, + requestIdleCallback: global.requestIdleCallback, + }; + + beforeEach(() => { + ['requestAnimationFrame', 'requestIdleCallback'].forEach(method => { + global[method] = cb => { + cb(); + }; + }); + }); + + afterEach(() => { + ['requestAnimationFrame', 'requestIdleCallback'].forEach(method => { + global[method] = originalMethods[method]; + }); + }); + describe('setBaseConfig', () => { it('should set given endpoint and project path', done => { const endpoint = '/diffs/set/endpoint'; @@ -53,6 +73,198 @@ describe('DiffsStoreActions', () => { }); }); + describe('assignDiscussionsToDiff', () => { + it('should merge discussions into diffs', done => { + const state = { + diffFiles: [ + { + fileHash: 'ABC', + parallelDiffLines: [ + { + left: { + lineCode: 'ABC_1_1', + discussions: [], + }, + right: { + lineCode: 'ABC_1_1', + discussions: [], + }, + }, + ], + highlightedDiffLines: [ + { + lineCode: 'ABC_1_1', + discussions: [], + oldLine: 5, + newLine: null, + }, + ], + diffRefs: { + baseSha: 'abc', + headSha: 'def', + startSha: 'ghi', + }, + newPath: 'file1', + oldPath: 'file2', + }, + ], + }; + + const diffPosition = { + baseSha: 'abc', + headSha: 'def', + startSha: 'ghi', + newLine: null, + newPath: 'file1', + oldLine: 5, + oldPath: 'file2', + }; + + const singleDiscussion = { + line_code: 'ABC_1_1', + diff_discussion: {}, + diff_file: { + file_hash: 'ABC', + }, + fileHash: 'ABC', + resolvable: true, + position: { + formatter: diffPosition, + }, + original_position: { + formatter: diffPosition, + }, + }; + + const discussions = reduceDiscussionsToLineCodes([singleDiscussion]); + + testAction( + actions.assignDiscussionsToDiff, + discussions, + state, + [ + { + type: types.SET_LINE_DISCUSSIONS_FOR_FILE, + payload: { + fileHash: 'ABC', + discussions: [singleDiscussion], + diffPositionByLineCode: { + ABC_1_1: { + baseSha: 'abc', + headSha: 'def', + startSha: 'ghi', + newLine: null, + newPath: 'file1', + oldLine: 5, + oldPath: 'file2', + }, + }, + }, + }, + ], + [], + () => { + done(); + }, + ); + }); + }); + + describe('removeDiscussionsFromDiff', () => { + it('should remove discussions from diffs', done => { + const state = { + diffFiles: [ + { + fileHash: 'ABC', + parallelDiffLines: [ + { + left: { + lineCode: 'ABC_1_1', + 
discussions: [ + { + id: 1, + }, + ], + }, + right: { + lineCode: 'ABC_1_1', + discussions: [], + }, + }, + ], + highlightedDiffLines: [ + { + lineCode: 'ABC_1_1', + discussions: [], + }, + ], + }, + ], + }; + const singleDiscussion = { + fileHash: 'ABC', + line_code: 'ABC_1_1', + }; + + testAction( + actions.removeDiscussionsFromDiff, + singleDiscussion, + state, + [ + { + type: types.REMOVE_LINE_DISCUSSIONS_FOR_FILE, + payload: { + fileHash: 'ABC', + lineCode: 'ABC_1_1', + }, + }, + ], + [], + () => { + done(); + }, + ); + }); + }); + + describe('startRenderDiffsQueue', () => { + it('should set all files to RENDER_FILE', done => { + const state = { + diffFiles: [ + { + id: 1, + renderIt: false, + collapsed: false, + }, + { + id: 2, + renderIt: false, + collapsed: false, + }, + ], + }; + + const pseudoCommit = (commitType, file) => { + expect(commitType).toBe(types.RENDER_FILE); + Object.assign(file, { + renderIt: true, + }); + }; + + actions + .startRenderDiffsQueue({ state, commit: pseudoCommit }) + .then(() => { + expect(state.diffFiles[0].renderIt).toBeTruthy(); + expect(state.diffFiles[1].renderIt).toBeTruthy(); + + done(); + }) + .catch(() => { + done.fail(); + }); + }); + }); + describe('setInlineDiffViewType', () => { it('should set diff view type to inline and also set the cookie properly', done => { testAction( @@ -204,7 +416,11 @@ describe('DiffsStoreActions', () => { actions.toggleFileDiscussions({ getters, dispatch }); - expect(dispatch).toHaveBeenCalledWith('collapseDiscussion', { discussionId: 1 }, { root: true }); + expect(dispatch).toHaveBeenCalledWith( + 'collapseDiscussion', + { discussionId: 1 }, + { root: true }, + ); }); it('should dispatch expandDiscussion when all discussions are collapsed', () => { @@ -218,7 +434,11 @@ describe('DiffsStoreActions', () => { actions.toggleFileDiscussions({ getters, dispatch }); - expect(dispatch).toHaveBeenCalledWith('expandDiscussion', { discussionId: 1 }, { root: true }); + expect(dispatch).toHaveBeenCalledWith( + 'expandDiscussion', + { discussionId: 1 }, + { root: true }, + ); }); it('should dispatch expandDiscussion when some discussions are collapsed and others are expanded for the collapsed discussion', () => { @@ -232,7 +452,11 @@ describe('DiffsStoreActions', () => { actions.toggleFileDiscussions({ getters, dispatch }); - expect(dispatch).toHaveBeenCalledWith('expandDiscussion', { discussionId: 1 }, { root: true }); + expect(dispatch).toHaveBeenCalledWith( + 'expandDiscussion', + { discussionId: 1 }, + { root: true }, + ); }); }); }); diff --git a/spec/javascripts/diffs/store/getters_spec.js b/spec/javascripts/diffs/store/getters_spec.js index a59b26b2634..4747e437c4e 100644 --- a/spec/javascripts/diffs/store/getters_spec.js +++ b/spec/javascripts/diffs/store/getters_spec.js @@ -184,101 +184,73 @@ describe('Diffs Module Getters', () => { }); }); - describe('singleDiscussionByLineCode', () => { - it('returns found discussion per line Code', () => { - const discussionsMock = {}; - discussionsMock.ABC = discussionMock; - - expect( - getters.singleDiscussionByLineCode(localState, {}, null, { - discussionsByLineCode: () => discussionsMock, - })('DEF'), - ).toEqual([]); - }); - - it('returns empty array when no discussions match', () => { - expect( - getters.singleDiscussionByLineCode(localState, {}, null, { - discussionsByLineCode: () => {}, - })('DEF'), - ).toEqual([]); - }); - }); - describe('shouldRenderParallelCommentRow', () => { let line; beforeEach(() => { line = {}; + discussionMock.expanded = true; + line.left = { 
lineCode: 'ABC', + discussions: [discussionMock], }; line.right = { lineCode: 'DEF', + discussions: [discussionMock1], }; }); it('returns true when discussion is expanded', () => { - discussionMock.expanded = true; - - expect( - getters.shouldRenderParallelCommentRow(localState, { - singleDiscussionByLineCode: () => [discussionMock], - })(line), - ).toEqual(true); + expect(getters.shouldRenderParallelCommentRow(localState)(line)).toEqual(true); }); it('returns false when no discussion was found', () => { + line.left.discussions = []; + line.right.discussions = []; + localState.diffLineCommentForms.ABC = false; localState.diffLineCommentForms.DEF = false; - expect( - getters.shouldRenderParallelCommentRow(localState, { - singleDiscussionByLineCode: () => [], - })(line), - ).toEqual(false); + expect(getters.shouldRenderParallelCommentRow(localState)(line)).toEqual(false); }); it('returns true when discussionForm was found', () => { localState.diffLineCommentForms.ABC = {}; - expect( - getters.shouldRenderParallelCommentRow(localState, { - singleDiscussionByLineCode: () => [discussionMock], - })(line), - ).toEqual(true); + expect(getters.shouldRenderParallelCommentRow(localState)(line)).toEqual(true); }); }); describe('shouldRenderInlineCommentRow', () => { + let line; + + beforeEach(() => { + discussionMock.expanded = true; + + line = { + lineCode: 'ABC', + discussions: [discussionMock], + }; + }); + it('returns true when diffLineCommentForms has form', () => { localState.diffLineCommentForms.ABC = {}; - expect( - getters.shouldRenderInlineCommentRow(localState)({ - lineCode: 'ABC', - }), - ).toEqual(true); + expect(getters.shouldRenderInlineCommentRow(localState)(line)).toEqual(true); }); it('returns false when no line discussions were found', () => { - expect( - getters.shouldRenderInlineCommentRow(localState, { - singleDiscussionByLineCode: () => [], - })('DEF'), - ).toEqual(false); + line.discussions = []; + expect(getters.shouldRenderInlineCommentRow(localState)(line)).toEqual(false); }); it('returns true if all found discussions are expanded', () => { discussionMock.expanded = true; - expect( - getters.shouldRenderInlineCommentRow(localState, { - singleDiscussionByLineCode: () => [discussionMock], - })('ABC'), - ).toEqual(true); + expect(getters.shouldRenderInlineCommentRow(localState)(line)).toEqual(true); }); }); diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js index 8f89984c6e5..7eeca6712cc 100644 --- a/spec/javascripts/diffs/store/mutations_spec.js +++ b/spec/javascripts/diffs/store/mutations_spec.js @@ -138,10 +138,9 @@ describe('DiffsStoreMutations', () => { const fileHash = 123; const state = { diffFiles: [{}, { fileHash, existingField: 0 }] }; - const file = { fileHash }; const data = { diff_files: [{ file_hash: fileHash, extra_field: 1, existingField: 1 }] }; - mutations[types.ADD_COLLAPSED_DIFFS](state, { file, data }); + mutations[types.ADD_COLLAPSED_DIFFS](state, { file: state.diffFiles[1], data }); expect(spy).toHaveBeenCalledWith(data, { deep: true }); expect(state.diffFiles[1].fileHash).toEqual(fileHash); @@ -149,4 +148,141 @@ describe('DiffsStoreMutations', () => { expect(state.diffFiles[1].extraField).toEqual(1); }); }); + + describe('SET_LINE_DISCUSSIONS_FOR_FILE', () => { + it('should add discussions to the given line', () => { + const diffPosition = { + baseSha: 'ed13df29948c41ba367caa757ab3ec4892509910', + headSha: 'b921914f9a834ac47e6fd9420f78db0f83559130', + newLine: null, + newPath: '500-lines-4.txt', + 
oldLine: 5, + oldPath: '500-lines-4.txt', + startSha: 'ed13df29948c41ba367caa757ab3ec4892509910', + }; + + const state = { + diffFiles: [ + { + fileHash: 'ABC', + parallelDiffLines: [ + { + left: { + lineCode: 'ABC_1', + discussions: [], + }, + right: { + lineCode: 'ABC_1', + discussions: [], + }, + }, + ], + highlightedDiffLines: [ + { + lineCode: 'ABC_1', + discussions: [], + }, + ], + }, + ], + }; + const discussions = [ + { + id: 1, + line_code: 'ABC_1', + diff_discussion: true, + resolvable: true, + original_position: { + formatter: diffPosition, + }, + position: { + formatter: diffPosition, + }, + }, + { + id: 2, + line_code: 'ABC_1', + diff_discussion: true, + resolvable: true, + original_position: { + formatter: diffPosition, + }, + position: { + formatter: diffPosition, + }, + }, + ]; + + const diffPositionByLineCode = { + ABC_1: diffPosition, + }; + + mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, { + fileHash: 'ABC', + discussions, + diffPositionByLineCode, + }); + + expect(state.diffFiles[0].parallelDiffLines[0].left.discussions.length).toEqual(2); + expect(state.diffFiles[0].parallelDiffLines[0].left.discussions[1].id).toEqual(2); + + expect(state.diffFiles[0].highlightedDiffLines[0].discussions.length).toEqual(2); + expect(state.diffFiles[0].highlightedDiffLines[0].discussions[1].id).toEqual(2); + }); + }); + + describe('REMOVE_LINE_DISCUSSIONS', () => { + it('should remove the existing discussions on the given line', () => { + const state = { + diffFiles: [ + { + fileHash: 'ABC', + parallelDiffLines: [ + { + left: { + lineCode: 'ABC_1', + discussions: [ + { + id: 1, + line_code: 'ABC_1', + }, + { + id: 2, + line_code: 'ABC_1', + }, + ], + }, + right: { + lineCode: 'ABC_1', + discussions: [], + }, + }, + ], + highlightedDiffLines: [ + { + lineCode: 'ABC_1', + discussions: [ + { + id: 1, + line_code: 'ABC_1', + }, + { + id: 2, + line_code: 'ABC_1', + }, + ], + }, + ], + }, + ], + }; + + mutations[types.REMOVE_LINE_DISCUSSIONS_FOR_FILE](state, { + fileHash: 'ABC', + lineCode: 'ABC_1', + }); + expect(state.diffFiles[0].parallelDiffLines[0].left.discussions.length).toEqual(0); + expect(state.diffFiles[0].highlightedDiffLines[0].discussions.length).toEqual(0); + }); + }); }); diff --git a/spec/javascripts/diffs/store/utils_spec.js b/spec/javascripts/diffs/store/utils_spec.js index 32136d9ebff..4b5cf450c68 100644 --- a/spec/javascripts/diffs/store/utils_spec.js +++ b/spec/javascripts/diffs/store/utils_spec.js @@ -179,32 +179,126 @@ describe('DiffsStoreUtils', () => { describe('trimFirstCharOfLineContent', () => { it('trims the line when it starts with a space', () => { - expect(utils.trimFirstCharOfLineContent({ richText: ' diff' })).toEqual({ richText: 'diff' }); + expect(utils.trimFirstCharOfLineContent({ richText: ' diff' })).toEqual({ + discussions: [], + richText: 'diff', + }); }); it('trims the line when it starts with a +', () => { - expect(utils.trimFirstCharOfLineContent({ richText: '+diff' })).toEqual({ richText: 'diff' }); + expect(utils.trimFirstCharOfLineContent({ richText: '+diff' })).toEqual({ + discussions: [], + richText: 'diff', + }); }); it('trims the line when it starts with a -', () => { - expect(utils.trimFirstCharOfLineContent({ richText: '-diff' })).toEqual({ richText: 'diff' }); + expect(utils.trimFirstCharOfLineContent({ richText: '-diff' })).toEqual({ + discussions: [], + richText: 'diff', + }); }); it('does not trims the line when it starts with a letter', () => { - expect(utils.trimFirstCharOfLineContent({ richText: 'diff' })).toEqual({ richText: 
'diff' }); + expect(utils.trimFirstCharOfLineContent({ richText: 'diff' })).toEqual({ + discussions: [], + richText: 'diff', + }); }); it('does not modify the provided object', () => { const lineObj = { + discussions: [], richText: ' diff', }; utils.trimFirstCharOfLineContent(lineObj); - expect(lineObj).toEqual({ richText: ' diff' }); + expect(lineObj).toEqual({ discussions: [], richText: ' diff' }); }); it('handles a undefined or null parameter', () => { - expect(utils.trimFirstCharOfLineContent()).toEqual({}); + expect(utils.trimFirstCharOfLineContent()).toEqual({ discussions: [] }); + }); + }); + + describe('prepareDiffData', () => { + it('sets the renderIt and collapsed attribute on files', () => { + const preparedDiff = { diffFiles: [getDiffFileMock()] }; + utils.prepareDiffData(preparedDiff); + + const firstParallelDiffLine = preparedDiff.diffFiles[0].parallelDiffLines[2]; + expect(firstParallelDiffLine.left.discussions.length).toBe(0); + expect(firstParallelDiffLine.left).not.toHaveAttr('text'); + expect(firstParallelDiffLine.right.discussions.length).toBe(0); + expect(firstParallelDiffLine.right).not.toHaveAttr('text'); + const firstParallelChar = firstParallelDiffLine.right.richText.charAt(0); + expect(firstParallelChar).not.toBe(' '); + expect(firstParallelChar).not.toBe('+'); + expect(firstParallelChar).not.toBe('-'); + + const checkLine = preparedDiff.diffFiles[0].highlightedDiffLines[0]; + expect(checkLine.discussions.length).toBe(0); + expect(checkLine).not.toHaveAttr('text'); + const firstChar = checkLine.richText.charAt(0); + expect(firstChar).not.toBe(' '); + expect(firstChar).not.toBe('+'); + expect(firstChar).not.toBe('-'); + + expect(preparedDiff.diffFiles[0].renderIt).toBeTruthy(); + expect(preparedDiff.diffFiles[0].collapsed).toBeFalsy(); + }); + }); + + describe('isDiscussionApplicableToLine', () => { + const diffPosition = { + baseSha: 'ed13df29948c41ba367caa757ab3ec4892509910', + headSha: 'b921914f9a834ac47e6fd9420f78db0f83559130', + newLine: null, + newPath: '500-lines-4.txt', + oldLine: 5, + oldPath: '500-lines-4.txt', + startSha: 'ed13df29948c41ba367caa757ab3ec4892509910', + }; + + const wrongDiffPosition = { + baseSha: 'wrong', + headSha: 'wrong', + newLine: null, + newPath: '500-lines-4.txt', + oldLine: 5, + oldPath: '500-lines-4.txt', + startSha: 'wrong', + }; + + const discussions = { + upToDateDiscussion1: { + original_position: { + formatter: diffPosition, + }, + position: { + formatter: wrongDiffPosition, + }, + }, + outDatedDiscussion1: { + original_position: { + formatter: wrongDiffPosition, + }, + position: { + formatter: wrongDiffPosition, + }, + }, + }; + + it('returns true when the discussion is up to date', () => { + expect( + utils.isDiscussionApplicableToLine(discussions.upToDateDiscussion1, diffPosition), + ).toBe(true); + }); + + it('returns false when the discussion is not up to date', () => { + expect( + utils.isDiscussionApplicableToLine(discussions.outDatedDiscussion1, diffPosition), + ).toBe(false); }); }); }); diff --git a/spec/javascripts/dropzone_input_spec.js b/spec/javascripts/dropzone_input_spec.js new file mode 100644 index 00000000000..0c6b1a8946d --- /dev/null +++ b/spec/javascripts/dropzone_input_spec.js @@ -0,0 +1,68 @@ +import $ from 'jquery'; +import dropzoneInput from '~/dropzone_input'; +import { TEST_HOST } from 'spec/test_constants'; + +const TEST_FILE = { + upload: {}, +}; +const TEST_UPLOAD_PATH = `${TEST_HOST}/upload/file`; +const TEST_ERROR_MESSAGE = 'A big error occurred!'; +const TEMPLATE = ( +`<form class="gfm-form" 
data-uploads-path="${TEST_UPLOAD_PATH}"> + <textarea class="js-gfm-input"></textarea> + <div class="uploading-error-message"></div> +</form>` +); + +describe('dropzone_input', () => { + let form; + let dropzone; + let xhr; + let oldXMLHttpRequest; + + beforeEach(() => { + form = $(TEMPLATE); + + dropzone = dropzoneInput(form); + + xhr = jasmine.createSpyObj(Object.keys(XMLHttpRequest.prototype)); + oldXMLHttpRequest = window.XMLHttpRequest; + window.XMLHttpRequest = () => xhr; + }); + + afterEach(() => { + window.XMLHttpRequest = oldXMLHttpRequest; + }); + + it('shows error message, when AJAX fails with json', () => { + xhr = { + ...xhr, + statusCode: 400, + readyState: 4, + responseText: JSON.stringify({ message: TEST_ERROR_MESSAGE }), + getResponseHeader: () => 'application/json', + }; + + dropzone.processFile(TEST_FILE); + + xhr.onload(); + + expect(form.find('.uploading-error-message').text()).toEqual(TEST_ERROR_MESSAGE); + }); + + it('shows error message, when AJAX fails with text', () => { + xhr = { + ...xhr, + statusCode: 400, + readyState: 4, + responseText: TEST_ERROR_MESSAGE, + getResponseHeader: () => 'text/plain', + }; + + dropzone.processFile(TEST_FILE); + + xhr.onload(); + + expect(form.find('.uploading-error-message').text()).toEqual(TEST_ERROR_MESSAGE); + }); +}); diff --git a/spec/javascripts/groups/components/app_spec.js b/spec/javascripts/groups/components/app_spec.js index 03d4b472b87..76933cf337b 100644 --- a/spec/javascripts/groups/components/app_spec.js +++ b/spec/javascripts/groups/components/app_spec.js @@ -9,9 +9,14 @@ import GroupsStore from '~/groups/store/groups_store'; import GroupsService from '~/groups/service/groups_service'; import { - mockEndpoint, mockGroups, mockSearchedGroups, - mockRawPageInfo, mockParentGroupItem, mockRawChildren, - mockChildren, mockPageInfo, + mockEndpoint, + mockGroups, + mockSearchedGroups, + mockRawPageInfo, + mockParentGroupItem, + mockRawChildren, + mockChildren, + mockPageInfo, } from '../mock_data'; const createComponent = (hideProjects = false) => { @@ -28,22 +33,23 @@ const createComponent = (hideProjects = false) => { }); }; -const returnServicePromise = (data, failed) => new Promise((resolve, reject) => { - if (failed) { - reject(data); - } else { - resolve({ - json() { - return data; - }, - }); - } -}); +const returnServicePromise = (data, failed) => + new Promise((resolve, reject) => { + if (failed) { + reject(data); + } else { + resolve({ + json() { + return data; + }, + }); + } + }); describe('AppComponent', () => { let vm; - beforeEach((done) => { + beforeEach(done => { Vue.component('group-folder', groupFolderComponent); Vue.component('group-item', groupItemComponent); @@ -94,7 +100,7 @@ describe('AppComponent', () => { }); describe('fetchGroups', () => { - it('should call `getGroups` with all the params provided', (done) => { + it('should call `getGroups` with all the params provided', done => { spyOn(vm.service, 'getGroups').and.returnValue(returnServicePromise(mockGroups)); vm.fetchGroups({ @@ -110,8 +116,10 @@ describe('AppComponent', () => { }, 0); }); - it('should set headers to store for building pagination info when called with `updatePagination`', (done) => { - spyOn(vm.service, 'getGroups').and.returnValue(returnServicePromise({ headers: mockRawPageInfo })); + it('should set headers to store for building pagination info when called with `updatePagination`', done => { + spyOn(vm.service, 'getGroups').and.returnValue( + returnServicePromise({ headers: mockRawPageInfo }), + ); spyOn(vm, 'updatePagination'); 
vm.fetchGroups({ updatePagination: true }); @@ -122,7 +130,7 @@ describe('AppComponent', () => { }, 0); }); - it('should show flash error when request fails', (done) => { + it('should show flash error when request fails', done => { spyOn(vm.service, 'getGroups').and.returnValue(returnServicePromise(null, true)); spyOn($, 'scrollTo'); spyOn(window, 'Flash'); @@ -138,7 +146,7 @@ describe('AppComponent', () => { }); describe('fetchAllGroups', () => { - it('should fetch default set of groups', (done) => { + it('should fetch default set of groups', done => { spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockGroups)); spyOn(vm, 'updatePagination').and.callThrough(); spyOn(vm, 'updateGroups').and.callThrough(); @@ -153,7 +161,7 @@ describe('AppComponent', () => { }, 0); }); - it('should fetch matching set of groups when app is loaded with search query', (done) => { + it('should fetch matching set of groups when app is loaded with search query', done => { spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockSearchedGroups)); spyOn(vm, 'updateGroups').and.callThrough(); @@ -173,7 +181,7 @@ describe('AppComponent', () => { }); describe('fetchPage', () => { - it('should fetch groups for provided page details and update window state', (done) => { + it('should fetch groups for provided page details and update window state', done => { spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockGroups)); spyOn(vm, 'updateGroups').and.callThrough(); const mergeUrlParams = spyOnDependency(appComponent, 'mergeUrlParams').and.callThrough(); @@ -193,9 +201,13 @@ describe('AppComponent', () => { expect(vm.isLoading).toBe(false); expect($.scrollTo).toHaveBeenCalledWith(0); expect(mergeUrlParams).toHaveBeenCalledWith({ page: 2 }, jasmine.any(String)); - expect(window.history.replaceState).toHaveBeenCalledWith({ - page: jasmine.any(String), - }, jasmine.any(String), jasmine.any(String)); + expect(window.history.replaceState).toHaveBeenCalledWith( + { + page: jasmine.any(String), + }, + jasmine.any(String), + jasmine.any(String), + ); expect(vm.updateGroups).toHaveBeenCalled(); done(); }, 0); @@ -211,7 +223,7 @@ describe('AppComponent', () => { groupItem.isChildrenLoading = false; }); - it('should fetch children of given group and expand it if group is collapsed and children are not loaded', (done) => { + it('should fetch children of given group and expand it if group is collapsed and children are not loaded', done => { spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockRawChildren)); spyOn(vm.store, 'setGroupChildren'); @@ -244,7 +256,7 @@ describe('AppComponent', () => { expect(groupItem.isOpen).toBe(false); }); - it('should set `isChildrenLoading` back to `false` if load request fails', (done) => { + it('should set `isChildrenLoading` back to `false` if load request fails', done => { spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise({}, true)); vm.toggleChildren(groupItem); @@ -272,7 +284,9 @@ describe('AppComponent', () => { expect(vm.groupLeaveConfirmationMessage).toBe(''); vm.showLeaveGroupModal(group, mockParentGroupItem); expect(vm.showModal).toBe(true); - expect(vm.groupLeaveConfirmationMessage).toBe(`Are you sure you want to leave the "${group.fullName}" group?`); + expect(vm.groupLeaveConfirmationMessage).toBe( + `Are you sure you want to leave the "${group.fullName}" group?`, + ); }); }); @@ -299,7 +313,7 @@ describe('AppComponent', () => { vm.targetParentGroup = groupItem; }); - it('hides modal confirmation leave group and remove group item 
from tree', (done) => { + it('hides modal confirmation leave group and remove group item from tree', done => { const notice = `You left the "${childGroupItem.fullName}" group.`; spyOn(vm.service, 'leaveGroup').and.returnValue(returnServicePromise({ notice })); spyOn(vm.store, 'removeGroup').and.callThrough(); @@ -318,9 +332,11 @@ describe('AppComponent', () => { }, 0); }); - it('should show error flash message if request failed to leave group', (done) => { + it('should show error flash message if request failed to leave group', done => { const message = 'An error occurred. Please try again.'; - spyOn(vm.service, 'leaveGroup').and.returnValue(returnServicePromise({ status: 500 }, true)); + spyOn(vm.service, 'leaveGroup').and.returnValue( + returnServicePromise({ status: 500 }, true), + ); spyOn(vm.store, 'removeGroup').and.callThrough(); spyOn(window, 'Flash'); @@ -335,9 +351,11 @@ describe('AppComponent', () => { }, 0); }); - it('should show appropriate error flash message if request forbids to leave group', (done) => { + it('should show appropriate error flash message if request forbids to leave group', done => { const message = 'Failed to leave the group. Please make sure you are not the only owner.'; - spyOn(vm.service, 'leaveGroup').and.returnValue(returnServicePromise({ status: 403 }, true)); + spyOn(vm.service, 'leaveGroup').and.returnValue( + returnServicePromise({ status: 403 }, true), + ); spyOn(vm.store, 'removeGroup').and.callThrough(); spyOn(window, 'Flash'); @@ -388,7 +406,7 @@ describe('AppComponent', () => { }); describe('created', () => { - it('should bind event listeners on eventHub', (done) => { + it('should bind event listeners on eventHub', done => { spyOn(eventHub, '$on'); const newVm = createComponent(); @@ -405,21 +423,21 @@ describe('AppComponent', () => { }); }); - it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `false`', (done) => { + it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `false`', done => { const newVm = createComponent(); newVm.$mount(); Vue.nextTick(() => { - expect(newVm.searchEmptyMessage).toBe('Sorry, no groups or projects matched your search'); + expect(newVm.searchEmptyMessage).toBe('No groups or projects matched your search'); newVm.$destroy(); done(); }); }); - it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `true`', (done) => { + it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `true`', done => { const newVm = createComponent(true); newVm.$mount(); Vue.nextTick(() => { - expect(newVm.searchEmptyMessage).toBe('Sorry, no groups matched your search'); + expect(newVm.searchEmptyMessage).toBe('No groups matched your search'); newVm.$destroy(); done(); }); @@ -427,7 +445,7 @@ describe('AppComponent', () => { }); describe('beforeDestroy', () => { - it('should unbind event listeners on eventHub', (done) => { + it('should unbind event listeners on eventHub', done => { spyOn(eventHub, '$off'); const newVm = createComponent(); @@ -454,7 +472,7 @@ describe('AppComponent', () => { vm.$destroy(); }); - it('should render loading icon', (done) => { + it('should render loading icon', done => { vm.isLoading = true; Vue.nextTick(() => { expect(vm.$el.querySelector('.loading-animation')).toBeDefined(); @@ -463,7 +481,7 @@ describe('AppComponent', () => { }); }); - it('should render groups tree', (done) => { + it('should render groups tree', done => { vm.store.state.groups = 
[mockParentGroupItem]; vm.isLoading = false; vm.store.state.pageInfo = mockPageInfo; @@ -473,7 +491,7 @@ describe('AppComponent', () => { }); }); - it('renders modal confirmation dialog', (done) => { + it('renders modal confirmation dialog', done => { vm.groupLeaveConfirmationMessage = 'Are you sure you want to leave the "foo" group?'; vm.showModal = true; Vue.nextTick(() => { diff --git a/spec/javascripts/helpers/vue_resource_helper.js b/spec/javascripts/helpers/vue_resource_helper.js index 70b7ec4e574..0d1bf5e2e80 100644 --- a/spec/javascripts/helpers/vue_resource_helper.js +++ b/spec/javascripts/helpers/vue_resource_helper.js @@ -5,6 +5,7 @@ export const headersInterceptor = (request, next) => { response.headers.forEach((value, key) => { headers[key] = value; }); + // eslint-disable-next-line no-param-reassign response.headers = headers; }); }; diff --git a/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js index 41d8bfff7e7..b45ae5bbb0f 100644 --- a/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js +++ b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js @@ -30,7 +30,7 @@ describe('Multi-file editor commit sidebar list item', () => { }); it('renders file path', () => { - expect(vm.$el.querySelector('.multi-file-commit-list-path').textContent.trim()).toBe(f.path); + expect(vm.$el.querySelector('.multi-file-commit-list-path').textContent).toContain(f.path); }); it('renders actionn button', () => { diff --git a/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js b/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js index a5b906da8a1..e09ccbe2a63 100644 --- a/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js +++ b/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js @@ -29,7 +29,7 @@ describe('IDE stage file button', () => { }); it('renders button to discard & stage', () => { - expect(vm.$el.querySelectorAll('.btn').length).toBe(2); + expect(vm.$el.querySelectorAll('.btn-blank').length).toBe(2); }); it('calls store with stage button', () => { @@ -39,7 +39,7 @@ describe('IDE stage file button', () => { }); it('calls store with discard button', () => { - vm.$el.querySelector('.dropdown-menu button').click(); + vm.$el.querySelector('.btn-danger').click(); expect(vm.discardFileChanges).toHaveBeenCalledWith(f.path); }); diff --git a/spec/javascripts/ide/components/file_templates/bar_spec.js b/spec/javascripts/ide/components/file_templates/bar_spec.js new file mode 100644 index 00000000000..a688f7f69a6 --- /dev/null +++ b/spec/javascripts/ide/components/file_templates/bar_spec.js @@ -0,0 +1,117 @@ +import Vue from 'vue'; +import { createStore } from '~/ide/stores'; +import Bar from '~/ide/components/file_templates/bar.vue'; +import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { resetStore, file } from '../../helpers'; + +describe('IDE file templates bar component', () => { + let Component; + let vm; + + beforeAll(() => { + Component = Vue.extend(Bar); + }); + + beforeEach(() => { + const store = createStore(); + + store.state.openFiles.push({ + ...file('file'), + opened: true, + active: true, + }); + + vm = mountComponentWithStore(Component, { store }); + }); + + afterEach(() => { + vm.$destroy(); + resetStore(vm.$store); + }); + + describe('template type dropdown', () => { + it('renders dropdown component', () => { + 
expect(vm.$el.querySelector('.dropdown').textContent).toContain('Choose a type'); + }); + + it('calls setSelectedTemplateType when clicking item', () => { + spyOn(vm, 'setSelectedTemplateType').and.stub(); + + vm.$el.querySelector('.dropdown-content button').click(); + + expect(vm.setSelectedTemplateType).toHaveBeenCalledWith({ + name: '.gitlab-ci.yml', + key: 'gitlab_ci_ymls', + }); + }); + }); + + describe('template dropdown', () => { + beforeEach(done => { + vm.$store.state.fileTemplates.templates = [ + { + name: 'test', + }, + ]; + vm.$store.state.fileTemplates.selectedTemplateType = { + name: '.gitlab-ci.yml', + key: 'gitlab_ci_ymls', + }; + + vm.$nextTick(done); + }); + + it('renders dropdown component', () => { + expect(vm.$el.querySelectorAll('.dropdown')[1].textContent).toContain('Choose a template'); + }); + + it('calls fetchTemplate on click', () => { + spyOn(vm, 'fetchTemplate').and.stub(); + + vm.$el + .querySelectorAll('.dropdown-content')[1] + .querySelector('button') + .click(); + + expect(vm.fetchTemplate).toHaveBeenCalledWith({ + name: 'test', + }); + }); + }); + + it('shows undo button if updateSuccess is true', done => { + vm.$store.state.fileTemplates.updateSuccess = true; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.btn-default').style.display).not.toBe('none'); + + done(); + }); + }); + + it('calls undoFileTemplate when clicking undo button', () => { + spyOn(vm, 'undoFileTemplate').and.stub(); + + vm.$el.querySelector('.btn-default').click(); + + expect(vm.undoFileTemplate).toHaveBeenCalled(); + }); + + it('calls setSelectedTemplateType if activeFile name matches a template', done => { + const fileName = '.gitlab-ci.yml'; + + spyOn(vm, 'setSelectedTemplateType'); + vm.$store.state.openFiles[0].name = fileName; + + vm.setInitialType(); + + vm.$nextTick(() => { + expect(vm.setSelectedTemplateType).toHaveBeenCalledWith({ + name: fileName, + key: 'gitlab_ci_ymls', + }); + + done(); + }); + }); +}); diff --git a/spec/javascripts/ide/components/file_templates/dropdown_spec.js b/spec/javascripts/ide/components/file_templates/dropdown_spec.js new file mode 100644 index 00000000000..898796f4fa0 --- /dev/null +++ b/spec/javascripts/ide/components/file_templates/dropdown_spec.js @@ -0,0 +1,201 @@ +import $ from 'jquery'; +import Vue from 'vue'; +import { createStore } from '~/ide/stores'; +import Dropdown from '~/ide/components/file_templates/dropdown.vue'; +import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; +import { resetStore } from '../../helpers'; + +describe('IDE file templates dropdown component', () => { + let Component; + let vm; + + beforeAll(() => { + Component = Vue.extend(Dropdown); + }); + + beforeEach(() => { + const store = createStore(); + + vm = createComponentWithStore(Component, store, { + label: 'Test', + }).$mount(); + }); + + afterEach(() => { + vm.$destroy(); + resetStore(vm.$store); + }); + + describe('async', () => { + beforeEach(() => { + vm.isAsyncData = true; + }); + + it('calls async store method on Bootstrap dropdown event', () => { + spyOn(vm, 'fetchTemplateTypes').and.stub(); + + $(vm.$el).trigger('show.bs.dropdown'); + + expect(vm.fetchTemplateTypes).toHaveBeenCalled(); + }); + + it('renders templates when async', done => { + vm.$store.state.fileTemplates.templates = [ + { + name: 'test', + }, + ]; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.dropdown-content').textContent).toContain('test'); + + done(); + }); + }); + + it('renders loading icon when isLoading is true', done => { + 
vm.$store.state.fileTemplates.isLoading = true; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.loading-container')).not.toBe(null); + + done(); + }); + }); + + it('searches template data', () => { + vm.$store.state.fileTemplates.templates = [ + { + name: 'test', + }, + ]; + vm.searchable = true; + vm.search = 'hello'; + + expect(vm.outputData).toEqual([]); + }); + + it('does not filter data is searchable is false', () => { + vm.$store.state.fileTemplates.templates = [ + { + name: 'test', + }, + ]; + vm.search = 'hello'; + + expect(vm.outputData).toEqual([ + { + name: 'test', + }, + ]); + }); + + it('calls clickItem on click', done => { + spyOn(vm, 'clickItem').and.stub(); + + vm.$store.state.fileTemplates.templates = [ + { + name: 'test', + }, + ]; + + vm.$nextTick(() => { + vm.$el.querySelector('.dropdown-content button').click(); + + expect(vm.clickItem).toHaveBeenCalledWith({ + name: 'test', + }); + + done(); + }); + }); + + it('renders input when searchable is true', done => { + vm.searchable = true; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.dropdown-input')).not.toBe(null); + + done(); + }); + }); + + it('does not render input when searchable is true & showLoading is true', done => { + vm.searchable = true; + vm.$store.state.fileTemplates.isLoading = true; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.dropdown-input')).toBe(null); + + done(); + }); + }); + }); + + describe('sync', () => { + beforeEach(done => { + vm.data = [ + { + name: 'test sync', + }, + ]; + + vm.$nextTick(done); + }); + + it('renders props data', () => { + expect(vm.$el.querySelector('.dropdown-content').textContent).toContain('test sync'); + }); + + it('renders input when searchable is true', done => { + vm.searchable = true; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.dropdown-input')).not.toBe(null); + + done(); + }); + }); + + it('calls clickItem on click', done => { + spyOn(vm, 'clickItem').and.stub(); + + vm.$nextTick(() => { + vm.$el.querySelector('.dropdown-content button').click(); + + expect(vm.clickItem).toHaveBeenCalledWith({ + name: 'test sync', + }); + + done(); + }); + }); + + it('searches template data', () => { + vm.searchable = true; + vm.search = 'hello'; + + expect(vm.outputData).toEqual([]); + }); + + it('does not filter data is searchable is false', () => { + vm.search = 'hello'; + + expect(vm.outputData).toEqual([ + { + name: 'test sync', + }, + ]); + }); + + it('renders dropdown title', done => { + vm.title = 'Test title'; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.dropdown-title').textContent).toContain('Test title'); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/ide/components/repo_commit_section_spec.js b/spec/javascripts/ide/components/repo_commit_section_spec.js index 30cd92b2ca4..d09ccd7ac34 100644 --- a/spec/javascripts/ide/components/repo_commit_section_spec.js +++ b/spec/javascripts/ide/components/repo_commit_section_spec.js @@ -111,7 +111,7 @@ describe('RepoCommitSection', () => { .then(vm.$nextTick) .then(() => { expect(vm.$el.querySelector('.ide-commit-list-container').textContent).toContain( - 'No changes', + 'There are no unstaged changes', ); }) .then(done) @@ -133,7 +133,7 @@ describe('RepoCommitSection', () => { }); it('discards a single file', done => { - vm.$el.querySelector('.multi-file-discard-btn .dropdown-menu button').click(); + vm.$el.querySelector('.multi-file-commit-list li:first-child .js-modal-primary-action').click(); Vue.nextTick(() => { 
expect(vm.$el.querySelector('.ide-commit-list-container').textContent).not.toContain('file1'); diff --git a/spec/javascripts/ide/helpers.js b/spec/javascripts/ide/helpers.js index c11c482fef8..3ce9c9fcda1 100644 --- a/spec/javascripts/ide/helpers.js +++ b/spec/javascripts/ide/helpers.js @@ -5,6 +5,7 @@ import commitState from '~/ide/stores/modules/commit/state'; import mergeRequestsState from '~/ide/stores/modules/merge_requests/state'; import pipelinesState from '~/ide/stores/modules/pipelines/state'; import branchesState from '~/ide/stores/modules/branches/state'; +import fileTemplatesState from '~/ide/stores/modules/file_templates/state'; export const resetStore = store => { const newState = { @@ -13,6 +14,7 @@ export const resetStore = store => { mergeRequests: mergeRequestsState(), pipelines: pipelinesState(), branches: branchesState(), + fileTemplates: fileTemplatesState(), }; store.replaceState(newState); }; diff --git a/spec/javascripts/ide/stores/actions/file_spec.js b/spec/javascripts/ide/stores/actions/file_spec.js index bca2033ff97..1ca811e996b 100644 --- a/spec/javascripts/ide/stores/actions/file_spec.js +++ b/spec/javascripts/ide/stores/actions/file_spec.js @@ -692,21 +692,6 @@ describe('IDE store file actions', () => { .then(done) .catch(done.fail); }); - - it('calls scrollToTab', done => { - const scrollToTabSpy = jasmine.createSpy('scrollToTab'); - const oldScrollToTab = store._actions.scrollToTab; // eslint-disable-line - store._actions.scrollToTab = [scrollToTabSpy]; // eslint-disable-line - - store - .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }) - .then(() => { - expect(scrollToTabSpy).toHaveBeenCalled(); - store._actions.scrollToTab = oldScrollToTab; // eslint-disable-line - }) - .then(done) - .catch(done.fail); - }); }); describe('removePendingTab', () => { diff --git a/spec/javascripts/ide/stores/actions_spec.js b/spec/javascripts/ide/stores/actions_spec.js index d84f1717a61..c9a1158a14e 100644 --- a/spec/javascripts/ide/stores/actions_spec.js +++ b/spec/javascripts/ide/stores/actions_spec.js @@ -305,7 +305,11 @@ describe('Multi-file store actions', () => { describe('stageAllChanges', () => { it('adds all files from changedFiles to stagedFiles', done => { - store.state.changedFiles.push(file(), file('new')); + const openFile = { ...file(), path: 'test' }; + + store.state.openFiles.push(openFile); + store.state.stagedFiles.push(openFile); + store.state.changedFiles.push(openFile, file('new')); testAction( stageAllChanges, @@ -316,7 +320,12 @@ describe('Multi-file store actions', () => { { type: types.STAGE_CHANGE, payload: store.state.changedFiles[0].path }, { type: types.STAGE_CHANGE, payload: store.state.changedFiles[1].path }, ], - [], + [ + { + type: 'openPendingTab', + payload: { file: openFile, keyPrefix: 'staged' }, + }, + ], done, ); }); @@ -324,7 +333,11 @@ describe('Multi-file store actions', () => { describe('unstageAllChanges', () => { it('removes all files from stagedFiles after unstaging', done => { - store.state.stagedFiles.push(file(), file('new')); + const openFile = { ...file(), path: 'test' }; + + store.state.openFiles.push(openFile); + store.state.changedFiles.push(openFile); + store.state.stagedFiles.push(openFile, file('new')); testAction( unstageAllChanges, @@ -334,7 +347,12 @@ describe('Multi-file store actions', () => { { type: types.UNSTAGE_CHANGE, payload: store.state.stagedFiles[0].path }, { type: types.UNSTAGE_CHANGE, payload: store.state.stagedFiles[1].path }, ], - [], + [ + { + type: 'openPendingTab', + payload: { file: 
openFile, keyPrefix: 'unstaged' }, + }, + ], done, ); }); diff --git a/spec/javascripts/ide/stores/modules/file_templates/actions_spec.js b/spec/javascripts/ide/stores/modules/file_templates/actions_spec.js index f831a9f0a5d..c29dd9f0d06 100644 --- a/spec/javascripts/ide/stores/modules/file_templates/actions_spec.js +++ b/spec/javascripts/ide/stores/modules/file_templates/actions_spec.js @@ -148,14 +148,66 @@ describe('IDE file templates actions', () => { }); describe('setSelectedTemplateType', () => { - it('commits SET_SELECTED_TEMPLATE_TYPE', done => { - testAction( - actions.setSelectedTemplateType, - 'test', - state, - [{ type: types.SET_SELECTED_TEMPLATE_TYPE, payload: 'test' }], - [], - done, + it('commits SET_SELECTED_TEMPLATE_TYPE', () => { + const commit = jasmine.createSpy('commit'); + const options = { + commit, + dispatch() {}, + rootGetters: { + activeFile: { + name: 'test', + prevPath: '', + }, + }, + }; + + actions.setSelectedTemplateType(options, { name: 'test' }); + + expect(commit).toHaveBeenCalledWith(types.SET_SELECTED_TEMPLATE_TYPE, { name: 'test' }); + }); + + it('dispatches discardFileChanges if prevPath matches templates name', () => { + const dispatch = jasmine.createSpy('dispatch'); + const options = { + commit() {}, + dispatch, + rootGetters: { + activeFile: { + name: 'test', + path: 'test', + prevPath: 'test', + }, + }, + }; + + actions.setSelectedTemplateType(options, { name: 'test' }); + + expect(dispatch).toHaveBeenCalledWith('discardFileChanges', 'test', { root: true }); + }); + + it('dispatches renameEntry if file name doesnt match', () => { + const dispatch = jasmine.createSpy('dispatch'); + const options = { + commit() {}, + dispatch, + rootGetters: { + activeFile: { + name: 'oldtest', + path: 'oldtest', + prevPath: '', + }, + }, + }; + + actions.setSelectedTemplateType(options, { name: 'test' }); + + expect(dispatch).toHaveBeenCalledWith( + 'renameEntry', + { + path: 'oldtest', + name: 'test', + }, + { root: true }, ); }); }); @@ -332,5 +384,20 @@ describe('IDE file templates actions', () => { expect(commit).toHaveBeenCalledWith('SET_UPDATE_SUCCESS', false); }); + + it('dispatches discardFileChanges if file has prevPath', () => { + const dispatch = jasmine.createSpy('dispatch'); + const rootGetters = { + activeFile: { path: 'test', prevPath: 'newtest', raw: 'raw content' }, + }; + + actions.undoFileTemplate({ dispatch, commit() {}, rootGetters }); + + expect(dispatch.calls.mostRecent().args).toEqual([ + 'discardFileChanges', + 'test', + { root: true }, + ]); + }); }); }); diff --git a/spec/javascripts/ide/stores/modules/file_templates/getters_spec.js b/spec/javascripts/ide/stores/modules/file_templates/getters_spec.js index e337c3f331b..17cb457881f 100644 --- a/spec/javascripts/ide/stores/modules/file_templates/getters_spec.js +++ b/spec/javascripts/ide/stores/modules/file_templates/getters_spec.js @@ -1,3 +1,5 @@ +import createState from '~/ide/stores/state'; +import { activityBarViews } from '~/ide/constants'; import * as getters from '~/ide/stores/modules/file_templates/getters'; describe('IDE file templates getters', () => { @@ -8,22 +10,49 @@ describe('IDE file templates getters', () => { }); describe('showFileTemplatesBar', () => { - it('finds template type by name', () => { + let rootState; + + beforeEach(() => { + rootState = createState(); + }); + + it('returns true if template is found and currentActivityView is edit', () => { + rootState.currentActivityView = activityBarViews.edit; + + expect( + getters.showFileTemplatesBar( + null, + { + 
templateTypes: getters.templateTypes(), + }, + rootState, + )('LICENSE'), + ).toBe(true); + }); + + it('returns false if template is found and currentActivityView is not edit', () => { + rootState.currentActivityView = activityBarViews.commit; + expect( - getters.showFileTemplatesBar(null, { - templateTypes: getters.templateTypes(), - })('LICENSE'), - ).toEqual({ - name: 'LICENSE', - key: 'licenses', - }); + getters.showFileTemplatesBar( + null, + { + templateTypes: getters.templateTypes(), + }, + rootState, + )('LICENSE'), + ).toBe(false); }); it('returns undefined if not found', () => { expect( - getters.showFileTemplatesBar(null, { - templateTypes: getters.templateTypes(), - })('test'), + getters.showFileTemplatesBar( + null, + { + templateTypes: getters.templateTypes(), + }, + rootState, + )('test'), ).toBe(undefined); }); }); diff --git a/spec/javascripts/ide/stores/mutations_spec.js b/spec/javascripts/ide/stores/mutations_spec.js index 6ce76aaa03b..41dd3d3c67f 100644 --- a/spec/javascripts/ide/stores/mutations_spec.js +++ b/spec/javascripts/ide/stores/mutations_spec.js @@ -339,5 +339,13 @@ describe('Multi-file store mutations', () => { expect(localState.entries.parentPath.tree.length).toBe(1); }); + + it('adds to openFiles if previously opened', () => { + localState.entries.oldPath.opened = true; + + mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' }); + + expect(localState.openFiles).toEqual([localState.entries.newPath]); + }); }); }); diff --git a/spec/javascripts/lazy_loader_spec.js b/spec/javascripts/lazy_loader_spec.js index 1d81e4e2d1a..c177d79b9e0 100644 --- a/spec/javascripts/lazy_loader_spec.js +++ b/spec/javascripts/lazy_loader_spec.js @@ -2,10 +2,10 @@ import LazyLoader from '~/lazy_loader'; let lazyLoader = null; -describe('LazyLoader', function () { +describe('LazyLoader', function() { preloadFixtures('issues/issue_with_comment.html.raw'); - beforeEach(function () { + beforeEach(function() { loadFixtures('issues/issue_with_comment.html.raw'); lazyLoader = new LazyLoader({ observerNode: 'body', @@ -13,8 +13,8 @@ describe('LazyLoader', function () { // Doing everything that happens normally in onload lazyLoader.loadCheck(); }); - describe('behavior', function () { - it('should copy value from data-src to src for img 1', function (done) { + describe('behavior', function() { + it('should copy value from data-src to src for img 1', function(done) { const img = document.querySelectorAll('img[data-src]')[0]; const originalDataSrc = img.getAttribute('data-src'); img.scrollIntoView(); @@ -26,7 +26,7 @@ describe('LazyLoader', function () { }, 100); }); - it('should lazy load dynamically added data-src images', function (done) { + it('should lazy load dynamically added data-src images', function(done) { const newImg = document.createElement('img'); const testPath = '/img/testimg.png'; newImg.className = 'lazy'; @@ -41,7 +41,7 @@ describe('LazyLoader', function () { }, 100); }); - it('should not alter normal images', function (done) { + it('should not alter normal images', function(done) { const newImg = document.createElement('img'); const testPath = '/img/testimg.png'; newImg.setAttribute('src', testPath); diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js index babad296f09..28151c7e658 100644 --- a/spec/javascripts/lib/utils/common_utils_spec.js +++ b/spec/javascripts/lib/utils/common_utils_spec.js @@ -29,24 +29,46 @@ describe('common_utils', () => { }); }); - describe('getUrlParamsArray', () => { - 
it('should return params array', () => { - expect(commonUtils.getUrlParamsArray() instanceof Array).toBe(true); + describe('urlParamsToArray', () => { + it('returns empty array for empty querystring', () => { + expect(commonUtils.urlParamsToArray('')).toEqual([]); + }); + + it('should decode params', () => { + expect( + commonUtils.urlParamsToArray('?label_name%5B%5D=test')[0], + ).toBe('label_name[]=test'); }); it('should remove the question mark from the search params', () => { - const paramsArray = commonUtils.getUrlParamsArray(); + const paramsArray = commonUtils.urlParamsToArray('?test=thing'); expect(paramsArray[0][0] !== '?').toBe(true); }); + }); - it('should decode params', () => { - window.history.pushState('', '', '?label_name%5B%5D=test'); + describe('urlParamsToObject', () => { + it('parses path for label with trailing +', () => { + expect( + commonUtils.urlParamsToObject('label_name[]=label%2B', {}), + ).toEqual({ + label_name: ['label+'], + }); + }); + it('parses path for milestone with trailing +', () => { expect( - commonUtils.getUrlParamsArray()[0], - ).toBe('label_name[]=test'); + commonUtils.urlParamsToObject('milestone_title=A%2B', {}), + ).toEqual({ + milestone_title: 'A+', + }); + }); - window.history.pushState('', '', '?'); + it('parses path for search terms with spaces', () => { + expect( + commonUtils.urlParamsToObject('search=two+words', {}), + ).toEqual({ + search: 'two words', + }); }); }); diff --git a/spec/javascripts/lib/utils/mock_data.js b/spec/javascripts/lib/utils/mock_data.js index fd0d62b751f..93d0d6259b9 100644 --- a/spec/javascripts/lib/utils/mock_data.js +++ b/spec/javascripts/lib/utils/mock_data.js @@ -2,4 +2,4 @@ export const faviconDataUrl = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACA export const overlayDataUrl = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAA85JREFUWAntVllIVGEUPv/9b46O41KplYN7PeRkti8TjQlhCUGh3MmeQugpIsGKAi2soIcIooiohxYKK2daqDAlIpIiWwxtQaJcaHE0d5tMrbn37z9XRqfR0TvVW56Hudf//uec72zfEWBCJjIwkYGJDPzvGSD/KgExN3Oi2Q+2DJgSDYQEMwItVGH1iZGmJw/Si1y+/PwVAMYYib22MYc/8hVQFgKDEfYoId0KYzagAQebsos/ewMZoeB9wdffcTYpQSaCTWHKoqSQaDk7zkIt0+aCUR8BelEHrf3dUNv9AcqbnsHtT5UKB/hTASh0SLYjnjb/CIDRJi0XiFAaJOpCD8zLpdb4NB66b1OfelthX815dtdRRfiti2aAXLvVLiMQ6olGyztGDkSo4JGGXk8/QFdGpYzpHG2GBQTDhtgVhPEaVbbVpvI6GJz22rv4TcAfrYI1x7Rj5MWWAppomKFVVb2302SFzUkZHAbkG+0b1+Gh77yNYjrmqnWTrLBLRxdvBWv8qlFujH/kYjJYyvLkj71t78zAUvzMAMnHhpN4zf9UREJhd8omyssxu1IgazQDwDnHUcNuH6vhPIE1fmuBzHt74Hn7W89jWGtcAjoaIDOFrdcMYJBkgOCoaRF0Lj0oglddDbCj6tRvKjphEpgjkzEQs2YAKsNxMzjn3nKurhzK+Ly7xe28ua8TwgMMcHJZnvvT0BPtEEKM4tDJ+C8GvIIk4ylINIXVZ0EUKJxYuh3mhCeokbudl6TtVc88dfBdLwbyaWB6zQCYQJpBYSrDGQxBQ/ZWRM2B+VNmQnVnHWx7elyNuL2/R336co7KyJR8CL9oLgEuFlREevWUkEl6uGwpVEG4FBm0OEf9N10NMgPlvWYAuNVwsWDKvcUNYsHUWTCZ13ysyFEXe6TO6aC8CUr9IiK+A05TQrc8yjwmxARHeeMAPlfQJw+AQRwu0YhL/GDXi9NwufG+S8dYkuYMqIb4SsWthotlNMOUCOM6r+G9cqXxPmd1dqrBav/o1zJy2l5/NUjJA/VORwYuFnOUaTQcPs9wMqwV++Xv8oADxKAcZ8nLPr8AoGW+xR6HSqYk3GodAz2QNj0V+Gr26dT9ASNH5239Pf0gktVNWZca8ZvfAFBprWS6hSu1pqt++Y0PD+WIwDAhIWQGtzvSHDbcodfFUFB9hg1Gjs5LXqIdFL+acFBl+FddqYwdxsWC3I70OvgfUaA65zhq2O2c8VxYcyIGFTVlXegYtvCXANCQZJMobjVcLMjtSK/IcEgyOOe8Ve5w7ryKDefp2P3+C/5ohv8HZmVLAAAAAElFTkSuQmCC'; -export const faviconWithOverlayDataUrl = 
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAGt0lEQVRYR8WWf3DT5R3H35/vN0lDiCztSuiPAEnTFhSOUamIzFGokwMH55g0E845yjbP6+4qIoiHY6JjnHLeRI6h9jgQpQcD3ImH2u00eHBwjGthKLI1TSm26S8oKYVS0vT7/X52z7ckTUhqC/yx55/kks/zeb+ez6/nIWYm/B8XJQB4SGq6lL+CJA47vvRtvWs2D0nNl/Kf0qBZxx6u23arv0QAAIHivK8BynB4ffa7BgDQXJx/ngGnw+uThgTwP5ZnMocoJAxVxZDVZ+0L5n5WF75TkMafjLdJxpSg2E+gqW1X7zk3rbpaifhLiEBgTv4mEFbpBoTyu01DoDj/dQAv9rvjtdnp/k3Yx9rgAMV5QYCsAAwAgg6vL/1OTy/2BYrzzwLIBWACuNHhrXPG+otGoKaw0JA58kqGJtOFfgPS8yWT4sz88nzj7UIIfz+wd0mRdEZPLMnp2V/8R0+JrhLbBYFHJvwWzBUxYgqYNzhG+zfEhm24MIE5ectBtP0W+y0Or29FcoDifHFSRxwAcMrh9c0YrmisXaA4r0V0U8xvopgDDq9PpCQ+Ag0/zbEbNUNbMiG9fTwkDTsKHpJa2t1Zmiw1AqLg+tMZ+R6WVVtnZ2qP6Ib+FIjh05G3lsDrB4xjUIbRDeM+WZLJYZ4B1rKMzKPm/fdyzs9qg6WT225IMnPcuYjxbPZhn57qaA00zc4/QYT7b1b/wAZmDYSLjsN1WcmiM+6jXz7JTCs1aNPASBjrtrCGOXVBLK9ph72772bc0REZcsQlkEVoOxblhaFBH0Bxi6GBWFNC8gpV0XqYSe/hI85R9o1zxr/QaZbdbmuW9oRzljRrzBRkW9JhMaTgYugKzl35DlXNJ/Fp43FImoZnz7T0ln7bLihM0g85N627vkWPgLrbvYyCvAP1+rRIWETA5QsyQlcJYOCbMRasWpALtljwSsFyeJxFYsoNWqdN1y/ildM78Y/WGjxx8TL+ol3oluy8VupKe7cfoNLdCJkdqEUPOmBJ5ksJoae91mBps5lQ6pkIm20MPiz6A3KsmcNukDe/3Ye3zh3A77Q2XqcGjslLz88i/nB8pkpSoL8nAFSTBpUN4qSxS5KB5jOGUOniCebmzFQcevSN2xKP+Fp7ajt21f8TOxU/5i45JZFS6XwcTB9HxZgUnGTRNgk31x5jet+aGU7jWw+UweOcPeyTxxoqrGL25+UwdjehSvnmOVIqcz4C8y8GAABcQwjnYI5NheikhQWT+EZmDh2ev/l7cz4U2cGmYyg78TYqVH87Kbtd1wFY4hsVQAt14zu2RiDaTUZMf/BHWD35STx37wDv94k1dLeh7MRmvDZ1GR5Inxg17dX6MPnjZfh5X6tGSqXrV2B8ACIx98UNGOlV4CxCuA6zqIeq9FQ8c68bhx7ZiIK06CQdVF+Il3y1Hq03gnDfk4Uj8zbH2T51dCPOtlW39Q+iPTl2VSMfwKPiKw8aTuhgpl1Zdqxzj8PphRWwm21xZjv9VcgYkYb52dP132PFbSYr/la0DpNtrrg9a2oqsKfB2zlwG+4nSe1z7QDjaQBi2Eh6J4QRwimYt43LwOsuB2oX7YLVMCLqTAya3xx/EwZJxtYHy3WhyMkHExebXz3zAbbXfdo7AFBRaMAz1Ypa6XoaoPejKRGteZm6D3SlWVdOcOHo/Lfj2u9aXw+WHNmA00G/DiFEO0Jd+meyk0fIf/+vLfik6Xhj4qN0v7i5HCY1bBQPk+ij9GSzNbzYNdH03kMrscARfzvHQgiBocSFTVHVCrW+u+WrpK9iCIgS1rRK93oG/1GkRJVIup8KMNs1Sw/1rUtALD36ZzRca8XeJDmPtRc18vDn5SCJViYHENY3IZTK3JkE7RAYtpdkp3bAaJeOzN+CsSMTX+wqa7ih9sbVSLI2WV3znihAJYXZPThA7M6KQoM2MniyhUxTioxTpKLMadjx8Jqh5k3S//8d9GOh92XWmP/aXLKvfHgA0ZTklL0jj9m6UR6L5+9bjFWTPLcFIWbCY1+8pHb0drWybJ4aWLQrODyAWJndzoyylNyGg0hL+bV7Ll4rKIWB5CFBxMlLj21SL4W6QjDQjwOL9n4tNt0+AADPfo+UqgXPHJLSJrkso7F6ylLMy56OFMmYACIKblvtQext8Iqp0swyLYiI3zEAbs6Ml3cXv/p3Y+ryq5KcnSKb1Jmj75P7X0Rm/UV0tvO86r/WIhORwszvkmHEehH2WMo7ikDUQUWhoaIG+NNc96Os8eMEmklE2Qy2ANTO0OrA+CwFOFBfsq8pWZ7+B25aDBxvPp+QAAAAAElFTkSuQmCC'; +export const faviconWithOverlayDataUrl = 
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAGtElEQVRYR8WXf3CT9R3H35/nSdIQIktrCf0RStI0FYRjVBAccxTq5MDBKUoz4ZyjbPO87q4yBsPDMdExTjlvIsdQexyI0oMBeuKhdjsNHhwcMgpjIlublLIm/UlJKZSSJs/z/e6+T5v0CQ22wB/7/pPck8/383l9fj6fEOec8H88NAjAS1LwknsFSVLU8WXd1rtm85LUeKnwGQKzjj3s33azvsEAAEIlnn8ByHL4/Pa7BgAQLCm8QOBOh88vDQkQeMxjMkcQEYKqYsyJWWPhgs/80TsFafzROJtkNIXFfYI0pfXqPeennjqlxPUNikBoTuEmEF+lCRBV3G0aQiWFrwH8d30AWJubGdiEfZzdGqDEEwbICnADQGGHry7zTr0X94IlnnMACggwAWh0+PxOvb5EBGqmTTNkj7ySxWS62C+g5Usm1Zn95YXG24UQ+r5n75Li6Ux4LBkyc7/4t5YSLSr6Lgg9UvBLcKocMEYKON/gGB3YoA/bcGFCczzLQdieLE9bHL66FakBSjzCU0cSAHDa4at7aLhG9XLBEk8zAVnxZxyIEhBy+PwFgwAafpxvNzK5NZUhrX28JA07Cl6SmtvcOUwm4ZAouHj7ad+jMrN1dqb3iG7oS4EYPh2etQS+XiesC8TQ3ZD3yZJsHuUPgbMcI+ej5v3ncv5PasNlk1p7JJnzJL+I0/O5h+u0VCdqIDi78AQRHuirft3hYJzQPvawPydVdPI+/OnTnNNKBjYVXHRa8rFFGeb4w1he0wZ7d/84IXTEhxzxUsgitB2LPFGwvgGUfLSeZUpEXqEqrIdz0nr4iHOUfeOccb/tNMtutzWHPeWcJc0aMxm5lkxYDGloj1zB+Sv/RXXTSXzaeBwSY3j+bHNv2bdtMYCbpHtRkNFd36xFQN3tXkZhvgP1fdPi5kMEXL4oIXKVAA58M8aCVQs84BYLXi5aDq+zGJTqYr+i4PV2vHxmJ/7WUoOn2i/jz6yhW7JjrdSV8U4fQFV+I2Q4UIsedMCSSlcsgp72WtnSajOhzDsBNtsYfFD8e+Rbs4fdIG98uw9vnj+AX7FWvk4NHZOXXphF/INx2SpJIU2L8L4GDAoMwlP9kWSg6awcKVs83tyUnY5Dj75+W8bjutae3o5d9X/HTiWAuUtOS6RUOR8Hp48TxjgU/AMSeKJ1Ej/tMWXG1sxwGt98sBxe5+xhe64XVLiK2Z9XwNgdRLXyzQsC4ENwelIHAFxDBOdh1qdCdNLCoon8RnY+HZ6/+TtzPhTZweAxlJ94C5VqoI2U3a7rACzJjQqgBd24CGscos1kxPQZ38fqSU/jhQkDvN9lrKG7FeUnNuPVKcvwYOb4hGgvi2HSx8vwRKyJkVLl+hk43gdBAcfADBD1cA4RXIdZ1EN1Zjqem+DGoUc2oigjMUlvaV8YL/1qPVpuhOG+JwdH5m1Okn3m6Eacaz3V2jeI9uTbVYY6AKOSKw8MX0MBg2lXjh3r3Hk4s7ASdrMtSWxnoBpZIzIwP3e69lxv3Gay4q/F6zDJ5kq6s6amEnsafJ0Db8P9JKkx1w5wPJuY36IToojgNMzb8rLwmsuB2kW7YDWMSCgTg+YXx9+AQZKxdUaFZiju+a2Mi8uvnH0f2/2f9g4AVE4z4LlTilrlehag9xIpEam4jO4DXfdaV97nwtH5byW137VYD5Yc2YAz4YAGIYx2RLq0z1Sex8l//fUWfBI83jh4Kd1PEuAwqVGjWEwSS+nJJmt0sWu86d0frMQCR/LbWQ8hDAxlXMgUV69Q67ubv0q5FUNAlHKmVLnXE/gfREpUiaQHqAizXbO0UN98BMTSo39Cw7UW7E2Rc728qJGHP68ASbQyNYCQTkAUzCSwQ+CwvSjnsQPGLOnI/C0YO3Lwxq5yhhtqb1KNpGqT1TXvigJU0jh33xpAf7NymoGNDJ9sJtPkYuNkqTh7KnY8vGaoeZPy93+GA1joe4kzzv/SVLqvYngA/dFgVfnlb8tjtm6Ux+I39y/Gqone24IQM+GxL15UO3q7WrhsnhJatCs8PAC9md3OrPK0goaDyEj7uXsuXi0qg4HkIUGE52XHNqmXIl0RGOiHoUV7xb+v5K14SC39At79Ximdhc8ekjImuiyjsXryUszLnY40yThIhSi4bbUHsbfBJ6ZKE5dpQdz4HQOgf2a8tLvklY+M6cuvSnJummxSZ46+X+7biMzaRnSu84IauNYsE5HCOX+HDCPWi7DrKW8/BTcVZ2UN8Me57kc5448TaCYR5XJwC0BtHMwPjs/SgAP1pfuCqSL8Pxhr/wunLWAOAAAAAElFTkSuQmCC'; diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/javascripts/lib/utils/text_utility_spec.js index d60485b1308..ac3270baef5 100644 --- a/spec/javascripts/lib/utils/text_utility_spec.js +++ b/spec/javascripts/lib/utils/text_utility_spec.js @@ -63,6 +63,12 @@ describe('text_utility', () => { }); }); + describe('slugifyWithHyphens', () => { + it('should replaces whitespaces with hyphens and convert to lower case', () => { + expect(textUtils.slugifyWithHyphens('My Input String')).toEqual('my-input-string'); + }); + }); + describe('stripHtml', () => { it('replaces html tag with the default replacement', () => { expect(textUtils.stripHtml('This is a text with <p>html</p>.')).toEqual( diff --git a/spec/javascripts/monitoring/graph/flag_spec.js b/spec/javascripts/monitoring/graph/flag_spec.js index 19278312b6d..a837b71db0b 100644 --- a/spec/javascripts/monitoring/graph/flag_spec.js +++ b/spec/javascripts/monitoring/graph/flag_spec.js @@ -35,7 +35,7 @@ const defaultValuesComponent = { unitOfDisplay: 'ms', currentDataIndex: 0, legendTitle: 'Average', - currentCoordinates: [], + currentCoordinates: {}, }; const deploymentFlagData = { diff 
--git a/spec/javascripts/monitoring/graph_spec.js b/spec/javascripts/monitoring/graph_spec.js index a46a387a534..990619b4109 100644 --- a/spec/javascripts/monitoring/graph_spec.js +++ b/spec/javascripts/monitoring/graph_spec.js @@ -113,6 +113,9 @@ describe('Graph', () => { projectPath, }); + // simulate moving mouse over data series + component.seriesUnderMouse = component.timeSeries; + component.positionFlag(); expect(component.currentData).toBe(component.timeSeries[0].values[10]); }); diff --git a/spec/javascripts/notes/components/note_actions_spec.js b/spec/javascripts/notes/components/note_actions_spec.js index 52cc42cb53d..d7298cb3483 100644 --- a/spec/javascripts/notes/components/note_actions_spec.js +++ b/spec/javascripts/notes/components/note_actions_spec.js @@ -28,7 +28,7 @@ describe('issue_note_actions component', () => { canEdit: true, canAwardEmoji: true, canReportAsAbuse: true, - noteId: 539, + noteId: '539', noteUrl: 'https://localhost:3000/group/project/merge_requests/1#note_1', reportAbusePath: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_539&user_id=26', @@ -59,6 +59,20 @@ describe('issue_note_actions component', () => { expect(vm.$el.querySelector(`a[href="${props.reportAbusePath}"]`)).toBeDefined(); }); + it('should be possible to copy link to a note', () => { + expect(vm.$el.querySelector('.js-btn-copy-note-link')).not.toBeNull(); + }); + + it('should not show copy link action when `noteUrl` prop is empty', done => { + vm.noteUrl = ''; + Vue.nextTick() + .then(() => { + expect(vm.$el.querySelector('.js-btn-copy-note-link')).toBeNull(); + }) + .then(done) + .catch(done.fail); + }); + it('should be possible to delete comment', () => { expect(vm.$el.querySelector('.js-note-delete')).toBeDefined(); }); @@ -77,7 +91,7 @@ describe('issue_note_actions component', () => { canEdit: false, canAwardEmoji: false, canReportAsAbuse: false, - noteId: 539, + noteId: '539', noteUrl: 'https://localhost:3000/group/project/merge_requests/1#note_1', reportAbusePath: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_539&user_id=26', diff --git a/spec/javascripts/notes/components/note_awards_list_spec.js b/spec/javascripts/notes/components/note_awards_list_spec.js index 9d98ba219da..6a6a810acff 100644 --- a/spec/javascripts/notes/components/note_awards_list_spec.js +++ b/spec/javascripts/notes/components/note_awards_list_spec.js @@ -30,7 +30,7 @@ describe('note_awards_list component', () => { propsData: { awards: awardsMock, noteAuthorId: 2, - noteId: 545, + noteId: '545', canAwardEmoji: true, toggleAwardPath: '/gitlab-org/gitlab-ce/notes/545/toggle_award_emoji', }, @@ -70,7 +70,7 @@ describe('note_awards_list component', () => { propsData: { awards: awardsMock, noteAuthorId: 2, - noteId: 545, + noteId: '545', canAwardEmoji: false, toggleAwardPath: '/gitlab-org/gitlab-ce/notes/545/toggle_award_emoji', }, diff --git a/spec/javascripts/notes/components/note_form_spec.js b/spec/javascripts/notes/components/note_form_spec.js index 95d400ab3df..147ffcf1b81 100644 --- a/spec/javascripts/notes/components/note_form_spec.js +++ b/spec/javascripts/notes/components/note_form_spec.js @@ -19,7 +19,7 @@ describe('issue_note_form component', () => { props = { isEditing: false, noteBody: 'Magni suscipit eius consectetur enim et ex et commodi.', - noteId: 545, + noteId: '545', }; vm = new Component({ @@ -32,6 +32,22 @@ describe('issue_note_form component', () => { vm.$destroy(); }); + 
describe('noteHash', () => { + it('returns note hash string based on `noteId`', () => { + expect(vm.noteHash).toBe(`#note_${props.noteId}`); + }); + + it('return note hash as `#` when `noteId` is empty', done => { + vm.noteId = ''; + Vue.nextTick() + .then(() => { + expect(vm.noteHash).toBe('#'); + }) + .then(done) + .catch(done.fail); + }); + }); + describe('conflicts editing', () => { it('should show conflict message if note changes outside the component', done => { vm.isEditing = true; diff --git a/spec/javascripts/notes/components/note_header_spec.js b/spec/javascripts/notes/components/note_header_spec.js index a3c6bf78988..379780f43a0 100644 --- a/spec/javascripts/notes/components/note_header_spec.js +++ b/spec/javascripts/notes/components/note_header_spec.js @@ -33,7 +33,7 @@ describe('note_header component', () => { }, createdAt: '2017-08-02T10:51:58.559Z', includeToggle: false, - noteId: 1394, + noteId: '1394', expanded: true, }, }).$mount(); @@ -47,6 +47,16 @@ describe('note_header component', () => { it('should render timestamp link', () => { expect(vm.$el.querySelector('a[href="#note_1394"]')).toBeDefined(); }); + + it('should not render user information when prop `author` is empty object', done => { + vm.author = {}; + Vue.nextTick() + .then(() => { + expect(vm.$el.querySelector('.note-header-author-name')).toBeNull(); + }) + .then(done) + .catch(done.fail); + }); }); describe('discussion', () => { @@ -66,7 +76,7 @@ describe('note_header component', () => { }, createdAt: '2017-08-02T10:51:58.559Z', includeToggle: true, - noteId: 1395, + noteId: '1395', expanded: true, }, }).$mount(); diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js index 0423fcb6ec4..1f030e5af28 100644 --- a/spec/javascripts/notes/mock_data.js +++ b/spec/javascripts/notes/mock_data.js @@ -66,7 +66,7 @@ export const individualNote = { individual_note: true, notes: [ { - id: 1390, + id: '1390', attachment: { url: null, filename: null, @@ -111,7 +111,7 @@ export const individualNote = { }; export const note = { - id: 546, + id: '546', attachment: { url: null, filename: null, @@ -174,7 +174,7 @@ export const discussionMock = { expanded: true, notes: [ { - id: 1395, + id: '1395', attachment: { url: null, filename: null, @@ -211,7 +211,7 @@ export const discussionMock = { path: '/gitlab-org/gitlab-ce/notes/1395', }, { - id: 1396, + id: '1396', attachment: { url: null, filename: null, @@ -257,7 +257,7 @@ export const discussionMock = { path: '/gitlab-org/gitlab-ce/notes/1396', }, { - id: 1437, + id: '1437', attachment: { url: null, filename: null, @@ -308,7 +308,7 @@ export const discussionMock = { }; export const loggedOutnoteableData = { - id: 98, + id: '98', iid: 26, author_id: 1, description: '', @@ -358,7 +358,7 @@ export const collapseNotesMock = [ individual_note: true, notes: [ { - id: 1390, + id: '1390', attachment: null, author: { id: 1, @@ -393,7 +393,7 @@ export const collapseNotesMock = [ individual_note: true, notes: [ { - id: 1391, + id: '1391', attachment: null, author: { id: 1, @@ -433,7 +433,7 @@ export const INDIVIDUAL_NOTE_RESPONSE_MAP = { expanded: true, notes: [ { - id: 1390, + id: '1390', attachment: { url: null, filename: null, @@ -495,7 +495,7 @@ export const INDIVIDUAL_NOTE_RESPONSE_MAP = { expanded: true, notes: [ { - id: 1391, + id: '1391', attachment: { url: null, filename: null, @@ -544,7 +544,7 @@ export const INDIVIDUAL_NOTE_RESPONSE_MAP = { '/gitlab-org/gitlab-ce/notes/1471': { commands_changes: null, valid: true, - id: 1471, + id: '1471', 
attachment: null, author: { id: 1, @@ -600,7 +600,7 @@ export const DISCUSSION_NOTE_RESPONSE_MAP = { expanded: true, notes: [ { - id: 1471, + id: '1471', attachment: { url: null, filename: null, @@ -671,7 +671,7 @@ export const notesWithDescriptionChanges = [ expanded: true, notes: [ { - id: 901, + id: '901', type: null, attachment: null, author: { @@ -718,7 +718,7 @@ export const notesWithDescriptionChanges = [ expanded: true, notes: [ { - id: 902, + id: '902', type: null, attachment: null, author: { @@ -765,7 +765,7 @@ export const notesWithDescriptionChanges = [ expanded: true, notes: [ { - id: 903, + id: '903', type: null, attachment: null, author: { @@ -809,7 +809,7 @@ export const notesWithDescriptionChanges = [ expanded: true, notes: [ { - id: 904, + id: '904', type: null, attachment: null, author: { @@ -854,7 +854,7 @@ export const notesWithDescriptionChanges = [ expanded: true, notes: [ { - id: 905, + id: '905', type: null, attachment: null, author: { @@ -898,7 +898,7 @@ export const notesWithDescriptionChanges = [ expanded: true, notes: [ { - id: 906, + id: '906', type: null, attachment: null, author: { @@ -945,7 +945,7 @@ export const collapsedSystemNotes = [ expanded: true, notes: [ { - id: 901, + id: '901', type: null, attachment: null, author: { @@ -992,7 +992,7 @@ export const collapsedSystemNotes = [ expanded: true, notes: [ { - id: 902, + id: '902', type: null, attachment: null, author: { @@ -1039,7 +1039,7 @@ export const collapsedSystemNotes = [ expanded: true, notes: [ { - id: 904, + id: '904', type: null, attachment: null, author: { @@ -1084,7 +1084,7 @@ export const collapsedSystemNotes = [ expanded: true, notes: [ { - id: 905, + id: '905', type: null, attachment: null, author: { @@ -1129,7 +1129,7 @@ export const collapsedSystemNotes = [ expanded: true, notes: [ { - id: 906, + id: '906', type: null, attachment: null, author: { diff --git a/spec/javascripts/projects/project_new_spec.js b/spec/javascripts/projects/project_new_spec.js index 84515d2bf97..dace834a3c8 100644 --- a/spec/javascripts/projects/project_new_spec.js +++ b/spec/javascripts/projects/project_new_spec.js @@ -4,12 +4,14 @@ import projectNew from '~/projects/project_new'; describe('New Project', () => { let $projectImportUrl; let $projectPath; + let $projectName; beforeEach(() => { setFixtures(` <div class='toggle-import-form'> <div class='import-url-data'> <input id="project_import_url" /> + <input id="project_name" /> <input id="project_path" /> </div> </div> @@ -17,6 +19,7 @@ describe('New Project', () => { $projectImportUrl = $('#project_import_url'); $projectPath = $('#project_path'); + $projectName = $('#project_name'); }); describe('deriveProjectPathFromUrl', () => { @@ -129,4 +132,31 @@ describe('New Project', () => { }); }); }); + + describe('deriveSlugFromProjectName', () => { + beforeEach(() => { + projectNew.bindEvents(); + $projectName.val('').keyup(); + }); + + it('converts project name to lower case and dash-limited slug', () => { + const dummyProjectName = 'My Awesome Project'; + + $projectName.val(dummyProjectName); + + projectNew.onProjectNameChange($projectName, $projectPath); + + expect($projectPath.val()).toEqual('my-awesome-project'); + }); + + it('does not add additional dashes in the slug if the project name already contains dashes', () => { + const dummyProjectName = 'My-Dash-Delimited Awesome Project'; + + $projectName.val(dummyProjectName); + + projectNew.onProjectNameChange($projectName, $projectPath); + + 
expect($projectPath.val()).toEqual('my-dash-delimited-awesome-project'); + }); + }); }); diff --git a/spec/javascripts/vue_shared/components/notes/system_note_spec.js b/spec/javascripts/vue_shared/components/notes/system_note_spec.js index 2a6015fe35f..adcb1c858aa 100644 --- a/spec/javascripts/vue_shared/components/notes/system_note_spec.js +++ b/spec/javascripts/vue_shared/components/notes/system_note_spec.js @@ -9,7 +9,7 @@ describe('system note component', () => { beforeEach(() => { props = { note: { - id: 1424, + id: '1424', author: { id: 1, name: 'Root', diff --git a/spec/lib/api/helpers/pagination_spec.rb b/spec/lib/api/helpers/pagination_spec.rb index c73c6023b60..0a7682d906b 100644 --- a/spec/lib/api/helpers/pagination_spec.rb +++ b/spec/lib/api/helpers/pagination_spec.rb @@ -189,9 +189,9 @@ describe API::Helpers::Pagination do it 'it returns the right link to the next page' do allow(subject).to receive(:params) .and_return({ pagination: 'keyset', ks_prev_id: projects[3].id, ks_prev_name: projects[3].name, per_page: 2 }) + expect_header('X-Per-Page', '2') expect_header('X-Next-Page', "#{value}?ks_prev_id=#{projects[6].id}&ks_prev_name=#{projects[6].name}&pagination=keyset&per_page=2") - expect_header('Link', anything) do |_key, val| expect(val).to include('rel="next"') end diff --git a/spec/lib/banzai/filter/spaced_link_filter_spec.rb b/spec/lib/banzai/filter/spaced_link_filter_spec.rb index 4463c011522..1ad7f3ff567 100644 --- a/spec/lib/banzai/filter/spaced_link_filter_spec.rb +++ b/spec/lib/banzai/filter/spaced_link_filter_spec.rb @@ -3,49 +3,73 @@ require 'spec_helper' describe Banzai::Filter::SpacedLinkFilter do include FilterSpecHelper - let(:link) { '[example](page slug)' } + let(:link) { '[example](page slug)' } + let(:image) { '![example](img test.jpg)' } - it 'converts slug with spaces to a link' do - doc = filter("See #{link}") + context 'when a link is detected' do + it 'converts slug with spaces to a link' do + doc = filter("See #{link}") - expect(doc.at_css('a').text).to eq 'example' - expect(doc.at_css('a')['href']).to eq 'page%20slug' - expect(doc.at_css('p')).to eq nil - end + expect(doc.at_css('a').text).to eq 'example' + expect(doc.at_css('a')['href']).to eq 'page%20slug' + expect(doc.at_css('a')['title']).to be_nil + expect(doc.at_css('p')).to be_nil + end - it 'converts slug with spaces and a title to a link' do - link = '[example](page slug "title")' - doc = filter("See #{link}") + it 'converts slug with spaces and a title to a link' do + link = '[example](page slug "title")' + doc = filter("See #{link}") - expect(doc.at_css('a').text).to eq 'example' - expect(doc.at_css('a')['href']).to eq 'page%20slug' - expect(doc.at_css('a')['title']).to eq 'title' - expect(doc.at_css('p')).to eq nil - end + expect(doc.at_css('a').text).to eq 'example' + expect(doc.at_css('a')['href']).to eq 'page%20slug' + expect(doc.at_css('a')['title']).to eq 'title' + expect(doc.at_css('p')).to be_nil + end - it 'does nothing when markdown_engine is redcarpet' do - exp = act = link - expect(filter(act, markdown_engine: :redcarpet).to_html).to eq exp - end + it 'does nothing when markdown_engine is redcarpet' do + exp = act = link + expect(filter(act, markdown_engine: :redcarpet).to_html).to eq exp + end + + it 'does nothing with empty text' do + link = '[](page slug)' + doc = filter("See #{link}") + + expect(doc.at_css('a')).to be_nil + end - it 'does nothing with empty text' do - link = '[](page slug)' - doc = filter("See #{link}") + it 'does nothing with an empty slug' do + link = 
'[example]()' + doc = filter("See #{link}") - expect(doc.at_css('a')).to eq nil + expect(doc.at_css('a')).to be_nil + end end - it 'does nothing with an empty slug' do - link = '[example]()' - doc = filter("See #{link}") + context 'when an image is detected' do + it 'converts slug with spaces to an image' do + doc = filter("See #{image}") + + expect(doc.at_css('img')['src']).to eq 'img%20test.jpg' + expect(doc.at_css('img')['alt']).to eq 'example' + expect(doc.at_css('p')).to be_nil + end + + it 'converts slug with spaces and a title to an image' do + image = '![example](img test.jpg "title")' + doc = filter("See #{image}") - expect(doc.at_css('a')).to eq nil + expect(doc.at_css('img')['src']).to eq 'img%20test.jpg' + expect(doc.at_css('img')['alt']).to eq 'example' + expect(doc.at_css('img')['title']).to eq 'title' + expect(doc.at_css('p')).to be_nil + end end it 'converts multiple URLs' do link1 = '[first](slug one)' link2 = '[second](http://example.com/slug two)' - doc = filter("See #{link1} and #{link2}") + doc = filter("See #{link1} and #{image} and #{link2}") found_links = doc.css('a') @@ -54,6 +78,12 @@ describe Banzai::Filter::SpacedLinkFilter do expect(found_links[0]['href']).to eq 'slug%20one' expect(found_links[1].text).to eq 'second' expect(found_links[1]['href']).to eq 'http://example.com/slug%20two' + + found_images = doc.css('img') + + expect(found_images.size).to eq(1) + expect(found_images[0]['src']).to eq 'img%20test.jpg' + expect(found_images[0]['alt']).to eq 'example' end described_class::IGNORE_PARENTS.each do |elem| diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb index 75413596431..df24cef0b8b 100644 --- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb @@ -87,4 +87,22 @@ describe Banzai::Pipeline::GfmPipeline do end end end + + describe 'markdown link or image urls having spaces' do + let(:project) { create(:project, :public) } + + it 'rewrites links with spaces in url' do + markdown = "[Link to Page](page slug)" + output = described_class.to_html(markdown, project: project) + + expect(output).to include("href=\"page%20slug\"") + end + + it 'rewrites images with spaces in url' do + markdown = "![My Image](test image.png)" + output = described_class.to_html(markdown, project: project) + + expect(output).to include("src=\"test%20image.png\"") + end + end end diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb index 52b8c9be647..64ca3ec345d 100644 --- a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb @@ -178,4 +178,25 @@ describe Banzai::Pipeline::WikiPipeline do end end end + + describe 'videos' do + let(:namespace) { create(:namespace, name: "wiki_link_ns") } + let(:project) { create(:project, :public, name: "wiki_link_project", namespace: namespace) } + let(:project_wiki) { ProjectWiki.new(project, double(:user)) } + let(:page) { build(:wiki_page, wiki: project_wiki, page: OpenStruct.new(url_path: 'nested/twice/start-page')) } + + it 'generates video html structure' do + markdown = "![video_file](video_file_name.mp4)" + output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug) + + expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/wikis/nested/twice/video_file_name.mp4"') + end + + it 'rewrites and replaces video links names with white spaces to %20' do + markdown = "![video
file](video file name.mp4)" + output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug) + + expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/wikis/nested/twice/video%20file%20name.mp4"') + end + end end diff --git a/spec/lib/forever_spec.rb b/spec/lib/forever_spec.rb index cf40c467c72..494c0561975 100644 --- a/spec/lib/forever_spec.rb +++ b/spec/lib/forever_spec.rb @@ -7,6 +7,7 @@ describe Forever do context 'when using PostgreSQL' do it 'should return Postgresql future date' do allow(Gitlab::Database).to receive(:postgresql?).and_return(true) + expect(subject).to eq(described_class::POSTGRESQL_DATE) end end @@ -14,6 +15,7 @@ describe Forever do context 'when using MySQL' do it 'should return MySQL future date' do allow(Gitlab::Database).to receive(:postgresql?).and_return(false) + expect(subject).to eq(described_class::MYSQL_DATE) end end diff --git a/spec/lib/gitlab/cleanup/project_uploads_spec.rb b/spec/lib/gitlab/cleanup/project_uploads_spec.rb index 11e605eece6..bf130b8fabd 100644 --- a/spec/lib/gitlab/cleanup/project_uploads_spec.rb +++ b/spec/lib/gitlab/cleanup/project_uploads_spec.rb @@ -132,7 +132,6 @@ describe Gitlab::Cleanup::ProjectUploads do let!(:path) { File.join(FileUploader.root, orphaned.model.full_path, orphaned.path) } before do - stub_feature_flags(import_export_object_storage: true) stub_uploads_object_storage(FileUploader) FileUtils.mkdir_p(File.dirname(path)) @@ -156,7 +155,6 @@ describe Gitlab::Cleanup::ProjectUploads do let!(:new_path) { File.join(FileUploader.root, '-', 'project-lost-found', 'wrong', orphaned.path) } before do - stub_feature_flags(import_export_object_storage: true) stub_uploads_object_storage(FileUploader) FileUtils.mkdir_p(File.dirname(path)) diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb index 1f35d1e4880..44568f2a653 100644 --- a/spec/lib/gitlab/closing_issue_extractor_spec.rb +++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb @@ -338,6 +338,13 @@ describe Gitlab::ClosingIssueExtractor do end end + context "with an invalid keyword such as suffix insted of fix" do + it do + message = "suffix #{reference}" + expect(subject.closed_by_message(message)).to eq([]) + end + end + context 'with multiple references' do let(:other_issue) { create(:issue, project: project) } let(:third_issue) { create(:issue, project: project) } diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb index 2c63f3b0455..6d29044ffd5 100644 --- a/spec/lib/gitlab/contributions_calendar_spec.rb +++ b/spec/lib/gitlab/contributions_calendar_spec.rb @@ -62,13 +62,16 @@ describe Gitlab::ContributionsCalendar do expect(calendar.activity_dates).to eq(last_week => 2, today => 1) end - it "only shows private events to authorized users" do - create_event(private_project, today) - create_event(feature_project, today) + context "when the user has opted-in for private contributions" do + it "shows private and public events to all users" do + user.update_column(:include_private_contributions, true) + create_event(private_project, today) + create_event(public_project, today) - expect(calendar.activity_dates[today]).to eq(0) - expect(calendar(user).activity_dates[today]).to eq(0) - expect(calendar(contributor).activity_dates[today]).to eq(2) + expect(calendar.activity_dates[today]).to eq(1) + expect(calendar(user).activity_dates[today]).to eq(1) + 
expect(calendar(contributor).activity_dates[today]).to eq(2) + end end it "counts the diff notes on merge request" do @@ -128,7 +131,7 @@ describe Gitlab::ContributionsCalendar do e3 = create_event(feature_project, today) create_event(public_project, last_week) - expect(calendar.events_by_date(today)).to contain_exactly(e1) + expect(calendar.events_by_date(today)).to contain_exactly(e1, e3) expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3) end diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb new file mode 100644 index 00000000000..bfcfed4231f --- /dev/null +++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Diff::HighlightCache do + let(:merge_request) { create(:merge_request_with_diffs) } + + subject(:cache) { described_class.new(merge_request.diffs, backend: backend) } + + describe '#decorate' do + let(:backend) { double('backend').as_null_object } + + # Manually creates a Diff::File object to avoid triggering the cache on + # the FileCollection::MergeRequestDiff + let(:diff_file) do + diffs = merge_request.diffs + raw_diff = diffs.diffable.raw_diffs(diffs.diff_options.merge(paths: ['CHANGELOG'])).first + Gitlab::Diff::File.new(raw_diff, + repository: diffs.project.repository, + diff_refs: diffs.diff_refs, + fallback_diff_refs: diffs.fallback_diff_refs) + end + + it 'does not calculate highlighting when reading from cache' do + cache.write_if_empty + cache.decorate(diff_file) + + expect_any_instance_of(Gitlab::Diff::Highlight).not_to receive(:highlight) + + diff_file.highlighted_diff_lines + end + + it 'assigns highlighted diff lines to the DiffFile' do + cache.write_if_empty + cache.decorate(diff_file) + + expect(diff_file.highlighted_diff_lines.size).to be > 5 + end + + it 'submits a single reading from the cache' do + cache.decorate(diff_file) + cache.decorate(diff_file) + + expect(backend).to have_received(:read).with(cache.key).once + end + end + + describe '#write_if_empty' do + let(:backend) { double('backend', read: {}).as_null_object } + + it 'submits a single writing to the cache' do + cache.write_if_empty + cache.write_if_empty + + expect(backend).to have_received(:write).with(cache.key, + hash_including('CHANGELOG-false-false-false'), + expires_in: 1.week).once + end + end + + describe '#clear' do + let(:backend) { double('backend').as_null_object } + + it 'clears cache' do + cache.clear + + expect(backend).to have_received(:delete).with(cache.key) + end + end +end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 17348b01006..28c34e234f7 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -583,6 +583,37 @@ describe Gitlab::Git::Repository, :seed_helper do end end + describe '#find_remote_root_ref' do + it 'gets the remote root ref from GitalyClient' do + expect_any_instance_of(Gitlab::GitalyClient::RemoteService) + .to receive(:find_remote_root_ref).and_call_original + + expect(repository.find_remote_root_ref('origin')).to eq 'master' + end + + it 'returns UTF-8' do + expect(repository.find_remote_root_ref('origin')).to be_utf8 + end + + it 'returns nil when remote name is nil' do + expect_any_instance_of(Gitlab::GitalyClient::RemoteService) + .not_to receive(:find_remote_root_ref) + + expect(repository.find_remote_root_ref(nil)).to be_nil + end + + it 'returns nil when remote name is empty' do + 
expect_any_instance_of(Gitlab::GitalyClient::RemoteService) + .not_to receive(:find_remote_root_ref) + + expect(repository.find_remote_root_ref('')).to be_nil + end + + it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RemoteService, :find_remote_root_ref do + subject { repository.find_remote_root_ref('origin') } + end + end + describe "#log" do shared_examples 'repository log' do let(:commit_with_old_name) do diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb index 54f2ea33f90..bcdf12a00a0 100644 --- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb @@ -19,7 +19,14 @@ describe Gitlab::GitalyClient::CommitService do right_commit_id: commit.id, collapse_diffs: false, enforce_limits: true, - **Gitlab::Git::DiffCollection.collection_limits.to_h + # Tests limitation parameters explicitly + max_files: 100, + max_lines: 5000, + max_bytes: 512000, + safe_max_files: 100, + safe_max_lines: 5000, + safe_max_bytes: 512000, + max_patch_bytes: 102400 ) expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:commit_diff).with(request, kind_of(Hash)) @@ -37,7 +44,14 @@ describe Gitlab::GitalyClient::CommitService do right_commit_id: initial_commit.id, collapse_diffs: false, enforce_limits: true, - **Gitlab::Git::DiffCollection.collection_limits.to_h + # Tests limitation parameters explicitly + max_files: 100, + max_lines: 5000, + max_bytes: 512000, + safe_max_files: 100, + safe_max_lines: 5000, + safe_max_bytes: 512000, + max_patch_bytes: 102400 ) expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:commit_diff).with(request, kind_of(Hash)) diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb index f03c7e3f04b..9030a49983d 100644 --- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb @@ -45,6 +45,26 @@ describe Gitlab::GitalyClient::RemoteService do end end + describe '#find_remote_root_ref' do + it 'sends a find_remote_root_ref message and returns the root ref' do + expect_any_instance_of(Gitaly::RemoteService::Stub) + .to receive(:find_remote_root_ref) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .and_return(double(ref: 'master')) + + expect(client.find_remote_root_ref('origin')).to eq 'master' + end + + it 'ensures ref is a valid UTF-8 string' do + expect_any_instance_of(Gitaly::RemoteService::Stub) + .to receive(:find_remote_root_ref) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .and_return(double(ref: "an_invalid_ref_\xE5")) + + expect(client.find_remote_root_ref('origin')).to eq "an_invalid_ref_å" + end + end + describe '#update_remote_mirror' do let(:ref_name) { 'remote_mirror_1' } let(:only_branches_matching) { ['my-branch', 'master'] } diff --git a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_object_storage_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_object_storage_spec.rb deleted file mode 100644 index 5059d68e54b..00000000000 --- a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_object_storage_spec.rb +++ /dev/null @@ -1,105 +0,0 @@ -require 'spec_helper' - -describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do - let!(:service) { described_class.new } - let!(:project) { create(:project,
:with_object_export) } - let(:shared) { project.import_export_shared } - let!(:user) { create(:user) } - - describe '#execute' do - before do - allow(service).to receive(:strategy_execute) - stub_feature_flags(import_export_object_storage: true) - end - - it 'returns if project exported file is not found' do - allow(project).to receive(:export_project_object_exists?).and_return(false) - - expect(service).not_to receive(:strategy_execute) - - service.execute(user, project) - end - - it 'creates a lock file in the export dir' do - allow(service).to receive(:delete_after_export_lock) - - service.execute(user, project) - - expect(lock_path_exist?).to be_truthy - end - - context 'when the method succeeds' do - it 'removes the lock file' do - service.execute(user, project) - - expect(lock_path_exist?).to be_falsey - end - end - - context 'when the method fails' do - before do - allow(service).to receive(:strategy_execute).and_call_original - end - - context 'when validation fails' do - before do - allow(service).to receive(:invalid?).and_return(true) - end - - it 'does not create the lock file' do - expect(service).not_to receive(:create_or_update_after_export_lock) - - service.execute(user, project) - end - - it 'does not execute main logic' do - expect(service).not_to receive(:strategy_execute) - - service.execute(user, project) - end - - it 'logs validation errors in shared context' do - expect(service).to receive(:log_validation_errors) - - service.execute(user, project) - end - end - - context 'when an exception is raised' do - it 'removes the lock' do - expect { service.execute(user, project) }.to raise_error(NotImplementedError) - - expect(lock_path_exist?).to be_falsey - end - end - end - end - - describe '#log_validation_errors' do - it 'add the message to the shared context' do - errors = %w(test_message test_message2) - - allow(service).to receive(:invalid?).and_return(true) - allow(service.errors).to receive(:full_messages).and_return(errors) - - expect(shared).to receive(:add_error_message).twice.and_call_original - - service.execute(user, project) - - expect(shared.errors).to eq errors - end - end - - describe '#to_json' do - it 'adds the current strategy class to the serialized attributes' do - params = { param1: 1 } - result = params.merge(klass: described_class.to_s).to_json - - expect(described_class.new(params).to_json).to eq result - end - end - - def lock_path_exist? 
- File.exist?(described_class.lock_file_path(project)) - end -end diff --git a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb index 566b7f46c87..9a442de2900 100644 --- a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb +++ b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb @@ -9,11 +9,10 @@ describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do describe '#execute' do before do allow(service).to receive(:strategy_execute) - stub_feature_flags(import_export_object_storage: false) end it 'returns if project exported file is not found' do - allow(project).to receive(:export_project_path).and_return(nil) + allow(project).to receive(:export_file_exists?).and_return(false) expect(service).not_to receive(:strategy_execute) diff --git a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb index 7f2e0a4ee2c..ec17ad8541f 100644 --- a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb +++ b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb @@ -24,34 +24,13 @@ describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do end describe '#execute' do - context 'without object storage' do - before do - stub_feature_flags(import_export_object_storage: false) - end - - it 'removes the exported project file after the upload' do - allow(strategy).to receive(:send_file) - allow(strategy).to receive(:handle_response_error) - - expect(project).to receive(:remove_exported_project_file) - - strategy.execute(user, project) - end - end - - context 'with object storage' do - before do - stub_feature_flags(import_export_object_storage: true) - end + it 'removes the exported project file after the upload' do + allow(strategy).to receive(:send_file) + allow(strategy).to receive(:handle_response_error) - it 'removes the exported project file after the upload' do - allow(strategy).to receive(:send_file) - allow(strategy).to receive(:handle_response_error) + expect(project).to receive(:remove_exports) - expect(project).to receive(:remove_exported_project_file) - - strategy.execute(user, project) - end + strategy.execute(user, project) end end end diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index b4269bd5786..ec2bdbe22e1 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -288,6 +288,7 @@ project: - fork_network_member - fork_network - custom_attributes +- prometheus_metrics - lfs_file_locks - project_badges - source_of_merge_requests @@ -303,6 +304,8 @@ award_emoji: - user priorities: - label +prometheus_metrics: +- project timelogs: - issue - merge_request @@ -321,3 +324,9 @@ metrics: - latest_closed_by - merged_by - pipeline +resource_label_events: +- user +- issue +- merge_request +- epic +- label diff --git a/spec/lib/gitlab/import_export/avatar_saver_spec.rb b/spec/lib/gitlab/import_export/avatar_saver_spec.rb index 90e6d653d34..2bd1b9924c6 100644 --- a/spec/lib/gitlab/import_export/avatar_saver_spec.rb +++ b/spec/lib/gitlab/import_export/avatar_saver_spec.rb @@ -8,8 +8,7 @@ describe Gitlab::ImportExport::AvatarSaver do before do FileUtils.mkdir_p("#{shared.export_path}/avatar/") 
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) - stub_feature_flags(import_export_object_storage: false) + allow_any_instance_of(Gitlab::ImportExport::Shared).to receive(:export_path).and_return(export_path) end after do @@ -19,7 +18,7 @@ describe Gitlab::ImportExport::AvatarSaver do it 'saves a project avatar' do described_class.new(project: project_with_avatar, shared: shared).save - expect(File).to exist("#{shared.export_path}/avatar/dk.png") + expect(File).to exist(Dir["#{shared.export_path}/avatar/**/dk.png"].first) end it 'is fine not to have an avatar' do diff --git a/spec/lib/gitlab/import_export/file_importer_object_storage_spec.rb b/spec/lib/gitlab/import_export/file_importer_object_storage_spec.rb deleted file mode 100644 index 287745eb40e..00000000000 --- a/spec/lib/gitlab/import_export/file_importer_object_storage_spec.rb +++ /dev/null @@ -1,89 +0,0 @@ -require 'spec_helper' - -describe Gitlab::ImportExport::FileImporter do - let(:shared) { Gitlab::ImportExport::Shared.new(nil) } - let(:storage_path) { "#{Dir.tmpdir}/file_importer_spec" } - let(:valid_file) { "#{shared.export_path}/valid.json" } - let(:symlink_file) { "#{shared.export_path}/invalid.json" } - let(:hidden_symlink_file) { "#{shared.export_path}/.hidden" } - let(:subfolder_symlink_file) { "#{shared.export_path}/subfolder/invalid.json" } - let(:evil_symlink_file) { "#{shared.export_path}/.\nevil" } - - before do - stub_const('Gitlab::ImportExport::FileImporter::MAX_RETRIES', 0) - stub_feature_flags(import_export_object_storage: true) - stub_uploads_object_storage(FileUploader) - - allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(storage_path) - allow_any_instance_of(Gitlab::ImportExport::CommandLineUtil).to receive(:untar_zxf).and_return(true) - allow_any_instance_of(Gitlab::ImportExport::Shared).to receive(:relative_archive_path).and_return('test') - allow(SecureRandom).to receive(:hex).and_return('abcd') - setup_files - end - - after do - FileUtils.rm_rf(storage_path) - end - - context 'normal run' do - before do - described_class.import(project: build(:project), archive_file: '', shared: shared) - end - - it 'removes symlinks in root folder' do - expect(File.exist?(symlink_file)).to be false - end - - it 'removes hidden symlinks in root folder' do - expect(File.exist?(hidden_symlink_file)).to be false - end - - it 'removes evil symlinks in root folder' do - expect(File.exist?(evil_symlink_file)).to be false - end - - it 'removes symlinks in subfolders' do - expect(File.exist?(subfolder_symlink_file)).to be false - end - - it 'does not remove a valid file' do - expect(File.exist?(valid_file)).to be true - end - - it 'creates the file in the right subfolder' do - expect(shared.export_path).to include('test/abcd') - end - end - - context 'error' do - before do - allow_any_instance_of(described_class).to receive(:wait_for_archived_file).and_raise(StandardError) - described_class.import(project: build(:project), archive_file: '', shared: shared) - end - - it 'removes symlinks in root folder' do - expect(File.exist?(symlink_file)).to be false - end - - it 'removes hidden symlinks in root folder' do - expect(File.exist?(hidden_symlink_file)).to be false - end - - it 'removes symlinks in subfolders' do - expect(File.exist?(subfolder_symlink_file)).to be false - end - - it 'does not remove a valid file' do - expect(File.exist?(valid_file)).to be true - end - end - - def setup_files - FileUtils.mkdir_p("#{shared.export_path}/subfolder/") - 
FileUtils.touch(valid_file) - FileUtils.ln_s(valid_file, symlink_file) - FileUtils.ln_s(valid_file, subfolder_symlink_file) - FileUtils.ln_s(valid_file, hidden_symlink_file) - FileUtils.ln_s(valid_file, evil_symlink_file) - end -end diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb index 78fccdf1dfc..bf34cefe18f 100644 --- a/spec/lib/gitlab/import_export/file_importer_spec.rb +++ b/spec/lib/gitlab/import_export/file_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Gitlab::ImportExport::FileImporter do let(:shared) { Gitlab::ImportExport::Shared.new(nil) } - let(:export_path) { "#{Dir.tmpdir}/file_importer_spec" } + let(:storage_path) { "#{Dir.tmpdir}/file_importer_spec" } let(:valid_file) { "#{shared.export_path}/valid.json" } let(:symlink_file) { "#{shared.export_path}/invalid.json" } let(:hidden_symlink_file) { "#{shared.export_path}/.hidden" } @@ -11,7 +11,9 @@ describe Gitlab::ImportExport::FileImporter do before do stub_const('Gitlab::ImportExport::FileImporter::MAX_RETRIES', 0) - allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + stub_uploads_object_storage(FileUploader) + + allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(storage_path) allow_any_instance_of(Gitlab::ImportExport::CommandLineUtil).to receive(:untar_zxf).and_return(true) allow_any_instance_of(Gitlab::ImportExport::Shared).to receive(:relative_archive_path).and_return('test') allow(SecureRandom).to receive(:hex).and_return('abcd') @@ -19,12 +21,12 @@ describe Gitlab::ImportExport::FileImporter do end after do - FileUtils.rm_rf(export_path) + FileUtils.rm_rf(storage_path) end context 'normal run' do before do - described_class.import(project: nil, archive_file: '', shared: shared) + described_class.import(project: build(:project), archive_file: '', shared: shared) end it 'removes symlinks in root folder' do @@ -55,7 +57,7 @@ describe Gitlab::ImportExport::FileImporter do context 'error' do before do allow_any_instance_of(described_class).to receive(:wait_for_archived_file).and_raise(StandardError) - described_class.import(project: nil, archive_file: '', shared: shared) + described_class.import(project: build(:project), archive_file: '', shared: shared) end it 'removes symlinks in root folder' do diff --git a/spec/lib/gitlab/import_export/importer_object_storage_spec.rb b/spec/lib/gitlab/import_export/importer_object_storage_spec.rb deleted file mode 100644 index 24a994b3611..00000000000 --- a/spec/lib/gitlab/import_export/importer_object_storage_spec.rb +++ /dev/null @@ -1,115 +0,0 @@ -require 'spec_helper' - -describe Gitlab::ImportExport::Importer do - let(:user) { create(:user) } - let(:test_path) { "#{Dir.tmpdir}/importer_spec" } - let(:shared) { project.import_export_shared } - let(:project) { create(:project) } - let(:import_file) { fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz') } - - subject(:importer) { described_class.new(project) } - - before do - allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(test_path) - allow_any_instance_of(Gitlab::ImportExport::FileImporter).to receive(:remove_import_file) - stub_feature_flags(import_export_object_storage: true) - stub_uploads_object_storage(FileUploader) - - FileUtils.mkdir_p(shared.export_path) - ImportExportUpload.create(project: project, import_file: import_file) - end - - after do - FileUtils.rm_rf(test_path) - end - - describe 
'#execute' do - it 'succeeds' do - importer.execute - - expect(shared.errors).to be_empty - end - - it 'extracts the archive' do - expect(Gitlab::ImportExport::FileImporter).to receive(:import).and_call_original - - importer.execute - end - - it 'checks the version' do - expect(Gitlab::ImportExport::VersionChecker).to receive(:check!).and_call_original - - importer.execute - end - - context 'all restores are executed' do - [ - Gitlab::ImportExport::AvatarRestorer, - Gitlab::ImportExport::RepoRestorer, - Gitlab::ImportExport::WikiRestorer, - Gitlab::ImportExport::UploadsRestorer, - Gitlab::ImportExport::LfsRestorer, - Gitlab::ImportExport::StatisticsRestorer - ].each do |restorer| - it "calls the #{restorer}" do - fake_restorer = double(restorer.to_s) - - expect(fake_restorer).to receive(:restore).and_return(true).at_least(1) - expect(restorer).to receive(:new).and_return(fake_restorer).at_least(1) - - importer.execute - end - end - - it 'restores the ProjectTree' do - expect(Gitlab::ImportExport::ProjectTreeRestorer).to receive(:new).and_call_original - - importer.execute - end - - it 'removes the import file' do - expect(importer).to receive(:remove_import_file).and_call_original - - importer.execute - - expect(project.import_export_upload.import_file&.file).to be_nil - end - end - - context 'when project successfully restored' do - let!(:existing_project) { create(:project, namespace: user.namespace) } - let(:project) { create(:project, namespace: user.namespace, name: 'whatever', path: 'whatever') } - - before do - restorers = double(:restorers, all?: true) - - allow(subject).to receive(:import_file).and_return(true) - allow(subject).to receive(:check_version!).and_return(true) - allow(subject).to receive(:restorers).and_return(restorers) - allow(project).to receive(:import_data).and_return(double(data: { 'original_path' => existing_project.path })) - end - - context 'when import_data' do - context 'has original_path' do - it 'overwrites existing project' do - expect_any_instance_of(::Projects::OverwriteProjectService).to receive(:execute).with(existing_project) - - subject.execute - end - end - - context 'has not original_path' do - before do - allow(project).to receive(:import_data).and_return(double(data: {})) - end - - it 'does not call the overwrite service' do - expect_any_instance_of(::Projects::OverwriteProjectService).not_to receive(:execute).with(existing_project) - - subject.execute - end - end - end - end - end -end diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb index 8053c48ad6c..11f98d782b1 100644 --- a/spec/lib/gitlab/import_export/importer_spec.rb +++ b/spec/lib/gitlab/import_export/importer_spec.rb @@ -4,16 +4,18 @@ describe Gitlab::ImportExport::Importer do let(:user) { create(:user) } let(:test_path) { "#{Dir.tmpdir}/importer_spec" } let(:shared) { project.import_export_shared } - let(:project) { create(:project, import_source: File.join(test_path, 'test_project_export.tar.gz')) } + let(:project) { create(:project) } + let(:import_file) { fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz') } subject(:importer) { described_class.new(project) } before do allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(test_path) allow_any_instance_of(Gitlab::ImportExport::FileImporter).to receive(:remove_import_file) + stub_uploads_object_storage(FileUploader) FileUtils.mkdir_p(shared.export_path) - 
FileUtils.cp(Rails.root.join('spec/features/projects/import_export/test_project_export.tar.gz'), test_path) + ImportExportUpload.create(project: project, import_file: import_file) end after do @@ -64,6 +66,14 @@ describe Gitlab::ImportExport::Importer do importer.execute end + it 'removes the import file' do + expect(importer).to receive(:remove_import_file).and_call_original + + importer.execute + + expect(project.import_export_upload.import_file&.file).to be_nil + end + it 'sets the correct visibility_level when visibility level is a string' do project.create_or_update_import_data( data: { override_params: { visibility_level: Gitlab::VisibilityLevel::PRIVATE.to_s } } @@ -85,7 +95,6 @@ describe Gitlab::ImportExport::Importer do allow(subject).to receive(:import_file).and_return(true) allow(subject).to receive(:check_version!).and_return(true) allow(subject).to receive(:restorers).and_return(restorers) - allow(restorers).to receive(:all?).and_return(true) allow(project).to receive(:import_data).and_return(double(data: { 'original_path' => existing_project.path })) end diff --git a/spec/lib/gitlab/import_export/model_configuration_spec.rb b/spec/lib/gitlab/import_export/model_configuration_spec.rb index 5cb8f2589c8..2e28f978c3a 100644 --- a/spec/lib/gitlab/import_export/model_configuration_spec.rb +++ b/spec/lib/gitlab/import_export/model_configuration_spec.rb @@ -16,14 +16,30 @@ describe 'Import/Export model configuration' do # - User, Author... Models we do not care about for checking models names.flatten.uniq - %w(milestones labels user author) + ['project'] end + let(:ce_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' } + let(:ce_models_hash) { YAML.load_file(ce_models_yml) } + + let(:ee_models_yml) { 'ee/spec/lib/gitlab/import_export/all_models.yml' } + let(:ee_models_hash) { File.exist?(ee_models_yml) ? YAML.load_file(ee_models_yml) : {} } - let(:all_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' } - let(:all_models) { YAML.load_file(all_models_yml) } let(:current_models) { setup_models } + let(:all_models_hash) do + all_models_hash = ce_models_hash.dup + + all_models_hash.each do |model, associations| + associations.concat(ee_models_hash[model] || []) + end + + ee_models_hash.each do |model, associations| + all_models_hash[model] ||= associations + end + + all_models_hash + end it 'has no new models' do model_names.each do |model_name| - new_models = Array(current_models[model_name]) - Array(all_models[model_name]) + new_models = Array(current_models[model_name]) - Array(all_models_hash[model_name]) expect(new_models).to be_empty, failure_message(model_name.classify, new_models) end end @@ -31,27 +47,21 @@ describe 'Import/Export model configuration' do # List of current models between models, in the format of # {model: [model_2, model3], ...} def setup_models - all_models_hash = {} - - model_names.each do |model_name| - model_class = relation_class_for_name(model_name) - - all_models_hash[model_name] = associations_for(model_class) - ['project'] + model_names.each_with_object({}) do |model_name, hash| + hash[model_name] = associations_for(relation_class_for_name(model_name)) - ['project'] end - - all_models_hash end def failure_message(parent_model_name, new_models) - <<-MSG + <<~MSG New model(s) <#{new_models.join(',')}> have been added, related to #{parent_model_name}, which is exported by the Import/Export feature. - If you think this model should be included in the export, please add it to IMPORT_EXPORT_CONFIG. 
- Definitely add it to MODELS_JSON to signal that you've handled this error and to prevent it from showing up in the future. + If you think this model should be included in the export, please add it to `#{Gitlab::ImportExport.config_file}`. - MODELS_JSON: #{File.expand_path(all_models_yml)} - IMPORT_EXPORT_CONFIG: #{Gitlab::ImportExport.config_file} + Definitely add it to `#{File.expand_path(ce_models_yml)}` + #{"or `#{File.expand_path(ee_models_yml)}` if the model/associations are EE-specific\n" if ee_models_hash.any?} + to signal that you've handled this error and to prevent it from showing up in the future. MSG end end diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json index 1b7fa11cb3c..eefd00e7383 100644 --- a/spec/lib/gitlab/import_export/project.json +++ b/spec/lib/gitlab/import_export/project.json @@ -331,6 +331,28 @@ }, "events": [] } + ], + "resource_label_events": [ + { + "id":244, + "action":"remove", + "issue_id":40, + "merge_request_id":null, + "label_id":2, + "user_id":1, + "created_at":"2018-08-28T08:24:00.494Z", + "label": { + "id": 2, + "title": "test2", + "color": "#428bca", + "project_id": 8, + "created_at": "2016-07-22T08:55:44.161Z", + "updated_at": "2016-07-22T08:55:44.161Z", + "template": false, + "description": "", + "type": "ProjectLabel" + } + } ] }, { @@ -2515,6 +2537,17 @@ "events": [] } ], + "resource_label_events": [ + { + "id":243, + "action":"add", + "issue_id":null, + "merge_request_id":27, + "label_id":null, + "user_id":1, + "created_at":"2018-08-28T08:24:00.494Z" + } + ], "merge_request_diff": { "id": 27, "state": "collected", diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb index a88ac0a091e..3ff6be595a8 100644 --- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb @@ -89,6 +89,14 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do expect(ProtectedTag.first.create_access_levels).not_to be_empty end + it 'restores issue resource label events' do + expect(Issue.find_by(title: 'Voluptatem').resource_label_events).not_to be_empty + end + + it 'restores merge requests resource label events' do + expect(MergeRequest.find_by(title: 'MR1').resource_label_events).not_to be_empty + end + context 'event at forth level of the tree' do let(:event) { Event.where(action: 6).first } diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb index fec8a2af9ab..5dc372263ad 100644 --- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb @@ -169,6 +169,14 @@ describe Gitlab::ImportExport::ProjectTreeSaver do expect(priorities.flatten).not_to be_empty end + it 'has issue resource label events' do + expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty + end + + it 'has merge request resource label events' do + expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty + end + it 'saves the correct service type' do expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService') end @@ -291,6 +299,9 @@ describe Gitlab::ImportExport::ProjectTreeSaver do project: project, commit_id: ci_build.pipeline.sha) + create(:resource_label_event, label: project_label, issue: issue) + create(:resource_label_event, label: group_label, merge_request: 
merge_request) + create(:event, :created, target: milestone, project: project, author: user) create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' }) diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 579f175c4a8..e9f1be172b0 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -555,6 +555,19 @@ ProjectCustomAttribute: - project_id - key - value +PrometheusMetric: +- id +- created_at +- updated_at +- project_id +- y_label +- unit +- legend +- title +- query +- group +- common +- identifier Badge: - id - link_url @@ -566,3 +579,11 @@ Badge: - type ProjectCiCdSetting: - group_runners_enabled +ResourceLabelEvent: +- id +- action +- issue_id +- merge_request_id +- label_id +- user_id +- created_at diff --git a/spec/lib/gitlab/import_export/saver_spec.rb b/spec/lib/gitlab/import_export/saver_spec.rb index 02f1a4b81aa..d185ff2dfcc 100644 --- a/spec/lib/gitlab/import_export/saver_spec.rb +++ b/spec/lib/gitlab/import_export/saver_spec.rb @@ -18,26 +18,12 @@ describe Gitlab::ImportExport::Saver do FileUtils.rm_rf(export_path) end - context 'local archive' do - it 'saves the repo to disk' do - stub_feature_flags(import_export_object_storage: false) + it 'saves the repo using object storage' do + stub_uploads_object_storage(ImportExportUploader) - subject.save + subject.save - expect(shared.errors).to be_empty - expect(Dir.empty?(shared.archive_path)).to be false - end - end - - context 'object storage' do - it 'saves the repo using object storage' do - stub_feature_flags(import_export_object_storage: true) - stub_uploads_object_storage(ImportExportUploader) - - subject.save - - expect(ImportExportUpload.find_by(project: project).export_file.url) - .to match(%r[\/uploads\/-\/system\/import_export_upload\/export_file.*]) - end + expect(ImportExportUpload.find_by(project: project).export_file.url) + .to match(%r[\/uploads\/-\/system\/import_export_upload\/export_file.*]) end end diff --git a/spec/lib/gitlab/import_export/uploads_manager_spec.rb b/spec/lib/gitlab/import_export/uploads_manager_spec.rb index f799de18cd0..792117e1df1 100644 --- a/spec/lib/gitlab/import_export/uploads_manager_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_manager_spec.rb @@ -4,6 +4,7 @@ describe Gitlab::ImportExport::UploadsManager do let(:shared) { project.import_export_shared } let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } let(:project) { create(:project) } + let(:upload) { create(:upload, :issuable_upload, :object_storage, model: project) } let(:exported_file_path) { "#{shared.export_path}/uploads/#{upload.secret}/#{File.basename(upload.path)}" } subject(:manager) { described_class.new(project: project, shared: shared) } @@ -69,44 +70,20 @@ describe Gitlab::ImportExport::UploadsManager do end end end + end - context 'using object storage' do - let!(:upload) { create(:upload, :issuable_upload, :object_storage, model: project) } - - before do - stub_feature_flags(import_export_object_storage: true) - stub_uploads_object_storage(FileUploader) - end - - it 'saves the file' do - fake_uri = double - - expect(fake_uri).to receive(:open).and_return(StringIO.new('File content')) - expect(URI).to receive(:parse).and_return(fake_uri) - - manager.save + describe '#restore' do + before do + stub_uploads_object_storage(FileUploader) - expect(File.read(exported_file_path)).to eq('File 
content') - end + FileUtils.mkdir_p(File.join(shared.export_path, 'uploads/72a497a02fe3ee09edae2ed06d390038')) + FileUtils.touch(File.join(shared.export_path, 'uploads/72a497a02fe3ee09edae2ed06d390038', "dummy.txt")) end - describe '#restore' do - context 'using object storage' do - before do - stub_feature_flags(import_export_object_storage: true) - stub_uploads_object_storage(FileUploader) - - FileUtils.mkdir_p(File.join(shared.export_path, 'uploads/72a497a02fe3ee09edae2ed06d390038')) - FileUtils.touch(File.join(shared.export_path, 'uploads/72a497a02fe3ee09edae2ed06d390038', "dummy.txt")) - end + it 'restores the file' do + manager.restore - it 'restores the file' do - manager.restore - - expect(project.uploads.size).to eq(1) - expect(project.uploads.first.build_uploader.filename).to eq('dummy.txt') - end - end + expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt') end end end diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb index acef97459b8..6072f18b8c7 100644 --- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb @@ -8,7 +8,7 @@ describe Gitlab::ImportExport::UploadsRestorer do before do allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) FileUtils.mkdir_p(File.join(shared.export_path, 'uploads/random')) - FileUtils.touch(File.join(shared.export_path, 'uploads/random', "dummy.txt")) + FileUtils.touch(File.join(shared.export_path, 'uploads/random', 'dummy.txt')) end after do @@ -27,9 +27,7 @@ describe Gitlab::ImportExport::UploadsRestorer do it 'copies the uploads to the project path' do subject.restore - uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) } - - expect(uploads).to include('dummy.txt') + expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt') end end @@ -45,9 +43,7 @@ describe Gitlab::ImportExport::UploadsRestorer do it 'copies the uploads to the project path' do subject.restore - uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) } - - expect(uploads).to include('dummy.txt') + expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt') end end end diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb index c716edd9397..24993460e51 100644 --- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb @@ -7,7 +7,6 @@ describe Gitlab::ImportExport::UploadsSaver do let(:shared) { project.import_export_shared } before do - stub_feature_flags(import_export_object_storage: false) allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) end diff --git a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb index 5589db92b1d..1a108003bc2 100644 --- a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb +++ b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb @@ -6,7 +6,7 @@ describe Gitlab::Prometheus::AdditionalMetricsParser do let(:parser_error_class) { Gitlab::Prometheus::ParsingError } describe '#load_groups_from_yaml' do - subject { described_class.load_groups_from_yaml } + subject { described_class.load_groups_from_yaml('dummy.yaml') } describe 'parsing sample yaml' do 
let(:sample_yaml) do diff --git a/spec/lib/gitlab/prometheus/metric_group_spec.rb b/spec/lib/gitlab/prometheus/metric_group_spec.rb new file mode 100644 index 00000000000..e7d16e73663 --- /dev/null +++ b/spec/lib/gitlab/prometheus/metric_group_spec.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'rails_helper' + +describe Gitlab::Prometheus::MetricGroup do + describe '.common_metrics' do + let!(:project_metric) { create(:prometheus_metric) } + let!(:common_metric_group_a) { create(:prometheus_metric, :common, group: :aws_elb) } + let!(:common_metric_group_b_q1) { create(:prometheus_metric, :common, group: :kubernetes) } + let!(:common_metric_group_b_q2) { create(:prometheus_metric, :common, group: :kubernetes) } + + subject { described_class.common_metrics } + + it 'returns exactly two groups' do + expect(subject.map(&:name)).to contain_exactly( + 'Response metrics (AWS ELB)', 'System metrics (Kubernetes)') + end + + it 'returns exactly three metric queries' do + expect(subject.map(&:metrics).flatten.map(&:id)).to contain_exactly( + common_metric_group_a.id, common_metric_group_b_q1.id, + common_metric_group_b_q2.id) + end + end + + describe '.for_project' do + let!(:other_project) { create(:project) } + let!(:project_metric) { create(:prometheus_metric) } + let!(:common_metric) { create(:prometheus_metric, :common, group: :aws_elb) } + + subject do + described_class.for_project(other_project) + .map(&:metrics).flatten + .map(&:id) + end + + it 'returns exactly one common metric' do + is_expected.to contain_exactly(common_metric.id) + end + end +end diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb new file mode 100644 index 00000000000..7ffcef2baef --- /dev/null +++ b/spec/lib/gitlab/tree_summary_spec.rb @@ -0,0 +1,202 @@ +require 'spec_helper' + +describe Gitlab::TreeSummary do + using RSpec::Parameterized::TableSyntax + + let(:project) { create(:project, :empty_repo) } + let(:repo) { project.repository } + let(:commit) { repo.head_commit } + + let(:path) { nil } + let(:offset) { nil } + let(:limit) { nil } + + subject(:summary) { described_class.new(commit, project, path: path, offset: offset, limit: limit) } + + describe '#initialize' do + it 'defaults offset to 0' do + expect(summary.offset).to eq(0) + end + + it 'defaults limit to 25' do + expect(summary.limit).to eq(25) + end + end + + describe '#summarize' do + let(:project) { create(:project, :custom_repo, files: { 'a.txt' => '' }) } + + subject(:summarized) { summary.summarize } + + it 'returns an array of entries, and an array of commits' do + expect(summarized).to be_a(Array) + expect(summarized.size).to eq(2) + + entries, commits = *summarized + aggregate_failures do + expect(entries).to contain_exactly( + a_hash_including(file_name: 'a.txt', commit: have_attributes(id: commit.id)) + ) + + expect(commits).to match_array(entries.map { |entry| entry[:commit] }) + end + end + end + + describe '#summarize (entries)' do + let(:limit) { 2 } + + custom_files = { + 'a.txt' => '', + 'b.txt' => '', + 'directory/c.txt' => '' + } + + let(:project) { create(:project, :custom_repo, files: custom_files) } + let(:commit) { repo.head_commit } + + subject(:entries) { summary.summarize.first } + + it 'summarizes the entries within the window' do + is_expected.to contain_exactly( + a_hash_including(type: :tree, file_name: 'directory'), + a_hash_including(type: :blob, file_name: 'a.txt') + # b.txt is excluded by the limit + ) + end + + it 'references the commit and commit path in entries' do + 
entry = entries.first + expected_commit_path = Gitlab::Routing.url_helpers.project_commit_path(project, commit) + + expect(entry[:commit]).to be_a(::Commit) + expect(entry[:commit_path]).to eq expected_commit_path + end + + context 'in a good subdirectory' do + let(:path) { 'directory' } + + it 'summarizes the entries in the subdirectory' do + is_expected.to contain_exactly(a_hash_including(type: :blob, file_name: 'c.txt')) + end + end + + context 'in a non-existent subdirectory' do + let(:path) { 'tmp' } + + it { is_expected.to be_empty } + end + + context 'custom offset and limit' do + let(:offset) { 2 } + + it 'returns entries from the offset' do + is_expected.to contain_exactly(a_hash_including(type: :blob, file_name: 'b.txt')) + end + end + end + + describe '#summarize (commits)' do + # This is a commit in the master branch of the gitlab-test repository that + # satisfies certain assumptions these tests depend on + let(:test_commit_sha) { '7975be0116940bf2ad4321f79d02a55c5f7779aa' } + let(:whitespace_commit_sha) { '66eceea0db202bb39c4e445e8ca28689645366c5' } + + let(:project) { create(:project, :repository) } + let(:commit) { repo.commit(test_commit_sha) } + let(:limit) { nil } + let(:entries) { summary.summarize.first } + + subject(:commits) do + summary.summarize.last + end + + it 'returns an Array of ::Commit objects' do + is_expected.not_to be_empty + is_expected.to all(be_kind_of(::Commit)) + end + + it 'deduplicates commits when multiple entries reference the same commit' do + expect(commits.size).to be < entries.size + end + + context 'in a subdirectory' do + let(:path) { 'files' } + + it 'returns commits for entries in the subdirectory' do + expect(commits).to satisfy_one { |c| c.id == whitespace_commit_sha } + end + end + end + + describe '#more?' do + let(:path) { 'tmp/more' } + + where(:num_entries, :offset, :limit, :expected_result) do + 0 | 0 | 0 | false + 0 | 0 | 1 | false + + 1 | 0 | 0 | true + 1 | 0 | 1 | false + 1 | 1 | 0 | false + 1 | 1 | 1 | false + + 2 | 0 | 0 | true + 2 | 0 | 1 | true + 2 | 0 | 2 | false + 2 | 0 | 3 | false + 2 | 1 | 0 | true + 2 | 1 | 1 | false + 2 | 2 | 0 | false + 2 | 2 | 1 | false + end + + with_them do + before do + create_file('dummy', path: 'other') if num_entries.zero? + 1.upto(num_entries) { |n| create_file(n, path: path) } + end + + subject { summary.more? } + + it { is_expected.to eq(expected_result) } + end + end + + describe '#next_offset' do + let(:path) { 'tmp/next_offset' } + + where(:num_entries, :offset, :limit, :expected_result) do + 0 | 0 | 0 | 0 + 0 | 0 | 1 | 1 + 0 | 1 | 0 | 1 + 0 | 1 | 1 | 1 + + 1 | 0 | 0 | 0 + 1 | 0 | 1 | 1 + 1 | 1 | 0 | 1 + 1 | 1 | 1 | 2 + end + + with_them do + before do + create_file('dummy', path: 'other') if num_entries.zero? 
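# The `where` / `with_them` tables in `#more?` and `#next_offset` come from
# RSpec::Parameterized::TableSyntax (enabled at the top of this spec): each row
# expands into its own generated example, with the row values exposed as
# `num_entries`, `offset`, `limit` and `expected_result`. A minimal standalone
# sketch of the same pattern (hypothetical example, not part of this change):
#
#   describe 'addition' do
#     using RSpec::Parameterized::TableSyntax
#
#     where(:a, :b, :sum) do
#       1 | 2 | 3
#       0 | 5 | 5
#     end
#
#     with_them do
#       it { expect(a + b).to eq(sum) }
#     end
#   end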
+ 1.upto(num_entries) { |n| create_file(n, path: path) } + end + + subject { summary.next_offset } + + it { is_expected.to eq(expected_result) } + end + end + + def create_file(unique, path:) + repo.create_file( + project.creator, + "#{path}/file-#{unique}.txt", + 'content', + message: "Commit message #{unique}", + branch_name: 'master' + ) + end +end diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 23869f3d2da..b3f55a2e1bd 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -336,6 +336,22 @@ describe Gitlab::Workhorse do it { expect { subject }.to raise_exception('Unsupported action: download') } end end + + context 'when receive_max_input_size has been updated' do + it 'returns custom git config' do + allow(Gitlab::CurrentSettings).to receive(:receive_max_input_size) { 1 } + + expect(subject[:GitConfigOptions]).to be_present + end + end + + context 'when receive_max_input_size is empty' do + it 'returns an empty git config' do + allow(Gitlab::CurrentSettings).to receive(:receive_max_input_size) { nil } + + expect(subject[:GitConfigOptions]).to be_empty + end + end end describe '.set_key_and_notify' do diff --git a/spec/mailers/emails/auto_devops_spec.rb b/spec/mailers/emails/auto_devops_spec.rb new file mode 100644 index 00000000000..839caf3f50e --- /dev/null +++ b/spec/mailers/emails/auto_devops_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Emails::AutoDevops do + include EmailSpec::Matchers + + describe '#auto_devops_disabled_email' do + let(:owner) { create(:user) } + let(:namespace) { create(:namespace, owner: owner) } + let(:project) { create(:project, :repository, :auto_devops) } + let(:pipeline) { create(:ci_pipeline, :failed, project: project) } + + subject { Notify.autodevops_disabled_email(pipeline, owner.email) } + + it 'sents email with correct subject' do + is_expected.to have_subject("#{project.name} | Auto DevOps pipeline was disabled for #{project.name}") + end + + it 'sents an email to the user' do + recipient = subject.header[:to].addrs.map(&:address).first + + expect(recipient).to eq(owner.email) + end + + it 'is sent as GitLab email' do + sender = subject.header[:from].addrs[0].address + + expect(sender).to match(/gitlab/) + end + end +end diff --git a/spec/migrations/import_common_metrics_spec.rb b/spec/migrations/import_common_metrics_spec.rb new file mode 100644 index 00000000000..1001629007c --- /dev/null +++ b/spec/migrations/import_common_metrics_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require 'spec_helper' +require Rails.root.join('db', 'migrate', '20180831164910_import_common_metrics.rb') + +describe ImportCommonMetrics, :migration do + describe '#up' do + it "imports all prometheus metrics" do + expect(PrometheusMetric.common).to be_empty + + migrate! 
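# Specs tagged with `:migration`, like this one, typically run against a
# database schema rolled back to just before the migration under test;
# `migrate!` then runs the migration itself so the example can assert on state
# before and after it (here: no common Prometheus metrics beforehand, some
# afterwards). The `table(:...)` helpers used in the next spec below build
# lightweight models against that older schema.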
+ + expect(PrometheusMetric.common).not_to be_empty + end + end +end diff --git a/spec/migrations/remove_orphaned_label_links_spec.rb b/spec/migrations/remove_orphaned_label_links_spec.rb new file mode 100644 index 00000000000..13b8919343e --- /dev/null +++ b/spec/migrations/remove_orphaned_label_links_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180906051323_remove_orphaned_label_links.rb') + +describe RemoveOrphanedLabelLinks, :migration do + let(:label_links) { table(:label_links) } + let(:labels) { table(:labels) } + + let(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs + let(:label) { create_label } + + context 'add foreign key on label_id' do + let!(:label_link_with_label) { create_label_link(label_id: label.id) } + let!(:label_link_without_label) { create_label_link(label_id: nil) } + + it 'removes orphaned labels without corresponding label' do + expect { migrate! }.to change { LabelLink.count }.from(2).to(1) + end + + it 'does not remove entries with valid label_id' do + expect { migrate! }.not_to change { label_link_with_label.reload } + end + end + + def create_label(**opts) + labels.create!( + project_id: project.id, + **opts + ) + end + + def create_label_link(**opts) + label_links.create!( + target_id: 1, + target_type: 'Issue', + **opts + ) + end +end diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 77b7332a761..14ccc2960bb 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -1941,4 +1941,28 @@ describe Ci::Pipeline, :mailer do expect(pipeline.total_size).to eq(5) end end + + describe '#status' do + context 'when transitioning to failed' do + context 'when pipeline has autodevops as source' do + let(:pipeline) { create(:ci_pipeline, :running, :auto_devops_source) } + + it 'calls autodevops disable service' do + expect(AutoDevops::DisableWorker).to receive(:perform_async).with(pipeline.id) + + pipeline.drop + end + end + + context 'when pipeline has other source' do + let(:pipeline) { create(:ci_pipeline, :running, :repository_source) } + + it 'does not call auto devops disable service' do + expect(AutoDevops::DisableWorker).not_to receive(:perform_async) + + pipeline.drop + end + end + end + end end diff --git a/spec/models/label_note_spec.rb b/spec/models/label_note_spec.rb new file mode 100644 index 00000000000..f69874d94aa --- /dev/null +++ b/spec/models/label_note_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe LabelNote do + set(:project) { create(:project, :repository) } + set(:user) { create(:user) } + set(:label) { create(:label, project: project) } + set(:label2) { create(:label, project: project) } + let(:resource_parent) { project } + + context 'when resource is issue' do + set(:resource) { create(:issue, project: project) } + + it_behaves_like 'label note created from events' + end + + context 'when resource is merge request' do + set(:resource) { create(:merge_request, source_project: project, target_project: project) } + + it_behaves_like 'label note created from events' + end +end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index 9b7f932ec3a..3649990670b 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -394,12 +394,6 @@ describe Namespace do child.destroy end end - - it 'removes the exports folder' do - expect(namespace).to receive(:remove_exports!) 
- - namespace.destroy - end end context 'hashed storage' do @@ -414,12 +408,6 @@ describe Namespace do expect(File.exist?(deleted_path_in_dir)).to be(false) end - - it 'removes the exports folder' do - expect(namespace).to receive(:remove_exports!) - - namespace.destroy - end end end @@ -706,26 +694,6 @@ describe Namespace do end end - describe '#remove_exports' do - let(:legacy_project) { create(:project, :with_export, :legacy_storage, namespace: namespace) } - let(:hashed_project) { create(:project, :with_export, namespace: namespace) } - let(:export_path) { Dir.mktmpdir('namespace_remove_exports_spec') } - let(:legacy_export) { legacy_project.export_project_path } - let(:hashed_export) { hashed_project.export_project_path } - - it 'removes exports for legacy and hashed projects' do - allow(Gitlab::ImportExport).to receive(:storage_path) { export_path } - - expect(File.exist?(legacy_export)).to be_truthy - expect(File.exist?(hashed_export)).to be_truthy - - namespace.remove_exports! - - expect(File.exist?(legacy_export)).to be_falsy - expect(File.exist?(hashed_export)).to be_falsy - end - end - describe '#full_path_was' do context 'when the group has no parent' do it 'should return the path was' do diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb index 54f1a0e38a5..788b3179b01 100644 --- a/spec/models/project_services/jira_service_spec.rb +++ b/spec/models/project_services/jira_service_spec.rb @@ -231,12 +231,12 @@ describe JiraService do end it 'logs exception when transition id is not valid' do - allow(Rails.logger).to receive(:info) - WebMock.stub_request(:post, @transitions_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)).and_raise('Bad Request') + allow(@jira_service).to receive(:log_error) + WebMock.stub_request(:post, @transitions_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)).and_raise("Bad Request") @jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project)) - expect(Rails.logger).to have_received(:info).with('JiraService Issue Transition failed message ERROR: http://jira.example.com - Bad Request') + expect(@jira_service).to have_received(:log_error).with("Issue transition failed", error: "Bad Request", client_url: "http://jira.example.com") end it 'calls the api with jira_issue_transition_id' do diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 264632dba4b..dfe2de71a76 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -2854,73 +2854,12 @@ describe Project do end describe '#remove_export' do - let(:legacy_project) { create(:project, :legacy_storage, :with_export) } let(:project) { create(:project, :with_export) } - before do - stub_feature_flags(import_export_object_storage: false) - end - - it 'removes the exports directory for the project' do - expect(File.exist?(project.export_path)).to be_truthy - - allow(FileUtils).to receive(:rm_rf).and_call_original - expect(FileUtils).to receive(:rm_rf).with(project.export_path).and_call_original - project.remove_exports - - expect(File.exist?(project.export_path)).to be_falsy - end - - it 'is a no-op on legacy projects when there is no namespace' do - export_path = legacy_project.export_path - - legacy_project.namespace.delete - legacy_project.reload - - expect(FileUtils).not_to receive(:rm_rf).with(export_path) - - legacy_project.remove_exports - - expect(File.exist?(export_path)).to be_truthy - end - - it 'runs on hashed storage projects when there is no 
namespace' do - export_path = project.export_path - - project.namespace.delete - legacy_project.reload - - allow(FileUtils).to receive(:rm_rf).and_call_original - expect(FileUtils).to receive(:rm_rf).with(export_path).and_call_original - + it 'removes the export' do project.remove_exports - expect(File.exist?(export_path)).to be_falsy - end - - it 'is run when the project is destroyed' do - expect(project).to receive(:remove_exports).and_call_original - - project.destroy - end - end - - describe '#remove_exported_project_file' do - let(:project) { create(:project, :with_export) } - - it 'removes the exported project file' do - stub_feature_flags(import_export_object_storage: false) - - exported_file = project.export_project_path - - expect(File.exist?(exported_file)).to be_truthy - - allow(FileUtils).to receive(:rm_rf).and_call_original - expect(FileUtils).to receive(:rm_rf).with(exported_file).and_call_original - - project.remove_exported_project_file - - expect(File.exist?(exported_file)).to be_falsy + expect(project.export_file_exists?).to be_falsey end end @@ -4044,6 +3983,40 @@ describe Project do end end + describe '#update_root_ref' do + let(:project) { create(:project, :repository) } + + it 'updates the default branch when HEAD has changed' do + stub_find_remote_root_ref(project, ref: 'feature') + + expect { project.update_root_ref('origin') } + .to change { project.default_branch } + .from('master') + .to('feature') + end + + it 'does not update the default branch when HEAD does not change' do + stub_find_remote_root_ref(project, ref: 'master') + + expect { project.update_root_ref('origin') } + .not_to change { project.default_branch } + end + + it 'does not update the default branch when HEAD does not exist' do + stub_find_remote_root_ref(project, ref: 'foo') + + expect { project.update_root_ref('origin') } + .not_to change { project.default_branch } + end + + def stub_find_remote_root_ref(project, ref:) + allow(project.repository) + .to receive(:find_remote_root_ref) + .with('origin') + .and_return(ref) + end + end + def rugged_config Gitlab::GitalyClient::StorageSettings.allow_disk_access do project.repository.rugged.config diff --git a/spec/models/prometheus_metric_spec.rb b/spec/models/prometheus_metric_spec.rb new file mode 100644 index 00000000000..a83a31ae88c --- /dev/null +++ b/spec/models/prometheus_metric_spec.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe PrometheusMetric do + subject { build(:prometheus_metric) } + let(:other_project) { build(:project) } + + it { is_expected.to belong_to(:project) } + it { is_expected.to validate_presence_of(:title) } + it { is_expected.to validate_presence_of(:query) } + it { is_expected.to validate_presence_of(:group) } + + describe 'common metrics' do + using RSpec::Parameterized::TableSyntax + + where(:common, :project, :result) do + false | other_project | true + false | nil | false + true | other_project | false + true | nil | true + end + + with_them do + before do + subject.common = common + subject.project = project + end + + it { expect(subject.valid?).to eq(result) } + end + end + + describe '#query_series' do + using RSpec::Parameterized::TableSyntax + + where(:legend, :type) do + 'Some other legend' | NilClass + 'Status Code' | Array + end + + with_them do + before do + subject.legend = legend + end + + it { expect(subject.query_series).to be_a(type) } + end + end + + describe '#group_title' do + shared_examples 'group_title' do |group, title| + subject { build(:prometheus_metric, 
group: group).group_title } + + it "returns text #{title} for group #{group}" do + expect(subject).to eq(title) + end + end + + it_behaves_like 'group_title', :business, 'Business metrics (Custom)' + it_behaves_like 'group_title', :response, 'Response metrics (Custom)' + it_behaves_like 'group_title', :system, 'System metrics (Custom)' + end + + describe '#to_query_metric' do + it 'converts to queryable metric object' do + expect(subject.to_query_metric).to be_instance_of(Gitlab::Prometheus::Metric) + end + + it 'queryable metric object has title' do + expect(subject.to_query_metric.title).to eq(subject.title) + end + + it 'queryable metric object has y_label' do + expect(subject.to_query_metric.y_label).to eq(subject.y_label) + end + + it 'queryable metric has no required_metric' do + expect(subject.to_query_metric.required_metrics).to eq([]) + end + + it 'queryable metric has weight 0' do + expect(subject.to_query_metric.weight).to eq(0) + end + + it 'queryable metrics has query description' do + queries = [ + { + query_range: subject.query, + unit: subject.unit, + label: subject.legend + } + ] + + expect(subject.to_query_metric.queries).to eq(queries) + end + end +end diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb index 4756caa1b97..da6e1b5610d 100644 --- a/spec/models/resource_label_event_spec.rb +++ b/spec/models/resource_label_event_spec.rb @@ -3,7 +3,7 @@ require 'rails_helper' RSpec.describe ResourceLabelEvent, type: :model do - subject { build(:resource_label_event) } + subject { build(:resource_label_event, issue: issue) } let(:issue) { create(:issue) } let(:merge_request) { create(:merge_request) } @@ -16,8 +16,6 @@ RSpec.describe ResourceLabelEvent, type: :model do describe 'validations' do it { is_expected.to be_valid } - it { is_expected.to validate_presence_of(:label) } - it { is_expected.to validate_presence_of(:user) } describe 'Issuable validation' do it 'is invalid if issue_id and merge_request_id are missing' do @@ -45,4 +43,52 @@ RSpec.describe ResourceLabelEvent, type: :model do end end end + + describe '#expire_etag_cache' do + def expect_expiration(issue) + expect_any_instance_of(Gitlab::EtagCaching::Store) + .to receive(:touch) + .with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes") + end + + it 'expires resource note etag cache on event save' do + expect_expiration(subject.issuable) + + subject.save! + end + + it 'expires resource note etag cache on event destroy' do + subject.save! + + expect_expiration(subject.issuable) + + subject.destroy! + end + end + + describe '#outdated_markdown?' 
do + it 'returns true if label is missing and reference is not empty' do + subject.attributes = { reference: 'ref', label_id: nil } + + expect(subject.outdated_markdown?).to be true + end + + it 'returns true if reference is not set yet' do + subject.attributes = { reference: nil } + + expect(subject.outdated_markdown?).to be true + end + + it 'returns true markdown is outdated' do + subject.attributes = { cached_markdown_version: 0 } + + expect(subject.outdated_markdown?).to be true + end + + it 'returns false if label and reference are set' do + subject.attributes = { reference: 'whatever', cached_markdown_version: CacheMarkdownField::CACHE_COMMONMARK_VERSION } + + expect(subject.outdated_markdown?).to be false + end + end end diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb index 029ad7f3e9f..25eecb3f909 100644 --- a/spec/models/service_spec.rb +++ b/spec/models/service_spec.rb @@ -345,4 +345,31 @@ describe Service do expect(service.api_field_names).to eq(['safe_field']) end end + + context 'logging' do + let(:project) { create(:project) } + let(:service) { create(:service, project: project) } + let(:test_message) { "test message" } + let(:arguments) do + { + service_class: service.class.name, + project_path: project.full_path, + project_id: project.id, + message: test_message, + additional_argument: 'some argument' + } + end + + it 'logs info messages using json logger' do + expect(Gitlab::JsonLogger).to receive(:info).with(arguments) + + service.log_info(test_message, additional_argument: 'some argument') + end + + it 'logs error messages using json logger' do + expect(Gitlab::JsonLogger).to receive(:error).with(arguments) + + service.log_error(test_message, additional_argument: 'some argument') + end + end end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 2a7aff39240..bee4a3d24a7 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -2957,6 +2957,48 @@ describe User do end end + describe '#requires_usage_stats_consent?' 
do + let(:user) { create(:user, created_at: 8.days.ago) } + + before do + allow(user).to receive(:has_current_license?).and_return false + end + + context 'in single-user environment' do + it 'requires user consent after one week' do + create(:user, ghost: true) + + expect(user.requires_usage_stats_consent?).to be true + end + + it 'requires user consent after one week if there is another ghost user' do + expect(user.requires_usage_stats_consent?).to be true + end + + it 'does not require consent in the first week' do + user.created_at = 6.days.ago + + expect(user.requires_usage_stats_consent?).to be false + end + + it 'does not require consent if usage stats were set by this user' do + allow(Gitlab::CurrentSettings).to receive(:usage_stats_set_by_user_id).and_return(user.id) + + expect(user.requires_usage_stats_consent?).to be false + end + end + + context 'in multi-user environment' do + before do + create(:user) + end + + it 'does not require consent' do + expect(user.requires_usage_stats_consent?).to be false + end + end + end + context 'with uploads' do it_behaves_like 'model with mounted uploader', false do let(:model_object) { create(:user, :with_avatar) } diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index 6890f46c724..e0b5b34f9c4 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -369,6 +369,26 @@ describe API::Internal do expect(user.reload.last_activity_on).to be_nil end end + + context 'when receive_max_input_size has been updated' do + it 'returns custom git config' do + allow(Gitlab::CurrentSettings).to receive(:receive_max_input_size) { 1 } + + push(key, project) + + expect(json_response["git_config_options"]).to be_present + end + end + + context 'when receive_max_input_size is empty' do + it 'returns an empty git config' do + allow(Gitlab::CurrentSettings).to receive(:receive_max_input_size) { nil } + + push(key, project) + + expect(json_response["git_config_options"]).to be_empty + end + end end end diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb index 45e4e35d773..0586025956f 100644 --- a/spec/requests/api/project_export_spec.rb +++ b/spec/requests/api/project_export_spec.rb @@ -4,8 +4,8 @@ describe API::ProjectExport do set(:project) { create(:project) } set(:project_none) { create(:project) } set(:project_started) { create(:project) } - set(:project_finished) { create(:project) } - set(:project_after_export) { create(:project) } + let(:project_finished) { create(:project, :with_export) } + let(:project_after_export) { create(:project, :with_export) } set(:user) { create(:user) } set(:admin) { create(:admin) } @@ -29,13 +29,7 @@ describe API::ProjectExport do # simulate exporting work directory FileUtils.mkdir_p File.join(project_started.export_path, 'securerandom-hex') - # simulate exported - FileUtils.mkdir_p project_finished.export_path - FileUtils.touch File.join(project_finished.export_path, '_export.tar.gz') - # simulate in after export action - FileUtils.mkdir_p project_after_export.export_path - FileUtils.touch File.join(project_after_export.export_path, '_export.tar.gz') FileUtils.touch Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy.lock_file_path(project_after_export) end @@ -191,14 +185,11 @@ describe API::ProjectExport do context 'when upload complete' do before do - FileUtils.rm_rf(project_after_export.export_path) - - if project_after_export.export_project_object_exists? 
- upload = project_after_export.import_export_upload + project_after_export.remove_exports + end - upload.remove_export_file! - upload.save - end + it 'has removed the export' do + expect(project_after_export.export_file_exists?).to be_falsey end it_behaves_like '404 response' do @@ -273,13 +264,13 @@ describe API::ProjectExport do before do stub_uploads_object_storage(ImportExportUploader) - [project, project_finished, project_after_export].each do |p| - p.add_maintainer(user) + project.add_maintainer(user) + project_finished.add_maintainer(user) + project_after_export.add_maintainer(user) - upload = ImportExportUpload.new(project: p) - upload.export_file = fixture_file_upload('spec/fixtures/project_export.tar.gz', "`/tar.gz") - upload.save! - end + upload = ImportExportUpload.new(project: project) + upload.export_file = fixture_file_upload('spec/fixtures/project_export.tar.gz', "`/tar.gz") + upload.save! end it_behaves_like 'get project download by strategy' diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb index bc06f3c3732..c8fa4754810 100644 --- a/spec/requests/api/project_import_spec.rb +++ b/spec/requests/api/project_import_spec.rb @@ -7,7 +7,6 @@ describe API::ProjectImport do let(:namespace) { create(:group) } before do allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) - stub_feature_flags(import_export_object_storage: true) stub_uploads_object_storage(FileUploader) namespace.add_owner(user) diff --git a/spec/requests/api/resource_label_events_spec.rb b/spec/requests/api/resource_label_events_spec.rb new file mode 100644 index 00000000000..b7d4a5152cc --- /dev/null +++ b/spec/requests/api/resource_label_events_spec.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe API::ResourceLabelEvents do + set(:user) { create(:user) } + set(:project) { create(:project, :public, :repository, namespace: user.namespace) } + set(:private_user) { create(:user) } + + before do + project.add_developer(user) + end + + shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_name| + describe "GET /#{parent_type}/:id/#{eventable_type}/:noteable_id/resource_label_events" do + it "returns an array of resource label events" do + get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events", user) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + expect(json_response).to be_an Array + expect(json_response.first['id']).to eq(event.id) + end + + it "returns a 404 error when eventable id not found" do + get api("/#{parent_type}/#{parent.id}/#{eventable_type}/12345/resource_label_events", user) + + expect(response).to have_gitlab_http_status(404) + end + + it "returns 404 when not authorized" do + parent.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) + + get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events", private_user) + + expect(response).to have_gitlab_http_status(404) + end + end + + describe "GET /#{parent_type}/:id/#{eventable_type}/:noteable_id/resource_label_events/:event_id" do + it "returns a resource label event by id" do + get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events/#{event.id}", user) + + expect(response).to have_gitlab_http_status(200) + expect(json_response['id']).to eq(event.id) + end + + it "returns a 404 error if resource label event not 
found" do + get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events/12345", user) + + expect(response).to have_gitlab_http_status(404) + end + end + end + + context 'when eventable is an Issue' do + let(:issue) { create(:issue, project: project, author: user) } + + it_behaves_like 'resource_label_events API', 'projects', 'issues', 'iid' do + let(:parent) { project } + let(:eventable) { issue } + let!(:event) { create(:resource_label_event, issue: issue) } + end + end + + context 'when eventable is a Merge Request' do + let(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: user) } + + it_behaves_like 'resource_label_events API', 'projects', 'merge_requests', 'iid' do + let(:parent) { project } + let(:eventable) { merge_request } + let!(:event) { create(:resource_label_event, merge_request: merge_request) } + end + end +end diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb index dcf4503ef9c..fa1a421d528 100644 --- a/spec/services/issuable/common_system_notes_service_spec.rb +++ b/spec/services/issuable/common_system_notes_service_spec.rb @@ -12,12 +12,21 @@ describe Issuable::CommonSystemNotesService do it_behaves_like 'system note creation', { time_estimate: 5 }, 'changed time estimate' context 'when new label is added' do + let(:label) { create(:label, project: project) } + before do - label = create(:label, project: project) issuable.labels << label + issuable.save end - it_behaves_like 'system note creation', {}, /added ~\w+ label/ + it 'creates a resource label event' do + described_class.new(project, user).execute(issuable, []) + event = issuable.reload.resource_label_events.last + + expect(event).not_to be_nil + expect(event.label_id).to eq label.id + expect(event.user_id).to eq user.id + end end context 'when new milestone is assigned' do diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb index 609eef76d2c..b5767583952 100644 --- a/spec/services/issues/move_service_spec.rb +++ b/spec/services/issues/move_service_spec.rb @@ -122,6 +122,17 @@ describe Issues::MoveService do end end + context 'issue with resource label events' do + it 'assigns resource label events to new issue' do + old_issue.resource_label_events = create_list(:resource_label_event, 2, issue: old_issue) + + new_issue = move_service.execute(old_issue, new_project) + + expected = old_issue.resource_label_events.map(&:label_id) + expect(new_issue.resource_label_events.map(&:label_id)).to match_array(expected) + end + end + context 'generic issue' do include_context 'issue move executed' diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb index 5bcfef46b75..07aa8449a66 100644 --- a/spec/services/issues/update_service_spec.rb +++ b/spec/services/issues/update_service_spec.rb @@ -189,11 +189,12 @@ describe Issues::UpdateService, :mailer do expect(note.note).to include "assigned to #{user2.to_reference}" end - it 'creates system note about issue label edit' do - note = find_note('added ~') + it 'creates a resource label event' do + event = issue.resource_label_events.last - expect(note).not_to be_nil - expect(note.note).to include "added #{label.to_reference} label" + expect(event).not_to be_nil + expect(event.label_id).to eq label.id + expect(event.user_id).to eq user.id end it 'creates system note about title change' do diff --git 
a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb index 0ced5d1b6d6..9f1da7d9419 100644 --- a/spec/services/merge_requests/build_service_spec.rb +++ b/spec/services/merge_requests/build_service_spec.rb @@ -13,6 +13,8 @@ describe MergeRequests::BuildService do let(:description) { nil } let(:source_branch) { 'feature-branch' } let(:target_branch) { 'master' } + let(:milestone_id) { nil } + let(:label_ids) { [] } let(:merge_request) { service.execute } let(:compare) { double(:compare, commits: commits) } let(:commit_1) { double(:commit_1, sha: 'f00ba7', safe_message: "Initial commit\n\nCreate the app") } @@ -25,7 +27,9 @@ describe MergeRequests::BuildService do source_branch: source_branch, target_branch: target_branch, source_project: source_project, - target_project: target_project) + target_project: target_project, + milestone_id: milestone_id, + label_ids: label_ids) end before do @@ -179,6 +183,33 @@ describe MergeRequests::BuildService do expect(merge_request.description).to eq(expected_description) end end + + context 'when the source branch matches an internal issue' do + let(:label) { create(:label, project: project) } + let(:milestone) { create(:milestone, project: project) } + let(:source_branch) { '123-fix-issue' } + + before do + issue.update!(iid: 123, labels: [label], milestone: milestone) + end + + it 'assigns the issue label and milestone' do + expect(merge_request.milestone).to eq(milestone) + expect(merge_request.labels).to match_array([label]) + end + + context 'when milestone_id and label_ids are shared in the params' do + let(:label2) { create(:label, project: project) } + let(:milestone2) { create(:milestone, project: project) } + let(:label_ids) { [label2.id] } + let(:milestone_id) { milestone2.id } + + it 'assigns milestone_id and label_ids instead of issue labels and milestone' do + expect(merge_request.milestone).to eq(milestone2) + expect(merge_request.labels).to match_array([label2]) + end + end + end end end diff --git a/spec/services/merge_requests/reload_diffs_service_spec.rb b/spec/services/merge_requests/reload_diffs_service_spec.rb index a0a27d247fc..21f369a3818 100644 --- a/spec/services/merge_requests/reload_diffs_service_spec.rb +++ b/spec/services/merge_requests/reload_diffs_service_spec.rb @@ -57,6 +57,7 @@ describe MergeRequests::ReloadDiffsService, :use_clean_rails_memory_store_cachin expect(Rails.cache).to receive(:delete).with(old_cache_key).and_call_original expect(Rails.cache).to receive(:read).with(new_cache_key).and_call_original expect(Rails.cache).to receive(:write).with(new_cache_key, anything, anything).and_call_original + subject.execute end end diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb index f0029af83cc..55dfab81c26 100644 --- a/spec/services/merge_requests/update_service_spec.rb +++ b/spec/services/merge_requests/update_service_spec.rb @@ -109,11 +109,12 @@ describe MergeRequests::UpdateService, :mailer do expect(note.note).to include "assigned to #{user2.to_reference}" end - it 'creates system note about merge_request label edit' do - note = find_note('added ~') + it 'creates a resource label event' do + event = merge_request.resource_label_events.last - expect(note).not_to be_nil - expect(note.note).to include "added #{label.to_reference} label" + expect(event).not_to be_nil + expect(event.label_id).to eq label.id + expect(event.user_id).to eq user.id end it 'creates system note about title change' do diff 
--git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index c442f6fe32f..68a361fa882 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -1969,6 +1969,23 @@ describe NotificationService, :mailer do end end + context 'Auto DevOps notifications' do + describe '#autodevops_disabled' do + let(:owner) { create(:user) } + let(:namespace) { create(:namespace, owner: owner) } + let(:project) { create(:project, :repository, :auto_devops, namespace: namespace) } + let(:pipeline_user) { create(:user) } + let(:pipeline) { create(:ci_pipeline, :failed, project: project, user: pipeline_user) } + + it 'emails project owner and user that triggered the pipeline' do + notification.autodevops_disabled(pipeline, [owner.email, pipeline_user.email]) + + should_email(owner) + should_email(pipeline_user) + end + end + end + def build_team(project) @u_watcher = create_global_setting_for(create(:user), :watch) @u_participating = create_global_setting_for(create(:user), :participating) diff --git a/spec/services/projects/auto_devops/disable_service_spec.rb b/spec/services/projects/auto_devops/disable_service_spec.rb new file mode 100644 index 00000000000..76977d7a1a7 --- /dev/null +++ b/spec/services/projects/auto_devops/disable_service_spec.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true +require 'spec_helper' + +describe Projects::AutoDevops::DisableService, '#execute' do + let(:project) { create(:project, :repository, :auto_devops) } + let(:auto_devops) { project.auto_devops } + + subject { described_class.new(project).execute } + + context 'when Auto DevOps disabled at instance level' do + before do + stub_application_setting(auto_devops_enabled: false) + end + + it { is_expected.to be_falsy } + end + + context 'when Auto DevOps enabled at instance level' do + before do + stub_application_setting(auto_devops_enabled: true) + end + + context 'when Auto DevOps explicitly enabled on project' do + before do + auto_devops.update!(enabled: true) + end + + it { is_expected.to be_falsy } + end + + context 'when Auto DevOps explicitly disabled on project' do + before do + auto_devops.update!(enabled: false) + end + + it { is_expected.to be_falsy } + end + + context 'when Auto DevOps is implicitly enabled' do + before do + auto_devops.update!(enabled: nil) + end + + context 'when is the first pipeline failure' do + before do + create(:ci_pipeline, :failed, :auto_devops_source, project: project) + end + + it 'should disable Auto DevOps for project' do + subject + + expect(auto_devops.enabled).to eq(false) + end + end + + context 'when it is not the first pipeline failure' do + before do + create_list(:ci_pipeline, 2, :failed, :auto_devops_source, project: project) + end + + it 'should explicitly disable Auto DevOps for project' do + subject + + expect(auto_devops.reload.enabled).to eq(false) + end + end + + context 'when an Auto DevOps pipeline has succeeded before' do + before do + create(:ci_pipeline, :success, :auto_devops_source, project: project) + end + + it 'should not disable Auto DevOps for project' do + subject + + expect(auto_devops.reload.enabled).to be_nil + end + end + end + + context 'when project does not have an Auto DevOps record related' do + let(:project) { create(:project, :repository) } + + before do + create(:ci_pipeline, :failed, :auto_devops_source, project: project) + end + + it 'should disable Auto DevOps for project' do + subject + auto_devops = project.reload.auto_devops + + 
expect(auto_devops.enabled).to eq(false) + end + + it 'should create a ProjectAutoDevops record' do + expect { subject }.to change { ProjectAutoDevops.count }.from(0).to(1) + end + end + end +end diff --git a/spec/services/projects/container_repository/destroy_service_spec.rb b/spec/services/projects/container_repository/destroy_service_spec.rb new file mode 100644 index 00000000000..307ccc88865 --- /dev/null +++ b/spec/services/projects/container_repository/destroy_service_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Projects::ContainerRepository::DestroyService do + set(:user) { create(:user) } + set(:project) { create(:project, :private) } + + subject { described_class.new(project, user) } + + before do + stub_container_registry_config(enabled: true) + end + + context 'when user does not have access to registry' do + let!(:repository) { create(:container_repository, :root, project: project) } + + it 'does not delete a repository' do + expect { subject.execute(repository) }.not_to change { ContainerRepository.all.count } + end + end + + context 'when user has access to registry' do + before do + project.add_developer(user) + end + + context 'when root container repository exists' do + let!(:repository) { create(:container_repository, :root, project: project) } + + before do + stub_container_registry_tags(repository: :any, tags: []) + end + + it 'deletes the repository' do + expect { described_class.new(project, user).execute(repository) }.to change { ContainerRepository.all.count }.by(-1) + end + end + end +end diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb index 96e8a80b334..56a36432462 100644 --- a/spec/services/projects/update_remote_mirror_service_spec.rb +++ b/spec/services/projects/update_remote_mirror_service_spec.rb @@ -1,107 +1,118 @@ require 'spec_helper' describe Projects::UpdateRemoteMirrorService do - set(:project) { create(:project, :repository) } - let(:owner) { project.owner } + let(:project) { create(:project, :repository) } let(:remote_project) { create(:forked_project_with_submodules) } - let(:repository) { project.repository } - let(:raw_repository) { repository.raw } let(:remote_mirror) { project.remote_mirrors.create!(url: remote_project.http_url_to_repo, enabled: true, only_protected_branches: false) } + let(:remote_name) { remote_mirror.remote_name } - subject { described_class.new(project, project.creator) } + subject(:service) { described_class.new(project, project.creator) } describe "#execute" do before do - repository.add_branch(owner, 'existing-branch', 'master') + project.repository.add_branch(project.owner, 'existing-branch', 'master') allow(remote_mirror).to receive(:update_repository).and_return(true) end + it "ensures the remote exists" do + stub_fetch_remote(project, remote_name: remote_name) + stub_find_remote_root_ref(project, remote_name: remote_name) + + expect(remote_mirror).to receive(:ensure_remote!) 
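# `expect(remote_mirror).to receive(:ensure_remote!)` declares a message
# expectation up front; RSpec verifies it automatically when the example
# finishes, which is why no explicit assertion follows the `service.execute`
# call below. The `stub_fetch_remote` / `stub_find_remote_root_ref` helpers
# (defined further down in this spec) stand in for the repository calls so the
# example never has to talk to a real remote.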
+ + service.execute(remote_mirror) + end + it "fetches the remote repository" do - expect(remote_mirror).to receive(:ensure_remote!).and_call_original - expect(repository).to receive(:fetch_remote).with(remote_mirror.remote_name, no_tags: true) do - sync_remote(repository, remote_mirror.remote_name, local_branch_names) - end + stub_find_remote_root_ref(project, remote_name: remote_name) + + expect(project.repository) + .to receive(:fetch_remote) + .with(remote_mirror.remote_name, no_tags: true) + + service.execute(remote_mirror) + end + + it "updates the default branch when HEAD has changed" do + stub_fetch_remote(project, remote_name: remote_name) + stub_find_remote_root_ref(project, remote_name: remote_name, ref: "existing-branch") - subject.execute(remote_mirror) + expect { service.execute(remote_mirror) } + .to change { project.default_branch } + .from("master") + .to("existing-branch") end - it "succeeds" do - allow(repository).to receive(:fetch_remote) { sync_remote(repository, remote_mirror.remote_name, local_branch_names) } + it "does not update the default branch when HEAD does not change" do + stub_fetch_remote(project, remote_name: remote_name) + stub_find_remote_root_ref(project, remote_name: remote_name, ref: "master") + + expect { service.execute(remote_mirror) }.not_to change { project.default_branch } + end - result = subject.execute(remote_mirror) + it "returns success when updated succeeds" do + stub_fetch_remote(project, remote_name: remote_name) + stub_find_remote_root_ref(project, remote_name: remote_name) + + result = service.execute(remote_mirror) expect(result[:status]).to eq(:success) end context 'when syncing all branches' do it "push all the branches the first time" do - allow(repository).to receive(:fetch_remote) + stub_fetch_remote(project, remote_name: remote_name) + stub_find_remote_root_ref(project, remote_name: remote_name) expect(remote_mirror).to receive(:update_repository).with({}) - subject.execute(remote_mirror) + service.execute(remote_mirror) end end context 'when only syncing protected branches' do - let(:unprotected_branch_name) { 'existing-branch' } - let(:protected_branch_name) do - project.repository.branch_names.find { |n| n != unprotected_branch_name } - end - let!(:protected_branch) do - create(:protected_branch, project: project, name: protected_branch_name) - end - - before do - project.reload + it "sync updated protected branches" do + stub_fetch_remote(project, remote_name: remote_name) + stub_find_remote_root_ref(project, remote_name: remote_name) + protected_branch = create_protected_branch(project) remote_mirror.only_protected_branches = true - end - it "sync updated protected branches" do - allow(repository).to receive(:fetch_remote) - expect(remote_mirror).to receive(:update_repository).with(only_branches_matching: [protected_branch_name]) + expect(remote_mirror) + .to receive(:update_repository) + .with(only_branches_matching: [protected_branch.name]) - subject.execute(remote_mirror) + service.execute(remote_mirror) end - end - end - def sync_remote(repository, remote_name, local_branch_names) - local_branch_names.each do |branch| - commit = repository.commit(branch) - repository.write_ref("refs/remotes/#{remote_name}/#{branch}", commit.id) if commit + def create_protected_branch(project) + branch_name = project.repository.branch_names.find { |n| n != 'existing-branch' } + create(:protected_branch, project: project, name: branch_name) + end end end - def update_remote_branch(repository, remote_name, branch) - masterrev = 
repository.commit('master').id - - repository.write_ref("refs/remotes/#{remote_name}/#{branch}", masterrev, force: true) - repository.expire_branches_cache + def stub_find_remote_root_ref(project, ref: 'master', remote_name:) + allow(project.repository) + .to receive(:find_remote_root_ref) + .with(remote_name) + .and_return(ref) end - def update_branch(repository, branch) - masterrev = repository.commit('master').id - - repository.write_ref("refs/heads/#{branch}", masterrev, force: true) - repository.expire_branches_cache + def stub_fetch_remote(project, remote_name:) + allow(project.repository) + .to receive(:fetch_remote) + .with(remote_name, no_tags: true) { fetch_remote(project.repository, remote_name) } end - def generate_tags(repository, *tag_names) - tag_names.each_with_object([]) do |name, tags| - tag = repository.find_tag(name) - target = tag.try(:target) - target_commit = tag.try(:dereferenced_target) - tags << Gitlab::Git::Tag.new(repository.raw_repository, { - name: name, - target: target, - target_commit: target_commit - }) + def fetch_remote(repository, remote_name) + local_branch_names(repository).each do |branch| + commit = repository.commit(branch) + repository.write_ref("refs/remotes/#{remote_name}/#{branch}", commit.id) if commit end end - def local_branch_names + def local_branch_names(repository) branch_names = repository.branches.map(&:name) # we want the protected branch to be pushed first branch_names.unshift(branch_names.delete('master')) diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb index bf1c157c4a2..06cad9c00d2 100644 --- a/spec/services/quick_actions/interpret_service_spec.rb +++ b/spec/services/quick_actions/interpret_service_spec.rb @@ -272,6 +272,28 @@ describe QuickActions::InterpretService do end end + shared_examples 'lock command' do + let(:issue) { create(:issue, project: project, discussion_locked: false) } + let(:merge_request) { create(:merge_request, source_project: project, discussion_locked: false) } + + it 'returns discussion_locked: true if content contains /lock' do + _, updates = service.execute(content, issuable) + + expect(updates).to eq(discussion_locked: true) + end + end + + shared_examples 'unlock command' do + let(:issue) { create(:issue, project: project, discussion_locked: true) } + let(:merge_request) { create(:merge_request, source_project: project, discussion_locked: true) } + + it 'returns discussion_locked: true if content contains /unlock' do + _, updates = service.execute(content, issuable) + + expect(updates).to eq(discussion_locked: false) + end + end + shared_examples 'empty command' do it 'populates {} if content contains an unsupported command' do _, updates = service.execute(content, issuable) @@ -786,6 +808,26 @@ describe QuickActions::InterpretService do let(:issuable) { issue } end + it_behaves_like 'lock command' do + let(:content) { '/lock' } + let(:issuable) { issue } + end + + it_behaves_like 'lock command' do + let(:content) { '/lock' } + let(:issuable) { merge_request } + end + + it_behaves_like 'unlock command' do + let(:content) { '/unlock' } + let(:issuable) { issue } + end + + it_behaves_like 'unlock command' do + let(:content) { '/unlock' } + let(:issuable) { merge_request } + end + context '/todo' do let(:content) { '/todo' } @@ -961,6 +1003,16 @@ describe QuickActions::InterpretService do let(:content) { '/duplicate #{issue.to_reference}' } let(:issuable) { issue } end + + it_behaves_like 'empty command' do + let(:content) { 
'/lock' } + let(:issuable) { issue } + end + + it_behaves_like 'empty command' do + let(:content) { '/unlock' } + let(:issuable) { issue } + end end context '/award command' do diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb index 41b0fb3eea3..4c9138fb1ef 100644 --- a/spec/services/resource_events/change_labels_service_spec.rb +++ b/spec/services/resource_events/change_labels_service_spec.rb @@ -18,6 +18,14 @@ describe ResourceEvents::ChangeLabelsService do expect(event.action).to eq(action) end + it 'expires resource note etag cache' do + expect_any_instance_of(Gitlab::EtagCaching::Store) + .to receive(:touch) + .with("/#{resource.project.namespace.to_param}/#{resource.project.to_param}/noteable/issue/#{resource.id}/notes") + + described_class.new(resource, author).execute(added_labels: [labels[0]]) + end + context 'when adding a label' do let(:added) { [labels[0]] } let(:removed) { [] } diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb new file mode 100644 index 00000000000..0d333d541c9 --- /dev/null +++ b/spec/services/resource_events/merge_into_notes_service_spec.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe ResourceEvents::MergeIntoNotesService do + def create_event(params) + event_params = { action: :add, label: label, issue: resource, + user: user } + + create(:resource_label_event, event_params.merge(params)) + end + + def create_note(params) + opts = { noteable: resource, project: project } + + create(:note_on_issue, opts.merge(params)) + end + + set(:project) { create(:project) } + set(:user) { create(:user) } + set(:resource) { create(:issue, project: project) } + set(:label) { create(:label, project: project) } + set(:label2) { create(:label, project: project) } + let(:time) { Time.now } + + describe '#execute' do + it 'merges label events into notes in order of created_at' do + note1 = create_note(created_at: 4.days.ago) + note2 = create_note(created_at: 2.days.ago) + event1 = create_event(created_at: 3.days.ago) + event2 = create_event(created_at: 1.day.ago) + + notes = described_class.new(resource, user).execute([note1, note2]) + + expected = [note1, event1, note2, event2].map(&:discussion_id) + expect(notes.map(&:discussion_id)).to eq expected + end + + it 'squashes events with same time and author into single note' do + user2 = create(:user) + + create_event(created_at: time) + create_event(created_at: time, label: label2, action: :remove) + create_event(created_at: time, user: user2) + create_event(created_at: 1.day.ago, label: label2) + + notes = described_class.new(resource, user).execute() + + expected = [ + "added #{label.to_reference} label and removed #{label2.to_reference} label", + "added #{label.to_reference} label", + "added #{label2.to_reference} label" + ] + + expect(notes.count).to eq 3 + expect(notes.map(&:note)).to match_array expected + end + + it 'fetches only notes created after last_fetched_at' do + create_event(created_at: 4.days.ago) + event = create_event(created_at: 1.day.ago) + + notes = described_class.new(resource, user, + last_fetched_at: 2.days.ago.to_i).execute() + + expect(notes.count).to eq 1 + expect(notes.first.discussion_id).to eq event.discussion_id + end + end +end diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb index 442de61f69b..f4b7cb8c90a 100644 --- 
a/spec/services/system_note_service_spec.rb +++ b/spec/services/system_note_service_spec.rb @@ -197,45 +197,6 @@ describe SystemNoteService do end end - describe '.change_label' do - subject { described_class.change_label(noteable, project, author, added, removed) } - - let(:labels) { create_list(:label, 2, project: project) } - let(:added) { [] } - let(:removed) { [] } - - it_behaves_like 'a system note' do - let(:action) { 'label' } - end - - context 'with added labels' do - let(:added) { labels } - let(:removed) { [] } - - it 'sets the note text' do - expect(subject.note).to eq "added ~#{labels[0].id} ~#{labels[1].id} labels" - end - end - - context 'with removed labels' do - let(:added) { [] } - let(:removed) { labels } - - it 'sets the note text' do - expect(subject.note).to eq "removed ~#{labels[0].id} ~#{labels[1].id} labels" - end - end - - context 'with added and removed labels' do - let(:added) { [labels[0]] } - let(:removed) { [labels[1]] } - - it 'sets the note text' do - expect(subject.note).to eq "added ~#{labels[0].id} and removed ~#{labels[1].id} labels" - end - end - end - describe '.change_milestone' do context 'for a project milestone' do subject { described_class.change_milestone(noteable, project, author, milestone) } @@ -288,6 +249,30 @@ describe SystemNoteService do end end + describe '.change_due_date' do + subject { described_class.change_due_date(noteable, project, author, due_date) } + + let(:due_date) { Date.today } + + it_behaves_like 'a system note' do + let(:action) { 'due_date' } + end + + context 'when due date added' do + it 'sets the note text' do + expect(subject.note).to eq "changed due date to #{Date.today.to_s(:long)}" + end + end + + context 'when due date removed' do + let(:due_date) { nil } + + it 'sets the note text' do + expect(subject.note).to eq 'removed due date' + end + end + end + describe '.change_status' do subject { described_class.change_status(noteable, project, author, status, source) } @@ -725,7 +710,7 @@ describe SystemNoteService do let(:jira_tracker) { project.jira_service } let(:commit) { project.commit } let(:comment_url) { jira_api_comment_url(jira_issue.id) } - let(:success_message) { "JiraService SUCCESS: Successfully posted to http://jira.example.net." } + let(:success_message) { "SUCCESS: Successfully posted to http://jira.example.net." 
} before do stub_jira_urls(jira_issue.id) diff --git a/spec/services/wikis/create_attachment_service_spec.rb b/spec/services/wikis/create_attachment_service_spec.rb index 3f4da873ce4..f5899f292c8 100644 --- a/spec/services/wikis/create_attachment_service_spec.rb +++ b/spec/services/wikis/create_attachment_service_spec.rb @@ -88,8 +88,30 @@ describe Wikis::CreateAttachmentService do end end - describe 'validations' do + describe '#parse_file_name' do context 'when file_name' do + context 'has white spaces' do + let(:file_name) { 'file with spaces' } + + it "replaces all of them with '_'" do + result = service.execute + + expect(result[:status]).to eq :success + expect(result[:result][:file_name]).to eq 'file_with_spaces' + end + end + + context 'has other invalid characters' do + let(:file_name) { "file\twith\tinvalid chars" } + + it "replaces all of them with '_'" do + result = service.execute + + expect(result[:status]).to eq :success + expect(result[:result][:file_name]).to eq 'file_with_invalid_chars' + end + end + context 'is not present' do let(:file_name) { nil } diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index c4bb1c13f2e..d1337325973 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -135,6 +135,10 @@ RSpec.configure do |config| Fog.unmock! if Fog.mock? end + config.after(:example) do + Gitlab::CurrentSettings.clear_in_memory_application_settings! + end + config.before(:example, :mailer) do reset_delivered_emails! end diff --git a/spec/support/features/issuable_slash_commands_shared_examples.rb b/spec/support/features/issuable_quick_actions_shared_examples.rb index 9b44c532ff6..846e697eb96 100644 --- a/spec/support/features/issuable_slash_commands_shared_examples.rb +++ b/spec/support/features/issuable_quick_actions_shared_examples.rb @@ -55,7 +55,7 @@ shared_examples 'issuable record that supports quick actions in its description describe "note on #{issuable_type}", :js do before do - visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable) + visit public_send("project_#{issuable_type}_path", project, issuable) end context 'with a note containing commands' do @@ -121,7 +121,7 @@ shared_examples 'issuable record that supports quick actions in its description gitlab_sign_out gitlab_sign_in(guest) - visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable) + visit public_send("project_#{issuable_type}_path", project, issuable) end it "does not close the #{issuable_type}" do @@ -158,7 +158,7 @@ shared_examples 'issuable record that supports quick actions in its description gitlab_sign_out gitlab_sign_in(guest) - visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable) + visit public_send("project_#{issuable_type}_path", project, issuable) end it "does not reopen the #{issuable_type}" do @@ -190,7 +190,7 @@ shared_examples 'issuable record that supports quick actions in its description gitlab_sign_out gitlab_sign_in(guest) - visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable) + visit public_send("project_#{issuable_type}_path", project, issuable) end it "does not change the #{issuable_type} title" do @@ -285,13 +285,87 @@ shared_examples 'issuable record that supports quick actions in its description expect(issuable.reload.assignees).to eq [maintainer] end end + + context "with a note locking the #{issuable_type} discussion" do + before do + issuable.update(discussion_locked: false) + 
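# These /lock and /unlock examples sit inside the
# 'issuable record that supports quick actions…' shared examples, which are
# parameterised by `issuable_type`, so the same behaviour is exercised for both
# issues and merge requests; the assertion in this `before` block guards the
# fixture state so a wrong starting value fails fast. A minimal sketch of the
# shared-example pattern (hypothetical names, not part of this change):
#
#   shared_examples 'lockable issuable' do |issuable_type|
#     it "locks the #{issuable_type} discussion" do
#       add_note('/lock')
#       expect(issuable.reload).to be_discussion_locked
#     end
#   end
#
#   it_behaves_like 'lockable issuable', 'issue' do
#     let(:issuable) { issue }
#   end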
expect(issuable).not_to be_discussion_locked + end + + context "when current user can lock #{issuable_type} discussion" do + it "locks the #{issuable_type} discussion" do + add_note("/lock") + + expect(page).not_to have_content '/lock' + expect(page).to have_content 'Commands applied' + + expect(issuable.reload).to be_discussion_locked + end + end + + context "when current user cannot lock #{issuable_type}" do + before do + guest = create(:user) + project.add_guest(guest) + + gitlab_sign_out + sign_in(guest) + visit public_send("project_#{issuable_type}_path", project, issuable) + end + + it "does not lock the #{issuable_type} discussion" do + add_note("/lock") + + expect(page).not_to have_content 'Commands applied' + + expect(issuable).not_to be_discussion_locked + end + end + end + + context "with a note unlocking the #{issuable_type} discussion" do + before do + issuable.update(discussion_locked: true) + expect(issuable).to be_discussion_locked + end + + context "when current user can unlock #{issuable_type} discussion" do + it "unlocks the #{issuable_type} discussion" do + add_note("/unlock") + + expect(page).not_to have_content '/unlock' + expect(page).to have_content 'Commands applied' + + expect(issuable.reload).not_to be_discussion_locked + end + end + + context "when current user cannot unlock #{issuable_type}" do + before do + guest = create(:user) + project.add_guest(guest) + + gitlab_sign_out + sign_in(guest) + visit public_send("project_#{issuable_type}_path", project, issuable) + end + + it "does not unlock the #{issuable_type} discussion" do + add_note("/unlock") + + expect(page).not_to have_content 'Commands applied' + + expect(issuable).to be_discussion_locked + end + end + end end describe "preview of note on #{issuable_type}", :js do it 'removes quick actions from note and explains them' do create(:user, username: 'bob') - visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable) + visit public_send("project_#{issuable_type}_path", project, issuable) page.within('.js-main-target-form') do fill_in 'note[note]', with: "Awesome!\n/assign @bob " diff --git a/spec/support/import_export/export_file_helper.rb b/spec/support/import_export/export_file_helper.rb index 4d925ac77f4..d9ed405baf4 100644 --- a/spec/support/import_export/export_file_helper.rb +++ b/spec/support/import_export/export_file_helper.rb @@ -52,7 +52,7 @@ module ExportFileHelper # Expands the compressed file for an exported project into +tmpdir+ def in_directory_with_expanded_export(project) Dir.mktmpdir do |tmpdir| - export_file = project.export_project_path + export_file = project.export_file.path _output, exit_status = Gitlab::Popen.popen(%W{tar -zxf #{export_file} -C #{tmpdir}}) yield(exit_status, tmpdir) diff --git a/spec/support/shared_examples/instance_statistics_controllers_shared_examples.rb b/spec/support/shared_examples/instance_statistics_controllers_shared_examples.rb index 5334af841e1..8ea307c7c61 100644 --- a/spec/support/shared_examples/instance_statistics_controllers_shared_examples.rb +++ b/spec/support/shared_examples/instance_statistics_controllers_shared_examples.rb @@ -5,6 +5,8 @@ shared_examples 'instance statistics availability' do before do sign_in(user) + + stub_application_setting(usage_ping_enabled: true) end describe 'GET #index' do diff --git a/spec/support/shared_examples/models/label_note_shared_examples.rb b/spec/support/shared_examples/models/label_note_shared_examples.rb new file mode 100644 index 00000000000..406385c13bd --- /dev/null 
+++ b/spec/support/shared_examples/models/label_note_shared_examples.rb @@ -0,0 +1,109 @@ +# frozen_string_literal: true + +shared_examples 'label note created from events' do + def create_event(params = {}) + event_params = { action: :add, label: label, user: user } + resource_key = resource.class.name.underscore.to_s + event_params[resource_key] = resource + + build(:resource_label_event, event_params.merge(params)) + end + + def label_refs(events) + labels = events.map(&:label).compact + + labels.map { |l| l.to_reference}.sort.join(' ') + end + + let(:time) { Time.now } + let(:local_label_ids) { [label.id, label2.id] } + + describe '.from_events' do + it 'returns system note with expected attributes' do + event = create_event + + note = described_class.from_events([event, create_event]) + + expect(note.system).to be true + expect(note.author_id).to eq event.user_id + expect(note.discussion_id).to eq event.discussion_id + expect(note.noteable).to eq event.issuable + expect(note.note).to be_present + expect(note.note_html).to be_present + end + + it 'updates markdown cache if reference is not set yet' do + event = create_event(reference: nil) + + described_class.from_events([event]) + + expect(event.reference).not_to be_nil + end + + it 'updates markdown cache if label was deleted' do + event = create_event(reference: 'some_ref', label: nil) + + described_class.from_events([event]) + + expect(event.reference).to eq '' + end + + it 'returns html note' do + events = [create_event(created_at: time)] + + note = described_class.from_events(events) + + expect(note.note_html).to include label.title + end + + it 'returns text note for added labels' do + events = [create_event(created_at: time), + create_event(created_at: time, label: label2), + create_event(created_at: time, label: nil)] + + note = described_class.from_events(events) + + expect(note.note).to eq "added #{label_refs(events)} + 1 deleted label" + end + + it 'returns text note for removed labels' do + events = [create_event(action: :remove, created_at: time), + create_event(action: :remove, created_at: time, label: label2), + create_event(action: :remove, created_at: time, label: nil)] + + note = described_class.from_events(events) + + expect(note.note).to eq "removed #{label_refs(events)} + 1 deleted label" + end + + it 'returns text note for added and removed labels' do + add_events = [create_event(created_at: time), + create_event(created_at: time, label: nil)] + + remove_events = [create_event(action: :remove, created_at: time), + create_event(action: :remove, created_at: time, label: nil)] + + note = described_class.from_events(add_events + remove_events) + + expect(note.note).to eq "added #{label_refs(add_events)} + 1 deleted label and removed #{label_refs(remove_events)} + 1 deleted label" + end + + it 'returns text note for cross-project label' do + other_label = create(:label) + event = create_event(label: other_label) + + note = described_class.from_events([event]) + + expect(note.note).to eq "added #{other_label.to_reference(resource_parent)} label" + end + + it 'returns text note for cross-group label' do + other_label = create(:group_label) + event = create_event(label: other_label) + + note = described_class.from_events([event]) + + expect(note.note).to eq "added #{other_label.to_reference(other_label.group, target_project: project, full: true)} label" + end + end +end diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb index d143014692d..6c4e11910d3 100644 --- a/spec/support/sidekiq.rb +++ 
b/spec/support/sidekiq.rb @@ -1,7 +1,27 @@ require 'sidekiq/testing/inline' +# If Sidekiq::Testing.inline! is used, SQL transactions done inside +# Sidekiq worker are included in the SQL query limit (in a real +# deployment sidekiq worker is executed separately). To avoid +# increasing SQL limit counter, the request is marked as whitelisted +# during Sidekiq block +class DisableQueryLimit + def call(worker_instance, msg, queue) + transaction = Gitlab::QueryLimiting::Transaction.current + + if !transaction.respond_to?(:whitelisted) || transaction.whitelisted + yield + else + transaction.whitelisted = true + yield + transaction.whitelisted = false + end + end +end + Sidekiq::Testing.server_middleware do |chain| chain.add Gitlab::SidekiqStatus::ServerMiddleware + chain.add DisableQueryLimit end RSpec.configure do |config| diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb index cc2cca10f58..19794227d9f 100644 --- a/spec/tasks/gitlab/cleanup_rake_spec.rb +++ b/spec/tasks/gitlab/cleanup_rake_spec.rb @@ -6,6 +6,8 @@ describe 'gitlab:cleanup rake tasks' do end describe 'cleanup namespaces and repos' do + let(:gitlab_shell) { Gitlab::Shell.new } + let(:storage) { storages.keys.first } let(:storages) do { 'default' => Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/default_storage')) @@ -17,53 +19,56 @@ describe 'gitlab:cleanup rake tasks' do end before do - FileUtils.mkdir(Settings.absolute('tmp/tests/default_storage')) allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) end after do - FileUtils.rm_rf(Settings.absolute('tmp/tests/default_storage')) + Gitlab::GitalyClient::StorageService.new(storage).delete_all_repositories end describe 'cleanup:repos' do before do - FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/broken/project.git')) - FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git')) + gitlab_shell.add_namespace(storage, 'broken/project.git') + gitlab_shell.add_namespace(storage, '@hashed/12/34/5678.git') end it 'moves it to an orphaned path' do - run_rake_task('gitlab:cleanup:repos') - repo_list = Dir['tmp/tests/default_storage/broken/*'] + now = Time.now + + Timecop.freeze(now) do + run_rake_task('gitlab:cleanup:repos') + repo_list = Gitlab::GitalyClient::StorageService.new(storage).list_directories(depth: 0) - expect(repo_list.first).to include('+orphaned+') + expect(repo_list.last).to include("broken+orphaned+#{now.to_i}") + end end it 'ignores @hashed repos' do run_rake_task('gitlab:cleanup:repos') - expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git'))).to be_truthy + expect(gitlab_shell.exists?(storage, '@hashed/12/34/5678.git')).to be(true) end end describe 'cleanup:dirs' do it 'removes missing namespaces' do - FileUtils.mkdir_p(Settings.absolute("tmp/tests/default_storage/namespace_1/project.git")) - FileUtils.mkdir_p(Settings.absolute("tmp/tests/default_storage/namespace_2/project.git")) - allow(Namespace).to receive(:pluck).and_return('namespace_1') + gitlab_shell.add_namespace(storage, "namespace_1/project.git") + gitlab_shell.add_namespace(storage, "namespace_2/project.git") + allow(Namespace).to receive(:pluck).and_return(['namespace_1']) stub_env('REMOVE', 'true') run_rake_task('gitlab:cleanup:dirs') - expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/namespace_1'))).to be_truthy - expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/namespace_2'))).to be_falsey 
+ expect(gitlab_shell.exists?(storage, 'namespace_1')).to be(true) + expect(gitlab_shell.exists?(storage, 'namespace_2')).to be(false) end it 'ignores @hashed directory' do - FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git')) + gitlab_shell.add_namespace(storage, '@hashed/12/34/5678.git') run_rake_task('gitlab:cleanup:dirs') - expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git'))).to be_truthy + expect(gitlab_shell.exists?(storage, '@hashed/12/34/5678.git')).to be(true) end end end diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb index 71fe2c353c0..eafbea07e10 100644 --- a/spec/uploaders/namespace_file_uploader_spec.rb +++ b/spec/uploaders/namespace_file_uploader_spec.rb @@ -26,6 +26,20 @@ describe NamespaceFileUploader do upload_path: IDENTIFIER end + context '.base_dir' do + it 'returns local storage base_dir without store param' do + expect(described_class.base_dir(group)).to eq("uploads/-/system/namespace/#{group.id}") + end + + it 'returns local storage base_dir when store param is Store::LOCAL' do + expect(described_class.base_dir(group, ObjectStorage::Store::LOCAL)).to eq("uploads/-/system/namespace/#{group.id}") + end + + it 'returns remote base_dir when store param is Store::REMOTE' do + expect(described_class.base_dir(group, ObjectStorage::Store::REMOTE)).to eq("namespace/#{group.id}") + end + end + describe "#migrate!" do before do uploader.store!(fixture_file_upload(File.join('spec/fixtures/doc_sample.txt'))) diff --git a/spec/workers/auto_devops/disable_worker_spec.rb b/spec/workers/auto_devops/disable_worker_spec.rb new file mode 100644 index 00000000000..800a07e41a5 --- /dev/null +++ b/spec/workers/auto_devops/disable_worker_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true +require 'spec_helper' + +describe AutoDevops::DisableWorker, '#perform' do + let(:user) { create(:user) } + let(:project) { create(:project, :repository, :auto_devops) } + let(:auto_devops) { project.auto_devops } + let(:pipeline) { create(:ci_pipeline, :failed, :auto_devops_source, project: project, user: user) } + + subject { described_class.new } + + before do + stub_application_setting(auto_devops_enabled: true) + auto_devops.update_attribute(:enabled, nil) + end + + it 'disables auto devops for project' do + subject.perform(pipeline.id) + + expect(auto_devops.reload.enabled).to eq(false) + end + + context 'when project owner is a user' do + let(:owner) { create(:user) } + let(:namespace) { create(:namespace, owner: owner) } + let(:project) { create(:project, :repository, :auto_devops, namespace: namespace) } + + it 'sends an email to pipeline user and project owner' do + expect(NotificationService).to receive_message_chain(:new, :autodevops_disabled).with(pipeline, [user.email, owner.email]) + + subject.perform(pipeline.id) + end + end + + context 'when project does not have owner' do + let(:group) { create(:group) } + let(:project) { create(:project, :repository, :auto_devops, namespace: group) } + + it 'sends an email to pipeline user' do + expect(NotificationService).to receive_message_chain(:new, :autodevops_disabled).with(pipeline, [user.email]) + + subject.perform(pipeline.id) + end + end + + context 'when pipeline is not related to a user and project does not have owner' do + let(:group) { create(:group) } + let(:project) { create(:project, :repository, :auto_devops, namespace: group) } + let(:pipeline) { create(:ci_pipeline, :failed, project: project) } + 
+ it 'does not send an email' do + expect(NotificationService).not_to receive(:new) + + subject.perform(pipeline.id) + end + end +end diff --git a/spec/workers/delete_container_repository_worker_spec.rb b/spec/workers/delete_container_repository_worker_spec.rb new file mode 100644 index 00000000000..8c40611a959 --- /dev/null +++ b/spec/workers/delete_container_repository_worker_spec.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe DeleteContainerRepositoryWorker do + let(:registry) { create(:container_repository) } + let(:project) { registry.project } + let(:user) { project.owner } + + subject { described_class.new } + + describe '#perform' do + it 'executes the destroy service' do + service = instance_double(Projects::ContainerRepository::DestroyService) + expect(service).to receive(:execute) + expect(Projects::ContainerRepository::DestroyService).to receive(:new).with(project, user).and_return(service) + + subject.perform(user.id, registry.id) + end + + it 'does not raise error when user could not be found' do + expect do + subject.perform(-1, registry.id) + end.not_to raise_error + end + + it 'does not raise error when registry could not be found' do + expect do + subject.perform(user.id, -1) + end.not_to raise_error + end + end +end
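
A note on the `.change_due_date` expectations earlier in this diff: the note text interpolates `Date.today.to_s(:long)`, which is Rails' built-in `:long` date format. Under the Rails defaults that renders roughly like this (an illustration only, not part of the change):

    require 'active_support/all'

    # Rails' default :long format is "%B %e, %Y"
    Date.new(2018, 9, 11).to_s(:long) # => "September 11, 2018"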
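
The `Wikis::CreateAttachmentService` examples pin down the `#parse_file_name` sanitisation rules: whitespace and other characters that are invalid in an attachment name are replaced with `_`. A minimal sketch of a sanitiser with that behaviour, for illustration only; the service's real implementation and allow-list may differ:

    def sanitize_file_name(name)
      # collapse whitespace and any character outside a conservative allow-list into '_'
      name.gsub(/[^[:alnum:]._-]+/, '_')
    end

    sanitize_file_name('file with spaces')          # => "file_with_spaces"
    sanitize_file_name("file\twith\tinvalid chars") # => "file_with_invalid_chars"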
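
The new `label_note_shared_examples.rb` file is only half of the picture: a concrete spec has to supply the `let` definitions the shared examples reference (`resource`, `resource_parent`, `project`, `user`, `label`, `label2`). A hypothetical consumer for an issue-backed label note might look like this; the described class and factory choices are assumptions inferred from the shared example body:

    require 'spec_helper'

    describe LabelNote do # assumed concrete class for 'label note created from events'
      let(:project)         { create(:project, :repository) }
      let(:user)            { create(:user) }
      let(:resource)        { create(:issue, project: project) }
      let(:resource_parent) { project }
      let(:label)           { create(:label, project: project) }
      let(:label2)          { create(:label, project: project) }

      it_behaves_like 'label note created from events'
    end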
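
The `spec_helper.rb` hook that calls `Gitlab::CurrentSettings.clear_in_memory_application_settings!` after every example is what keeps per-example setting stubs, such as the `stub_application_setting(usage_ping_enabled: true)` added to the instance statistics shared examples, from leaking between specs. From a spec's point of view the pattern looks roughly like this, assuming the usual behaviour of the `stub_application_setting` helper:

    it 'sees the stubbed value only inside this example' do
      stub_application_setting(usage_ping_enabled: true)

      expect(Gitlab::CurrentSettings.usage_ping_enabled).to eq(true)
      # the global after(:example) hook then clears the memoised settings,
      # so later examples read the default values again
    end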
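
Finally, the `DisableQueryLimit` class added to `spec/support/sidekiq.rb` follows Sidekiq's server middleware contract: `call(worker_instance, msg, queue)` receives the worker instance, the job payload hash and the queue name, and must yield for the job to run. A sketch of how such a middleware is invoked per job; this assumes GitLab's test environment (where `Gitlab::QueryLimiting::Transaction` is available), and the worker and payload are placeholders:

    require 'sidekiq'

    class NoopWorker
      include Sidekiq::Worker

      def perform(*); end
    end

    worker = NoopWorker.new
    msg    = { 'class' => 'NoopWorker', 'args' => [], 'queue' => 'default' }

    # Sidekiq calls each server middleware with (worker, job_hash, queue) and a block
    DisableQueryLimit.new.call(worker, msg, msg['queue']) do
      worker.perform(*msg['args'])
    end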