author     Douwe Maan <douwe@gitlab.com>    2017-07-04 15:00:01 +0000
committer  Douwe Maan <douwe@gitlab.com>    2017-07-04 15:00:01 +0000
commit     5e2f7f25eb6ed1118cb541e43026915a7c4cdfef (patch)
tree       fbb893941818f2c9f0f1ce89dd5daf9d67ed00aa /spec
parent     afbc7520c296196d0f3f95d4a24a9e42c0e41f3c (diff)
parent     016b9f2565f85b9c77a5a779b64483ca1d4e1776 (diff)
download   gitlab-ce-5e2f7f25eb6ed1118cb541e43026915a7c4cdfef.tar.gz
Merge branch 'master' into '33580-fix-api-scoping'
# Conflicts:
#   lib/api/users.rb
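For orientation, a merge like the one recorded above is typically produced with plain git; a minimal sketch (the branch names come from the commit message, everything else is ordinary git usage rather than anything stated on this page):

    git checkout 33580-fix-api-scoping
    git merge master                  # stops on the conflict in lib/api/users.rb
    # resolve the conflict by hand, then stage and commit the result
    git add lib/api/users.rb
    git commit                        # records a two-parent merge commit such as 5e2f7f25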
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/abuse_reports_controller_spec.rb | 25
-rw-r--r--  spec/controllers/groups/milestones_controller_spec.rb | 15
-rw-r--r--  spec/controllers/projects/blob_controller_spec.rb | 2
-rw-r--r--  spec/controllers/projects/merge_requests/conflicts_controller_spec.rb | 307
-rw-r--r--  spec/controllers/projects/merge_requests/creations_controller_spec.rb | 120
-rw-r--r--  spec/controllers/projects/merge_requests/diffs_controller_spec.rb | 160
-rw-r--r--  spec/controllers/projects/merge_requests_controller_spec.rb | 608
-rw-r--r--  spec/controllers/projects/pipeline_schedules_controller_spec.rb | 53
-rw-r--r--  spec/factories/projects.rb | 2
-rw-r--r--  spec/features/abuse_report_spec.rb | 2
-rw-r--r--  spec/features/boards/sidebar_spec.rb | 16
-rw-r--r--  spec/features/dashboard/projects_spec.rb | 16
-rw-r--r--  spec/features/dashboard/todos/todos_spec.rb | 17
-rw-r--r--  spec/features/expand_collapse_diffs_spec.rb | 2
-rw-r--r--  spec/features/issuables/user_sees_sidebar_spec.rb | 30
-rw-r--r--  spec/features/issues/filtered_search/filter_issues_spec.rb | 2
-rw-r--r--  spec/features/issues/form_spec.rb | 4
-rw-r--r--  spec/features/issues/issue_sidebar_spec.rb | 14
-rw-r--r--  spec/features/issues/move_spec.rb | 5
-rw-r--r--  spec/features/issues/update_issues_spec.rb | 12
-rw-r--r--  spec/features/merge_requests/create_new_mr_spec.rb | 16
-rw-r--r--  spec/features/merge_requests/form_spec.rb | 4
-rw-r--r--  spec/features/merge_requests/user_uses_slash_commands_spec.rb | 2
-rw-r--r--  spec/features/merge_requests/widget_spec.rb | 2
-rw-r--r--  spec/features/merge_requests/wip_message_spec.rb | 4
-rw-r--r--  spec/features/projects/branches_spec.rb | 40
-rw-r--r--  spec/features/projects/environments/environment_metrics_spec.rb | 2
-rw-r--r--  spec/features/projects/import_export/import_file_spec.rb | 2
-rw-r--r--  spec/features/projects/merge_request_button_spec.rb | 4
-rw-r--r--  spec/features/projects/merge_requests/list_spec.rb | 2
-rw-r--r--  spec/features/projects/new_project_spec.rb | 89
-rw-r--r--  spec/features/projects/user_create_dir_spec.rb | 2
-rw-r--r--  spec/features/security/project/internal_access_spec.rb | 2
-rw-r--r--  spec/features/security/project/public_access_spec.rb | 2
-rw-r--r--  spec/features/user_can_display_performance_bar_spec.rb | 6
-rw-r--r--  spec/finders/issues_finder_spec.rb | 125
-rw-r--r--  spec/finders/labels_finder_spec.rb | 6
-rw-r--r--  spec/helpers/groups_helper_spec.rb | 2
-rw-r--r--  spec/helpers/issuables_helper_spec.rb | 57
-rw-r--r--  spec/helpers/milestones_helper_spec.rb | 36
-rw-r--r--  spec/helpers/submodule_helper_spec.rb | 5
-rw-r--r--  spec/javascripts/awards_handler_spec.js | 15
-rw-r--r--  spec/javascripts/filtered_search/dropdown_user_spec.js | 34
-rw-r--r--  spec/javascripts/fixtures/merge_requests.rb | 19
-rw-r--r--  spec/javascripts/fixtures/merge_requests_diffs.rb | 57
-rw-r--r--  spec/javascripts/issue_show/components/app_spec.js | 3
-rw-r--r--  spec/javascripts/lib/utils/dom_utils_spec.js | 35
-rw-r--r--  spec/javascripts/merge_request_notes_spec.js | 2
-rw-r--r--  spec/javascripts/merge_request_tabs_spec.js | 16
-rw-r--r--  spec/javascripts/monitoring/deployments_spec.js | 133
-rw-r--r--  spec/javascripts/monitoring/mock_data.js | 4229
-rw-r--r--  spec/javascripts/monitoring/monitoring_column_spec.js | 108
-rw-r--r--  spec/javascripts/monitoring/monitoring_deployment_spec.js | 137
-rw-r--r--  spec/javascripts/monitoring/monitoring_flag_spec.js | 76
-rw-r--r--  spec/javascripts/monitoring/monitoring_legends_spec.js | 111
-rw-r--r--  spec/javascripts/monitoring/monitoring_row_spec.js | 57
-rw-r--r--  spec/javascripts/monitoring/monitoring_spec.js | 49
-rw-r--r--  spec/javascripts/monitoring/monitoring_state_spec.js | 110
-rw-r--r--  spec/javascripts/monitoring/monitoring_store_spec.js | 24
-rw-r--r--  spec/javascripts/monitoring/prometheus_graph_spec.js | 98
-rw-r--r--  spec/javascripts/monitoring/prometheus_mock_data.js | 1014
-rw-r--r--  spec/javascripts/notes_spec.js | 45
-rw-r--r--  spec/lib/banzai/filter/external_issue_reference_filter_spec.rb | 4
-rw-r--r--  spec/lib/banzai/filter/issue_reference_filter_spec.rb | 25
-rw-r--r--  spec/lib/banzai/pipeline/gfm_pipeline_spec.rb | 33
-rw-r--r--  spec/lib/banzai/reference_parser/issue_parser_spec.rb | 10
-rw-r--r--  spec/lib/ci/gitlab_ci_yaml_processor_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/ci/config/entry/image_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/config/entry/service_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb | 78
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb | 76
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/database/sha_attribute_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/git/blame_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/git/branch_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/diff_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/hook_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 178
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/health_checks/fs_shards_check_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/import_export/repo_restorer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ldap/access_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/popen_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/shell_spec.rb | 87
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 3
-rw-r--r--  spec/migrations/rename_duplicated_variable_key_spec.rb | 34
-rw-r--r--  spec/models/ability_spec.rb | 11
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 6
-rw-r--r--  spec/models/ci/variable_spec.rb | 46
-rw-r--r--  spec/models/concerns/feature_gate_spec.rb | 19
-rw-r--r--  spec/models/concerns/has_variable_spec.rb | 43
-rw-r--r--  spec/models/concerns/routable_spec.rb | 13
-rw-r--r--  spec/models/concerns/sha_attribute_spec.rb | 27
-rw-r--r--  spec/models/environment_spec.rb | 27
-rw-r--r--  spec/models/forked_project_link_spec.rb | 76
-rw-r--r--  spec/models/merge_request_spec.rb | 16
-rw-r--r--  spec/models/namespace_spec.rb | 30
-rw-r--r--  spec/models/project_services/jira_service_spec.rb | 6
-rw-r--r--  spec/models/project_services/redmine_service_spec.rb | 4
-rw-r--r--  spec/models/project_spec.rb | 99
-rw-r--r--  spec/models/project_wiki_spec.rb | 18
-rw-r--r--  spec/models/repository_spec.rb | 15
-rw-r--r--  spec/models/user_spec.rb | 57
-rw-r--r--  spec/policies/base_policy_spec.rb | 6
-rw-r--r--  spec/policies/ci/build_policy_spec.rb | 28
-rw-r--r--  spec/policies/ci/trigger_policy_spec.rb | 14
-rw-r--r--  spec/policies/deploy_key_policy_spec.rb | 12
-rw-r--r--  spec/policies/environment_policy_spec.rb | 12
-rw-r--r--  spec/policies/global_policy_spec.rb | 34
-rw-r--r--  spec/policies/group_policy_spec.rb | 116
-rw-r--r--  spec/policies/issue_policy_spec.rb | 122
-rw-r--r--  spec/policies/personal_snippet_policy_spec.rb | 68
-rw-r--r--  spec/policies/project_policy_spec.rb | 117
-rw-r--r--  spec/policies/project_snippet_policy_spec.rb | 64
-rw-r--r--  spec/policies/user_policy_spec.rb | 12
-rw-r--r--  spec/requests/api/features_spec.rb | 178
-rw-r--r--  spec/requests/api/namespaces_spec.rb | 35
-rw-r--r--  spec/requests/api/projects_spec.rb | 3
-rw-r--r--  spec/requests/api/users_spec.rb | 55
-rw-r--r--  spec/requests/api/v3/projects_spec.rb | 3
-rw-r--r--  spec/routing/project_routing_spec.rb | 73
-rw-r--r--  spec/services/boards/issues/list_service_spec.rb | 2
-rw-r--r--  spec/services/delete_merged_branches_service_spec.rb | 8
-rw-r--r--  spec/services/git_hooks_service_spec.rb | 7
-rw-r--r--  spec/services/git_push_service_spec.rb | 12
-rw-r--r--  spec/services/groups/destroy_service_spec.rb | 52
-rw-r--r--  spec/services/notification_recipient_service_spec.rb | 34
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 6
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 36
-rw-r--r--  spec/support/fake_migration_classes.rb | 8
-rw-r--r--  spec/support/features/issuable_slash_commands_shared_examples.rb | 7
-rwxr-xr-x  spec/support/generate-seed-repo-rb | 2
-rw-r--r--  spec/support/gitlab-git-test.git/HEAD | 1
-rw-r--r--  spec/support/gitlab-git-test.git/README.md | 16
-rw-r--r--  spec/support/gitlab-git-test.git/config | 7
-rw-r--r--  spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.idx | bin 0 -> 5496 bytes
-rw-r--r--  spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.pack | bin 0 -> 381502 bytes
-rw-r--r--  spec/support/gitlab-git-test.git/packed-refs | 18
-rw-r--r--  spec/support/gitlab-git-test.git/refs/heads/.gitkeep | 0
-rw-r--r--  spec/support/gitlab-git-test.git/refs/tags/.gitkeep | 0
-rw-r--r--  spec/support/issue_tracker_service_shared_example.rb | 8
-rw-r--r--  spec/support/matchers/access_matchers_for_controller.rb | 84
-rw-r--r--  spec/support/matchers/be_utf8.rb | 9
-rwxr-xr-x  spec/support/prepare-gitlab-git-test-for-commit | 17
-rw-r--r--  spec/support/seed_helper.rb | 2
-rw-r--r--  spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb | 9
-rw-r--r--  spec/support/shared_examples/features/protected_branches_access_control_ce.rb (renamed from spec/support/protected_branches/access_control_ce_shared_examples.rb) | 2
-rw-r--r--  spec/support/stub_env.rb | 36
-rw-r--r--  spec/support/test_env.rb | 19
-rwxr-xr-x  spec/support/unpack-gitlab-git-test | 38
-rw-r--r--  spec/views/projects/merge_requests/_commits.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/merge_requests/creations/_new_submit.html.haml_spec.rb (renamed from spec/views/projects/merge_requests/_new_submit.html.haml_spec.rb) | 2
-rw-r--r--  spec/workers/post_receive_spec.rb | 38
155 files changed, 8190 insertions, 2784 deletions
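A diffstat equivalent to the one above can be regenerated from a clone of gitlab-ce; the sketch below assumes the page is diffing the merge commit against its first parent (the first parent SHA listed in the header), which is not stated explicitly here:

    git diff --stat afbc7520c296196d0f3f95d4a24a9e42c0e41f3c 5e2f7f25eb6ed1118cb541e43026915a7c4cdfef -- spec/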
diff --git a/spec/controllers/abuse_reports_controller_spec.rb b/spec/controllers/abuse_reports_controller_spec.rb
index 80a418feb3e..ada011e7595 100644
--- a/spec/controllers/abuse_reports_controller_spec.rb
+++ b/spec/controllers/abuse_reports_controller_spec.rb
@@ -13,6 +13,31 @@ describe AbuseReportsController do
sign_in(reporter)
end
+ describe 'GET new' do
+ context 'when the user has already been deleted' do
+ it 'redirects the reporter to root_path' do
+ user_id = user.id
+ user.destroy
+
+ get :new, { user_id: user_id }
+
+ expect(response).to redirect_to root_path
+ expect(flash[:alert]).to eq('Cannot create the abuse report. The user has been deleted.')
+ end
+ end
+
+ context 'when the user has already been blocked' do
+ it 'redirects the reporter to the user\'s profile' do
+ user.block
+
+ get :new, { user_id: user.id }
+
+ expect(response).to redirect_to user
+ expect(flash[:alert]).to eq('Cannot create the abuse report. This user has been blocked.')
+ end
+ end
+ end
+
describe 'POST create' do
context 'with valid attributes' do
it 'saves the abuse report' do
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index f3263bc177d..c6e5fb61cf9 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -23,6 +23,21 @@ describe Groups::MilestonesController do
project.team << [user, :master]
end
+ describe "#index" do
+ it 'shows group milestones page' do
+ get :index, group_id: group.to_param
+
+ expect(response).to have_http_status(200)
+ end
+
+ it 'shows group milestones JSON' do
+ get :index, group_id: group.to_param, format: :json
+
+ expect(response).to have_http_status(200)
+ expect(response.content_type).to eq 'application/json'
+ end
+ end
+
it_behaves_like 'milestone tabs'
describe "#create" do
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index c20cf6a4291..561bc219bb4 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -235,7 +235,7 @@ describe Projects::BlobController do
put :update, default_params
expect(response).to redirect_to(
- new_namespace_project_merge_request_path(
+ namespace_project_new_merge_request_path(
forked_project.namespace,
forked_project,
merge_request: {
diff --git a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
new file mode 100644
index 00000000000..9278ac8edd8
--- /dev/null
+++ b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
@@ -0,0 +1,307 @@
+require 'spec_helper'
+
+describe Projects::MergeRequests::ConflictsController do
+ let(:project) { create(:project) }
+ let(:user) { project.owner }
+ let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
+ let(:merge_request_with_conflicts) do
+ create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start', source_project: project) do |mr|
+ mr.mark_as_unmergeable
+ end
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET show' do
+ context 'when the conflicts cannot be resolved in the UI' do
+ before do
+ allow_any_instance_of(Gitlab::Conflict::Parser)
+ .to receive(:parse).and_raise(Gitlab::Conflict::Parser::UnmergeableFile)
+
+ get :show,
+ namespace_id: merge_request_with_conflicts.project.namespace.to_param,
+ project_id: merge_request_with_conflicts.project,
+ id: merge_request_with_conflicts.iid,
+ format: 'json'
+ end
+
+ it 'returns a 200 status code' do
+ expect(response).to have_http_status(:ok)
+ end
+
+ it 'returns JSON with a message' do
+ expect(json_response.keys).to contain_exactly('message', 'type')
+ end
+ end
+
+ context 'with valid conflicts' do
+ before do
+ get :show,
+ namespace_id: merge_request_with_conflicts.project.namespace.to_param,
+ project_id: merge_request_with_conflicts.project,
+ id: merge_request_with_conflicts.iid,
+ format: 'json'
+ end
+
+ it 'matches the schema' do
+ expect(response).to match_response_schema('conflicts')
+ end
+
+ it 'includes meta info about the MR' do
+ expect(json_response['commit_message']).to include('Merge branch')
+ expect(json_response['commit_sha']).to match(/\h{40}/)
+ expect(json_response['source_branch']).to eq(merge_request_with_conflicts.source_branch)
+ expect(json_response['target_branch']).to eq(merge_request_with_conflicts.target_branch)
+ end
+
+ it 'includes each file that has conflicts' do
+ filenames = json_response['files'].map { |file| file['new_path'] }
+
+ expect(filenames).to contain_exactly('files/ruby/popen.rb', 'files/ruby/regex.rb')
+ end
+
+ it 'splits files into sections with lines' do
+ json_response['files'].each do |file|
+ file['sections'].each do |section|
+ expect(section).to include('conflict', 'lines')
+
+ section['lines'].each do |line|
+ if section['conflict']
+ expect(line['type']).to be_in(%w(old new))
+ expect(line.values_at('old_line', 'new_line')).to contain_exactly(nil, a_kind_of(Integer))
+ else
+ if line['type'].nil?
+ expect(line['old_line']).not_to eq(nil)
+ expect(line['new_line']).not_to eq(nil)
+ else
+ expect(line['type']).to eq('match')
+ expect(line['old_line']).to eq(nil)
+ expect(line['new_line']).to eq(nil)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ it 'has unique section IDs across files' do
+ section_ids = json_response['files'].flat_map do |file|
+ file['sections'].map { |section| section['id'] }.compact
+ end
+
+ expect(section_ids.uniq).to eq(section_ids)
+ end
+ end
+ end
+
+ describe 'GET conflict_for_path' do
+ def conflict_for_path(path)
+ get :conflict_for_path,
+ namespace_id: merge_request_with_conflicts.project.namespace.to_param,
+ project_id: merge_request_with_conflicts.project,
+ id: merge_request_with_conflicts.iid,
+ old_path: path,
+ new_path: path,
+ format: 'json'
+ end
+
+ context 'when the conflicts cannot be resolved in the UI' do
+ before do
+ allow_any_instance_of(Gitlab::Conflict::Parser)
+ .to receive(:parse).and_raise(Gitlab::Conflict::Parser::UnmergeableFile)
+
+ conflict_for_path('files/ruby/regex.rb')
+ end
+
+ it 'returns a 404 status code' do
+ expect(response).to have_http_status(:not_found)
+ end
+ end
+
+ context 'when the file does not exist cannot be resolved in the UI' do
+ before do
+ conflict_for_path('files/ruby/regexp.rb')
+ end
+
+ it 'returns a 404 status code' do
+ expect(response).to have_http_status(:not_found)
+ end
+ end
+
+ context 'with an existing file' do
+ let(:path) { 'files/ruby/regex.rb' }
+
+ before do
+ conflict_for_path(path)
+ end
+
+ it 'returns a 200 status code' do
+ expect(response).to have_http_status(:ok)
+ end
+
+ it 'returns the file in JSON format' do
+ content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts)
+ .file_for_path(path, path)
+ .content
+
+ expect(json_response).to include('old_path' => path,
+ 'new_path' => path,
+ 'blob_icon' => 'file-text-o',
+ 'blob_path' => a_string_ending_with(path),
+ 'blob_ace_mode' => 'ruby',
+ 'content' => content)
+ end
+ end
+ end
+
+ context 'POST resolve_conflicts' do
+ let!(:original_head_sha) { merge_request_with_conflicts.diff_head_sha }
+
+ def resolve_conflicts(files)
+ post :resolve_conflicts,
+ namespace_id: merge_request_with_conflicts.project.namespace.to_param,
+ project_id: merge_request_with_conflicts.project,
+ id: merge_request_with_conflicts.iid,
+ format: 'json',
+ files: files,
+ commit_message: 'Commit message'
+ end
+
+ context 'with valid params' do
+ before do
+ resolved_files = [
+ {
+ 'new_path' => 'files/ruby/popen.rb',
+ 'old_path' => 'files/ruby/popen.rb',
+ 'sections' => {
+ '2f6fcd96b88b36ce98c38da085c795a27d92a3dd_14_14' => 'head'
+ }
+ }, {
+ 'new_path' => 'files/ruby/regex.rb',
+ 'old_path' => 'files/ruby/regex.rb',
+ 'sections' => {
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head',
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21' => 'origin',
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_49_49' => 'origin'
+ }
+ }
+ ]
+
+ resolve_conflicts(resolved_files)
+ end
+
+ it 'creates a new commit on the branch' do
+ expect(original_head_sha).not_to eq(merge_request_with_conflicts.source_branch_head.sha)
+ expect(merge_request_with_conflicts.source_branch_head.message).to include('Commit message')
+ end
+
+ it 'returns an OK response' do
+ expect(response).to have_http_status(:ok)
+ end
+ end
+
+ context 'when sections are missing' do
+ before do
+ resolved_files = [
+ {
+ 'new_path' => 'files/ruby/popen.rb',
+ 'old_path' => 'files/ruby/popen.rb',
+ 'sections' => {
+ '2f6fcd96b88b36ce98c38da085c795a27d92a3dd_14_14' => 'head'
+ }
+ }, {
+ 'new_path' => 'files/ruby/regex.rb',
+ 'old_path' => 'files/ruby/regex.rb',
+ 'sections' => {
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head'
+ }
+ }
+ ]
+
+ resolve_conflicts(resolved_files)
+ end
+
+ it 'returns a 400 error' do
+ expect(response).to have_http_status(:bad_request)
+ end
+
+ it 'has a message with the name of the first missing section' do
+ expect(json_response['message']).to include('6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21')
+ end
+
+ it 'does not create a new commit' do
+ expect(original_head_sha).to eq(merge_request_with_conflicts.source_branch_head.sha)
+ end
+ end
+
+ context 'when files are missing' do
+ before do
+ resolved_files = [
+ {
+ 'new_path' => 'files/ruby/regex.rb',
+ 'old_path' => 'files/ruby/regex.rb',
+ 'sections' => {
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head',
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21' => 'origin',
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_49_49' => 'origin'
+ }
+ }
+ ]
+
+ resolve_conflicts(resolved_files)
+ end
+
+ it 'returns a 400 error' do
+ expect(response).to have_http_status(:bad_request)
+ end
+
+ it 'has a message with the name of the missing file' do
+ expect(json_response['message']).to include('files/ruby/popen.rb')
+ end
+
+ it 'does not create a new commit' do
+ expect(original_head_sha).to eq(merge_request_with_conflicts.source_branch_head.sha)
+ end
+ end
+
+ context 'when a file has identical content to the conflict' do
+ before do
+ content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts)
+ .file_for_path('files/ruby/popen.rb', 'files/ruby/popen.rb')
+ .content
+
+ resolved_files = [
+ {
+ 'new_path' => 'files/ruby/popen.rb',
+ 'old_path' => 'files/ruby/popen.rb',
+ 'content' => content
+ }, {
+ 'new_path' => 'files/ruby/regex.rb',
+ 'old_path' => 'files/ruby/regex.rb',
+ 'sections' => {
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head',
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21' => 'origin',
+ '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_49_49' => 'origin'
+ }
+ }
+ ]
+
+ resolve_conflicts(resolved_files)
+ end
+
+ it 'returns a 400 error' do
+ expect(response).to have_http_status(:bad_request)
+ end
+
+ it 'has a message with the path of the problem file' do
+ expect(json_response['message']).to include('files/ruby/popen.rb')
+ end
+
+ it 'does not create a new commit' do
+ expect(original_head_sha).to eq(merge_request_with_conflicts.source_branch_head.sha)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/merge_requests/creations_controller_spec.rb b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
new file mode 100644
index 00000000000..f9d8f0f5fcf
--- /dev/null
+++ b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
@@ -0,0 +1,120 @@
+require 'spec_helper'
+
+describe Projects::MergeRequests::CreationsController do
+ let(:project) { create(:project) }
+ let(:user) { project.owner }
+ let(:fork_project) { create(:forked_project_with_submodules) }
+
+ before do
+ fork_project.team << [user, :master]
+
+ sign_in(user)
+ end
+
+ describe 'GET new' do
+ context 'merge request that removes a submodule' do
+ render_views
+
+ it 'renders new merge request widget template' do
+ get :new,
+ namespace_id: fork_project.namespace.to_param,
+ project_id: fork_project,
+ merge_request: {
+ source_branch: 'remove-submodule',
+ target_branch: 'master'
+ }
+
+ expect(response).to be_success
+ end
+ end
+ end
+
+ describe 'GET pipelines' do
+ before do
+ create(:ci_pipeline, sha: fork_project.commit('remove-submodule').id,
+ ref: 'remove-submodule',
+ project: fork_project)
+ end
+
+ it 'renders JSON including serialized pipelines' do
+ get :pipelines,
+ namespace_id: fork_project.namespace.to_param,
+ project_id: fork_project,
+ merge_request: {
+ source_branch: 'remove-submodule',
+ target_branch: 'master'
+ },
+ format: :json
+
+ expect(response).to be_ok
+ expect(json_response).to have_key 'pipelines'
+ expect(json_response['pipelines']).not_to be_empty
+ end
+ end
+
+ describe 'GET diff_for_path' do
+ def diff_for_path(extra_params = {})
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ format: 'json'
+ }
+
+ get :diff_for_path, params.merge(extra_params)
+ end
+
+ let(:existing_path) { 'files/ruby/feature.rb' }
+
+ context 'when both branches are in the same project' do
+ it 'disables diff notes' do
+ diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_branch: 'feature', target_branch: 'master' })
+
+ expect(assigns(:diff_notes_disabled)).to be_truthy
+ end
+
+ it 'only renders the diffs for the path given' do
+ expect(controller).to receive(:render_diff_for_path).and_wrap_original do |meth, diffs|
+ expect(diffs.diff_files.map(&:new_path)).to contain_exactly(existing_path)
+ meth.call(diffs)
+ end
+
+ diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_branch: 'feature', target_branch: 'master' })
+ end
+ end
+
+ context 'when the source branch is in a different project to the target' do
+ let(:other_project) { create(:project) }
+
+ before do
+ other_project.team << [user, :master]
+ end
+
+ context 'when the path exists in the diff' do
+ it 'disables diff notes' do
+ diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_project: other_project, source_branch: 'feature', target_branch: 'master' })
+
+ expect(assigns(:diff_notes_disabled)).to be_truthy
+ end
+
+ it 'only renders the diffs for the path given' do
+ expect(controller).to receive(:render_diff_for_path).and_wrap_original do |meth, diffs|
+ expect(diffs.diff_files.map(&:new_path)).to contain_exactly(existing_path)
+ meth.call(diffs)
+ end
+
+ diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_project: other_project, source_branch: 'feature', target_branch: 'master' })
+ end
+ end
+
+ context 'when the path does not exist in the diff' do
+ before do
+ diff_for_path(old_path: 'files/ruby/nopen.rb', new_path: 'files/ruby/nopen.rb', merge_request: { source_project: other_project, source_branch: 'feature', target_branch: 'master' })
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
new file mode 100644
index 00000000000..53fe2bdb189
--- /dev/null
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -0,0 +1,160 @@
+require 'spec_helper'
+
+describe Projects::MergeRequests::DiffsController do
+ let(:project) { create(:project) }
+ let(:user) { project.owner }
+ let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET show' do
+ def go(extra_params = {})
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid,
+ format: 'json'
+ }
+
+ get :show, params.merge(extra_params)
+ end
+
+ context 'with default params' do
+ context 'for the same project' do
+ before do
+ go
+ end
+
+ it 'renders the diffs template to a string' do
+ expect(response).to render_template('projects/merge_requests/diffs/_diffs')
+ expect(json_response).to have_key('html')
+ end
+ end
+
+ context 'with forked projects with submodules' do
+ render_views
+
+ let(:project) { create(:project) }
+ let(:fork_project) { create(:forked_project_with_submodules) }
+ let(:merge_request) { create(:merge_request_with_diffs, source_project: fork_project, source_branch: 'add-submodule-version-bump', target_branch: 'master', target_project: project) }
+
+ before do
+ fork_project.build_forked_project_link(forked_to_project_id: fork_project.id, forked_from_project_id: project.id)
+ fork_project.save
+ merge_request.reload
+ go
+ end
+
+ it 'renders' do
+ expect(response).to be_success
+ expect(response.body).to have_content('Subproject commit')
+ end
+ end
+ end
+
+ context 'with ignore_whitespace_change' do
+ before do
+ go(w: 1)
+ end
+
+ it 'renders the diffs template to a string' do
+ expect(response).to render_template('projects/merge_requests/diffs/_diffs')
+ expect(json_response).to have_key('html')
+ end
+ end
+
+ context 'with view' do
+ before do
+ go(view: 'parallel')
+ end
+
+ it 'saves the preferred diff view in a cookie' do
+ expect(response.cookies['diff_view']).to eq('parallel')
+ end
+ end
+ end
+
+ describe 'GET diff_for_path' do
+ def diff_for_path(extra_params = {})
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid,
+ format: 'json'
+ }
+
+ get :diff_for_path, params.merge(extra_params)
+ end
+
+ let(:existing_path) { 'files/ruby/popen.rb' }
+
+ context 'when the merge request exists' do
+ context 'when the user can view the merge request' do
+ context 'when the path exists in the diff' do
+ it 'enables diff notes' do
+ diff_for_path(old_path: existing_path, new_path: existing_path)
+
+ expect(assigns(:diff_notes_disabled)).to be_falsey
+ expect(assigns(:new_diff_note_attrs)).to eq(noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id)
+ end
+
+ it 'only renders the diffs for the path given' do
+ expect(controller).to receive(:render_diff_for_path).and_wrap_original do |meth, diffs|
+ expect(diffs.diff_files.map(&:new_path)).to contain_exactly(existing_path)
+ meth.call(diffs)
+ end
+
+ diff_for_path(old_path: existing_path, new_path: existing_path)
+ end
+ end
+
+ context 'when the path does not exist in the diff' do
+ before do
+ diff_for_path(old_path: 'files/ruby/nopen.rb', new_path: 'files/ruby/nopen.rb')
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ context 'when the user cannot view the merge request' do
+ before do
+ project.team.truncate
+ diff_for_path(old_path: existing_path, new_path: existing_path)
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ diff_for_path(id: merge_request.iid.succ, old_path: existing_path, new_path: existing_path)
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_http_status(404)
+ end
+ end
+
+ context 'when the merge request belongs to a different project' do
+ let(:other_project) { create(:empty_project) }
+
+ before do
+ other_project.team << [user, :master]
+ diff_for_path(old_path: existing_path, new_path: existing_path, project_id: other_project)
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 6817c2652fd..6f9ce60cf75 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -14,53 +14,6 @@ describe Projects::MergeRequestsController do
sign_in(user)
end
- describe 'GET new' do
- context 'merge request that removes a submodule' do
- render_views
-
- let(:fork_project) { create(:forked_project_with_submodules) }
-
- before do
- fork_project.team << [user, :master]
- end
-
- context 'when rendering HTML response' do
- it 'renders new merge request widget template' do
- submit_new_merge_request
-
- expect(response).to be_success
- end
- end
-
- context 'when rendering JSON response' do
- before do
- create(:ci_pipeline, sha: fork_project.commit('remove-submodule').id,
- ref: 'remove-submodule',
- project: fork_project)
- end
-
- it 'renders JSON including serialized pipelines' do
- submit_new_merge_request(format: :json)
-
- expect(response).to be_ok
- expect(json_response).to have_key 'pipelines'
- expect(json_response['pipelines']).not_to be_empty
- end
- end
- end
-
- def submit_new_merge_request(format: :html)
- get :new,
- namespace_id: fork_project.namespace.to_param,
- project_id: fork_project,
- merge_request: {
- source_branch: 'remove-submodule',
- target_branch: 'master'
- },
- format: format
- end
- end
-
describe 'GET commit_change_content' do
it 'renders commit_change_content template' do
get :commit_change_content,
@@ -497,234 +450,6 @@ describe Projects::MergeRequestsController do
end
end
- describe 'GET diffs' do
- def go(extra_params = {})
- params = {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid
- }
-
- get :diffs, params.merge(extra_params)
- end
-
- it_behaves_like "loads labels", :diffs
-
- context 'with default params' do
- context 'as html' do
- before do
- go(format: 'html')
- end
-
- it 'renders the diff template' do
- expect(response).to render_template('diffs')
- end
- end
-
- context 'as json' do
- before do
- go(format: 'json')
- end
-
- it 'renders the diffs template to a string' do
- expect(response).to render_template('projects/merge_requests/show/_diffs')
- expect(json_response).to have_key('html')
- end
- end
-
- context 'with forked projects with submodules' do
- render_views
-
- let(:project) { create(:project) }
- let(:fork_project) { create(:forked_project_with_submodules) }
- let(:merge_request) { create(:merge_request_with_diffs, source_project: fork_project, source_branch: 'add-submodule-version-bump', target_branch: 'master', target_project: project) }
-
- before do
- fork_project.build_forked_project_link(forked_to_project_id: fork_project.id, forked_from_project_id: project.id)
- fork_project.save
- merge_request.reload
- go(format: 'json')
- end
-
- it 'renders' do
- expect(response).to be_success
- expect(response.body).to have_content('Subproject commit')
- end
- end
- end
-
- context 'with ignore_whitespace_change' do
- context 'as html' do
- before do
- go(format: 'html', w: 1)
- end
-
- it 'renders the diff template' do
- expect(response).to render_template('diffs')
- end
- end
-
- context 'as json' do
- before do
- go(format: 'json', w: 1)
- end
-
- it 'renders the diffs template to a string' do
- expect(response).to render_template('projects/merge_requests/show/_diffs')
- expect(json_response).to have_key('html')
- end
- end
- end
-
- context 'with view' do
- before do
- go(view: 'parallel')
- end
-
- it 'saves the preferred diff view in a cookie' do
- expect(response.cookies['diff_view']).to eq('parallel')
- end
- end
- end
-
- describe 'GET diff_for_path' do
- def diff_for_path(extra_params = {})
- params = {
- namespace_id: project.namespace.to_param,
- project_id: project
- }
-
- get :diff_for_path, params.merge(extra_params)
- end
-
- context 'when an ID param is passed' do
- let(:existing_path) { 'files/ruby/popen.rb' }
-
- context 'when the merge request exists' do
- context 'when the user can view the merge request' do
- context 'when the path exists in the diff' do
- it 'enables diff notes' do
- diff_for_path(id: merge_request.iid, old_path: existing_path, new_path: existing_path)
-
- expect(assigns(:diff_notes_disabled)).to be_falsey
- expect(assigns(:new_diff_note_attrs)).to eq(noteable_type: 'MergeRequest',
- noteable_id: merge_request.id)
- end
-
- it 'only renders the diffs for the path given' do
- expect(controller).to receive(:render_diff_for_path).and_wrap_original do |meth, diffs|
- expect(diffs.diff_files.map(&:new_path)).to contain_exactly(existing_path)
- meth.call(diffs)
- end
-
- diff_for_path(id: merge_request.iid, old_path: existing_path, new_path: existing_path)
- end
- end
-
- context 'when the path does not exist in the diff' do
- before do
- diff_for_path(id: merge_request.iid, old_path: 'files/ruby/nopen.rb', new_path: 'files/ruby/nopen.rb')
- end
-
- it 'returns a 404' do
- expect(response).to have_http_status(404)
- end
- end
- end
-
- context 'when the user cannot view the merge request' do
- before do
- project.team.truncate
- diff_for_path(id: merge_request.iid, old_path: existing_path, new_path: existing_path)
- end
-
- it 'returns a 404' do
- expect(response).to have_http_status(404)
- end
- end
- end
-
- context 'when the merge request does not exist' do
- before do
- diff_for_path(id: merge_request.iid.succ, old_path: existing_path, new_path: existing_path)
- end
-
- it 'returns a 404' do
- expect(response).to have_http_status(404)
- end
- end
-
- context 'when the merge request belongs to a different project' do
- let(:other_project) { create(:empty_project) }
-
- before do
- other_project.team << [user, :master]
- diff_for_path(id: merge_request.iid, old_path: existing_path, new_path: existing_path, project_id: other_project)
- end
-
- it 'returns a 404' do
- expect(response).to have_http_status(404)
- end
- end
- end
-
- context 'when source and target params are passed' do
- let(:existing_path) { 'files/ruby/feature.rb' }
-
- context 'when both branches are in the same project' do
- it 'disables diff notes' do
- diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_branch: 'feature', target_branch: 'master' })
-
- expect(assigns(:diff_notes_disabled)).to be_truthy
- end
-
- it 'only renders the diffs for the path given' do
- expect(controller).to receive(:render_diff_for_path).and_wrap_original do |meth, diffs|
- expect(diffs.diff_files.map(&:new_path)).to contain_exactly(existing_path)
- meth.call(diffs)
- end
-
- diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_branch: 'feature', target_branch: 'master' })
- end
- end
-
- context 'when the source branch is in a different project to the target' do
- let(:other_project) { create(:project) }
-
- before do
- other_project.team << [user, :master]
- end
-
- context 'when the path exists in the diff' do
- it 'disables diff notes' do
- diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_project: other_project, source_branch: 'feature', target_branch: 'master' })
-
- expect(assigns(:diff_notes_disabled)).to be_truthy
- end
-
- it 'only renders the diffs for the path given' do
- expect(controller).to receive(:render_diff_for_path).and_wrap_original do |meth, diffs|
- expect(diffs.diff_files.map(&:new_path)).to contain_exactly(existing_path)
- meth.call(diffs)
- end
-
- diff_for_path(old_path: existing_path, new_path: existing_path, merge_request: { source_project: other_project, source_branch: 'feature', target_branch: 'master' })
- end
- end
-
- context 'when the path does not exist in the diff' do
- before do
- diff_for_path(old_path: 'files/ruby/nopen.rb', new_path: 'files/ruby/nopen.rb', merge_request: { source_project: other_project, source_branch: 'feature', target_branch: 'master' })
- end
-
- it 'returns a 404' do
- expect(response).to have_http_status(404)
- end
- end
- end
- end
- end
-
describe 'GET commits' do
def go(format: 'html')
get :commits,
@@ -734,23 +459,11 @@ describe Projects::MergeRequestsController do
format: format
end
- it_behaves_like "loads labels", :commits
+ it 'renders the commits template to a string' do
+ go format: 'json'
- context 'as html' do
- it 'renders the show template' do
- go
-
- expect(response).to render_template('show')
- end
- end
-
- context 'as json' do
- it 'renders the commits template to a string' do
- go format: 'json'
-
- expect(response).to render_template('projects/merge_requests/show/_commits')
- expect(json_response).to have_key('html')
- end
+ expect(response).to render_template('projects/merge_requests/_commits')
+ expect(json_response).to have_key('html')
end
end
@@ -759,106 +472,16 @@ describe Projects::MergeRequestsController do
create(:ci_pipeline, project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
- end
-
- context 'when using HTML format' do
- it_behaves_like "loads labels", :pipelines
- end
- context 'when using JSON format' do
- before do
- get :pipelines,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid,
- format: :json
- end
-
- it 'responds with serialized pipelines' do
- expect(json_response).not_to be_empty
- end
- end
- end
-
- describe 'GET conflicts' do
- context 'when the conflicts cannot be resolved in the UI' do
- before do
- allow_any_instance_of(Gitlab::Conflict::Parser)
- .to receive(:parse).and_raise(Gitlab::Conflict::Parser::UnmergeableFile)
-
- get :conflicts,
- namespace_id: merge_request_with_conflicts.project.namespace.to_param,
- project_id: merge_request_with_conflicts.project,
- id: merge_request_with_conflicts.iid,
- format: 'json'
- end
-
- it 'returns a 200 status code' do
- expect(response).to have_http_status(:ok)
- end
-
- it 'returns JSON with a message' do
- expect(json_response.keys).to contain_exactly('message', 'type')
- end
+ get :pipelines,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid,
+ format: :json
end
- context 'with valid conflicts' do
- before do
- get :conflicts,
- namespace_id: merge_request_with_conflicts.project.namespace.to_param,
- project_id: merge_request_with_conflicts.project,
- id: merge_request_with_conflicts.iid,
- format: 'json'
- end
-
- it 'matches the schema' do
- expect(response).to match_response_schema('conflicts')
- end
-
- it 'includes meta info about the MR' do
- expect(json_response['commit_message']).to include('Merge branch')
- expect(json_response['commit_sha']).to match(/\h{40}/)
- expect(json_response['source_branch']).to eq(merge_request_with_conflicts.source_branch)
- expect(json_response['target_branch']).to eq(merge_request_with_conflicts.target_branch)
- end
-
- it 'includes each file that has conflicts' do
- filenames = json_response['files'].map { |file| file['new_path'] }
-
- expect(filenames).to contain_exactly('files/ruby/popen.rb', 'files/ruby/regex.rb')
- end
-
- it 'splits files into sections with lines' do
- json_response['files'].each do |file|
- file['sections'].each do |section|
- expect(section).to include('conflict', 'lines')
-
- section['lines'].each do |line|
- if section['conflict']
- expect(line['type']).to be_in(%w(old new))
- expect(line.values_at('old_line', 'new_line')).to contain_exactly(nil, a_kind_of(Integer))
- else
- if line['type'].nil?
- expect(line['old_line']).not_to eq(nil)
- expect(line['new_line']).not_to eq(nil)
- else
- expect(line['type']).to eq('match')
- expect(line['old_line']).to eq(nil)
- expect(line['new_line']).to eq(nil)
- end
- end
- end
- end
- end
- end
-
- it 'has unique section IDs across files' do
- section_ids = json_response['files'].flat_map do |file|
- file['sections'].map { |section| section['id'] }.compact
- end
-
- expect(section_ids.uniq).to eq(section_ids)
- end
+ it 'responds with serialized pipelines' do
+ expect(json_response).not_to be_empty
end
end
@@ -913,215 +536,6 @@ describe Projects::MergeRequestsController do
end
end
- describe 'GET conflict_for_path' do
- def conflict_for_path(path)
- get :conflict_for_path,
- namespace_id: merge_request_with_conflicts.project.namespace.to_param,
- project_id: merge_request_with_conflicts.project,
- id: merge_request_with_conflicts.iid,
- old_path: path,
- new_path: path,
- format: 'json'
- end
-
- context 'when the conflicts cannot be resolved in the UI' do
- before do
- allow_any_instance_of(Gitlab::Conflict::Parser)
- .to receive(:parse).and_raise(Gitlab::Conflict::Parser::UnmergeableFile)
-
- conflict_for_path('files/ruby/regex.rb')
- end
-
- it 'returns a 404 status code' do
- expect(response).to have_http_status(:not_found)
- end
- end
-
- context 'when the file does not exist cannot be resolved in the UI' do
- before do
- conflict_for_path('files/ruby/regexp.rb')
- end
-
- it 'returns a 404 status code' do
- expect(response).to have_http_status(:not_found)
- end
- end
-
- context 'with an existing file' do
- let(:path) { 'files/ruby/regex.rb' }
-
- before do
- conflict_for_path(path)
- end
-
- it 'returns a 200 status code' do
- expect(response).to have_http_status(:ok)
- end
-
- it 'returns the file in JSON format' do
- content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts)
- .file_for_path(path, path)
- .content
-
- expect(json_response).to include('old_path' => path,
- 'new_path' => path,
- 'blob_icon' => 'file-text-o',
- 'blob_path' => a_string_ending_with(path),
- 'blob_ace_mode' => 'ruby',
- 'content' => content)
- end
- end
- end
-
- context 'POST resolve_conflicts' do
- let!(:original_head_sha) { merge_request_with_conflicts.diff_head_sha }
-
- def resolve_conflicts(files)
- post :resolve_conflicts,
- namespace_id: merge_request_with_conflicts.project.namespace.to_param,
- project_id: merge_request_with_conflicts.project,
- id: merge_request_with_conflicts.iid,
- format: 'json',
- files: files,
- commit_message: 'Commit message'
- end
-
- context 'with valid params' do
- before do
- resolved_files = [
- {
- 'new_path' => 'files/ruby/popen.rb',
- 'old_path' => 'files/ruby/popen.rb',
- 'sections' => {
- '2f6fcd96b88b36ce98c38da085c795a27d92a3dd_14_14' => 'head'
- }
- }, {
- 'new_path' => 'files/ruby/regex.rb',
- 'old_path' => 'files/ruby/regex.rb',
- 'sections' => {
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head',
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21' => 'origin',
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_49_49' => 'origin'
- }
- }
- ]
-
- resolve_conflicts(resolved_files)
- end
-
- it 'creates a new commit on the branch' do
- expect(original_head_sha).not_to eq(merge_request_with_conflicts.source_branch_head.sha)
- expect(merge_request_with_conflicts.source_branch_head.message).to include('Commit message')
- end
-
- it 'returns an OK response' do
- expect(response).to have_http_status(:ok)
- end
- end
-
- context 'when sections are missing' do
- before do
- resolved_files = [
- {
- 'new_path' => 'files/ruby/popen.rb',
- 'old_path' => 'files/ruby/popen.rb',
- 'sections' => {
- '2f6fcd96b88b36ce98c38da085c795a27d92a3dd_14_14' => 'head'
- }
- }, {
- 'new_path' => 'files/ruby/regex.rb',
- 'old_path' => 'files/ruby/regex.rb',
- 'sections' => {
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head'
- }
- }
- ]
-
- resolve_conflicts(resolved_files)
- end
-
- it 'returns a 400 error' do
- expect(response).to have_http_status(:bad_request)
- end
-
- it 'has a message with the name of the first missing section' do
- expect(json_response['message']).to include('6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21')
- end
-
- it 'does not create a new commit' do
- expect(original_head_sha).to eq(merge_request_with_conflicts.source_branch_head.sha)
- end
- end
-
- context 'when files are missing' do
- before do
- resolved_files = [
- {
- 'new_path' => 'files/ruby/regex.rb',
- 'old_path' => 'files/ruby/regex.rb',
- 'sections' => {
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head',
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21' => 'origin',
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_49_49' => 'origin'
- }
- }
- ]
-
- resolve_conflicts(resolved_files)
- end
-
- it 'returns a 400 error' do
- expect(response).to have_http_status(:bad_request)
- end
-
- it 'has a message with the name of the missing file' do
- expect(json_response['message']).to include('files/ruby/popen.rb')
- end
-
- it 'does not create a new commit' do
- expect(original_head_sha).to eq(merge_request_with_conflicts.source_branch_head.sha)
- end
- end
-
- context 'when a file has identical content to the conflict' do
- before do
- content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts)
- .file_for_path('files/ruby/popen.rb', 'files/ruby/popen.rb')
- .content
-
- resolved_files = [
- {
- 'new_path' => 'files/ruby/popen.rb',
- 'old_path' => 'files/ruby/popen.rb',
- 'content' => content
- }, {
- 'new_path' => 'files/ruby/regex.rb',
- 'old_path' => 'files/ruby/regex.rb',
- 'sections' => {
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_9_9' => 'head',
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21' => 'origin',
- '6eb14e00385d2fb284765eb1cd8d420d33d63fc9_49_49' => 'origin'
- }
- }
- ]
-
- resolve_conflicts(resolved_files)
- end
-
- it 'returns a 400 error' do
- expect(response).to have_http_status(:bad_request)
- end
-
- it 'has a message with the path of the problem file' do
- expect(json_response['message']).to include('files/ruby/popen.rb')
- end
-
- it 'does not create a new commit' do
- expect(original_head_sha).to eq(merge_request_with_conflicts.source_branch_head.sha)
- end
- end
- end
-
describe 'POST assign_related_issues' do
let(:issue1) { create(:issue, project: project) }
let(:issue2) { create(:issue, project: project) }
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index f8f95dd9bc8..a8c44d5c313 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -84,4 +84,57 @@ describe Projects::PipelineSchedulesController do
end
end
end
+
+ describe 'security' do
+ include AccessMatchersForController
+
+ describe 'GET edit' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_allowed_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ it { expect { go }.to be_denied_for(:visitor) }
+
+ def go
+ get :edit, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
+ end
+ end
+
+ describe 'GET take_ownership' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_allowed_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ it { expect { go }.to be_denied_for(:visitor) }
+
+ def go
+ post :take_ownership, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
+ end
+ end
+
+ describe 'PUT update' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_allowed_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ it { expect { go }.to be_denied_for(:visitor) }
+
+ def go
+ put :update, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id,
+ schedule: { description: 'a' }
+ end
+ end
+ end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index aef1c17a239..1bb2db11e7f 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -220,7 +220,7 @@ FactoryGirl.define do
active: true,
properties: {
'project_url' => 'http://redmine/projects/project_name_in_redmine',
- 'issues_url' => "http://redmine/#{project.id}/project_name_in_redmine/:id",
+ 'issues_url' => 'http://redmine/projects/project_name_in_redmine/issues/:id',
'new_issue_url' => 'http://redmine/projects/project_name_in_redmine/issues/new'
}
)
diff --git a/spec/features/abuse_report_spec.rb b/spec/features/abuse_report_spec.rb
index 5e6cd64c5c1..b88e801c3d7 100644
--- a/spec/features/abuse_report_spec.rb
+++ b/spec/features/abuse_report_spec.rb
@@ -12,7 +12,7 @@ feature 'Abuse reports', feature: true do
click_link 'Report abuse'
- fill_in 'abuse_report_message', with: 'This user send spam'
+ fill_in 'abuse_report_message', with: 'This user sends spam'
click_button 'Send report'
expect(page).to have_content 'Thank you for your report'
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 301c243febd..1c9595def21 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -79,6 +79,22 @@ describe 'Issue Boards', feature: true, js: true do
end
end
+ it 'does not show remove button for backlog or closed issues' do
+ create(:issue, project: project)
+ create(:issue, :closed, project: project)
+
+ visit namespace_project_board_path(project.namespace, project, board)
+ wait_for_requests
+
+ click_card(find('.board:nth-child(1)').first('.card'))
+
+ expect(find('.issue-boards-sidebar')).not_to have_button 'Remove from board'
+
+ click_card(find('.board:nth-child(3)').first('.card'))
+
+ expect(find('.issue-boards-sidebar')).not_to have_button 'Remove from board'
+ end
+
context 'assignee' do
it 'updates the issues assignee' do
click_card(card)
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index f29186f368d..e9ef5d7983a 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
-RSpec.describe 'Dashboard Projects', feature: true do
+feature 'Dashboard Projects' do
let(:user) { create(:user) }
- let(:project) { create(:project, name: "awesome stuff") }
+ let(:project) { create(:project, name: 'awesome stuff') }
let(:project2) { create(:project, :public, name: 'Community project') }
before do
@@ -15,6 +15,14 @@ RSpec.describe 'Dashboard Projects', feature: true do
expect(page).to have_content('awesome stuff')
end
+ it 'shows "New project" button' do
+ visit dashboard_projects_path
+
+ page.within '#content-body' do
+ expect(page).to have_link('New project')
+ end
+ end
+
context 'when last_repository_updated_at, last_activity_at and update_at are present' do
it 'shows the last_repository_updated_at attribute as the update date' do
project.update_attributes!(last_repository_updated_at: Time.now, last_activity_at: 1.hour.ago)
@@ -47,8 +55,8 @@ RSpec.describe 'Dashboard Projects', feature: true do
end
end
- describe "with a pipeline", redis: true do
- let!(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha) }
+ describe 'with a pipeline', redis: true do
+ let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha) }
before do
# Since the cache isn't updated when a new pipeline is created
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 24da5db305f..7fa4d198e00 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -317,23 +317,6 @@ feature 'Dashboard Todos' do
end
end
- context 'User has a Todo in a project pending deletion' do
- before do
- deleted_project = create(:project, :public, pending_delete: true)
- create(:todo, :mentioned, user: user, project: deleted_project, target: issue, author: author)
- create(:todo, :mentioned, user: user, project: deleted_project, target: issue, author: author, state: :done)
- sign_in(user)
- visit dashboard_todos_path
- end
-
- it 'shows "All done" message' do
- within('.todos-count') { expect(page).to have_content '0' }
- expect(page).to have_content 'To do 0'
- expect(page).to have_content 'Done 0'
- expect(page).to have_selector('.todos-all-done', count: 1)
- end
- end
-
context 'User has a Build Failed todo' do
let!(:todo) { create(:todo, :build_failed, user: user, project: project, author: author) }
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index ea749528c11..d492a15ea17 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -129,7 +129,7 @@ feature 'Expand and collapse diffs', js: true, feature: true do
before do
large_diff.find('.diff-line-num', match: :prefer_exact).hover
- large_diff.find('.add-diff-note').click
+ large_diff.find('.add-diff-note', match: :prefer_exact).click
large_diff.find('.note-textarea').send_keys comment_text
large_diff.find_button('Comment').click
wait_for_requests
diff --git a/spec/features/issuables/user_sees_sidebar_spec.rb b/spec/features/issuables/user_sees_sidebar_spec.rb
new file mode 100644
index 00000000000..4d7a7dc1806
--- /dev/null
+++ b/spec/features/issuables/user_sees_sidebar_spec.rb
@@ -0,0 +1,30 @@
+require 'rails_helper'
+
+describe 'Issue Sidebar on Mobile' do
+ include MobileHelpers
+
+ let(:project) { create(:project, :public) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:issue) { create(:issue, project: project) }
+ let!(:user) { create(:user)}
+
+ before do
+ sign_in(user)
+ end
+
+ context 'mobile sidebar on merge requests', js: true do
+ before do
+ visit namespace_project_merge_request_path(merge_request.project.namespace, merge_request.project, merge_request)
+ end
+
+ it_behaves_like "issue sidebar stays collapsed on mobile"
+ end
+
+ context 'mobile sidebar on issues', js: true do
+ before do
+ visit namespace_project_issue_path(project.namespace, project, issue)
+ end
+
+ it_behaves_like "issue sidebar stays collapsed on mobile"
+ end
+end
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index 863f8f75cd8..4cb728cc82b 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -459,7 +459,7 @@ describe 'Filter issues', js: true, feature: true do
context 'issue label clicked' do
before do
- find('.issues-list .issue .issue-info a .label', text: multiple_words_label.title).click
+ find('.issues-list .issue .issue-main-info .issuable-info a .label', text: multiple_words_label.title).click
end
it 'filters' do
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index b369ef1ff79..58f6bd277e4 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -31,8 +31,8 @@ describe 'New/edit issue', :feature, :js do
# the original method, resulting in infinite recursion when called.
# This is likely a bug with helper modules included into dynamically generated view classes.
# To work around this, we have to hold on to and call to the original implementation manually.
- original_issue_dropdown_options = FormHelper.instance_method(:issue_dropdown_options)
- allow_any_instance_of(FormHelper).to receive(:issue_dropdown_options).and_wrap_original do |original, *args|
+ original_issue_dropdown_options = FormHelper.instance_method(:issue_assignees_dropdown_options)
+ allow_any_instance_of(FormHelper).to receive(:issue_assignees_dropdown_options).and_wrap_original do |original, *args|
options = original_issue_dropdown_options.bind(original.receiver).call(*args)
options[:data][:per_page] = 2
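As an aside on the pattern exercised in the hunk above: the following is a minimal, standalone RSpec sketch of and_wrap_original, not GitLab's FormHelper; the Options class, its defaults method, and the per_page value are illustrative only. It shows the documented rspec-mocks behaviour the spec relies on: the block receives the original method object and the call arguments, delegates to the real implementation, and adjusts its result.

require 'rspec'

class Options
  def defaults
    { per_page: 20, sort: 'name' }
  end
end

RSpec.describe Options do
  it 'delegates to the original method and overrides one key' do
    options = Options.new

    # and_wrap_original yields the original bound method plus the call
    # arguments; calling it runs the real implementation, whose result can
    # then be merged over before it is returned to the caller.
    allow(options).to receive(:defaults).and_wrap_original do |original, *args|
      original.call(*args).merge(per_page: 2)
    end

    expect(options.defaults).to eq(per_page: 2, sort: 'name')
  end
end

With allow_any_instance_of, as the comment above notes, the yielded method can end up being the stub itself; that is why the spec saves FormHelper.instance_method(:issue_assignees_dropdown_options) beforehand and binds it to original.receiver instead of calling original directly.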
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 163bc4bb32f..09724781a27 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -154,20 +154,6 @@ feature 'Issue Sidebar', feature: true do
end
end
- context 'as a allowed mobile user', js: true do
- before do
- project.team << [user, :developer]
- resize_screen_xs
- visit_issue(project, issue)
- end
-
- context 'mobile sidebar' do
- it 'collapses the sidebar for small screens' do
- expect(page).not_to have_css('aside.right-sidebar.right-sidebar-collapsed')
- end
- end
- end
-
context 'as a guest' do
before do
project.team << [user, :guest]
diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb
index 21a7637fe7f..d6e78157e5f 100644
--- a/spec/features/issues/move_spec.rb
+++ b/spec/features/issues/move_spec.rb
@@ -41,13 +41,10 @@ feature 'issue move to another project' do
find('#issuable-move', visible: false).set(new_project.id)
click_button('Save changes')
- wait_for_requests
-
- expect(current_url).to include project_path(new_project)
-
expect(page).to have_content("Text with #{cross_reference}#{mr.to_reference}")
expect(page).to have_content("moved from #{cross_reference}#{issue.to_reference}")
expect(page).to have_content(issue.title)
+ expect(page.current_path).to include project_path(new_project)
end
scenario 'searching project dropdown', js: true do
diff --git a/spec/features/issues/update_issues_spec.rb b/spec/features/issues/update_issues_spec.rb
index dc981406e4e..df704b55839 100644
--- a/spec/features/issues/update_issues_spec.rb
+++ b/spec/features/issues/update_issues_spec.rb
@@ -1,16 +1,16 @@
require 'rails_helper'
-feature 'Multiple issue updating from issues#index', feature: true do
+feature 'Multiple issue updating from issues#index', :js do
let!(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
let!(:user) { create(:user)}
before do
project.team << [user, :master]
- gitlab_sign_in(user)
+ sign_in(user)
end
- context 'status', js: true do
+ context 'status' do
it 'sets to closed' do
visit namespace_project_issues_path(project.namespace, project)
@@ -37,7 +37,7 @@ feature 'Multiple issue updating from issues#index', feature: true do
end
end
- context 'assignee', js: true do
+ context 'assignee' do
it 'updates to current user' do
visit namespace_project_issues_path(project.namespace, project)
@@ -67,8 +67,8 @@ feature 'Multiple issue updating from issues#index', feature: true do
end
end
- context 'milestone', js: true do
- let(:milestone) { create(:milestone, project: project) }
+ context 'milestone' do
+ let!(:milestone) { create(:milestone, project: project) }
it 'updates milestone' do
visit namespace_project_issues_path(project.namespace, project)
diff --git a/spec/features/merge_requests/create_new_mr_spec.rb b/spec/features/merge_requests/create_new_mr_spec.rb
index 8f7adbccaaa..6a08e50bf5e 100644
--- a/spec/features/merge_requests/create_new_mr_spec.rb
+++ b/spec/features/merge_requests/create_new_mr_spec.rb
@@ -65,7 +65,7 @@ feature 'Create New Merge Request', feature: true, js: true do
it 'does not leak the private project name & namespace' do
private_project = create(:project, :private)
- visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { target_project_id: private_project.id })
+ visit namespace_project_new_merge_request_path(project.namespace, project, merge_request: { target_project_id: private_project.id })
expect(page).not_to have_content private_project.path_with_namespace
expect(page).to have_content project.path_with_namespace
@@ -76,7 +76,7 @@ feature 'Create New Merge Request', feature: true, js: true do
it 'does not leak the private project name & namespace' do
private_project = create(:project, :private)
- visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { source_project_id: private_project.id })
+ visit namespace_project_new_merge_request_path(project.namespace, project, merge_request: { source_project_id: private_project.id })
expect(page).not_to have_content private_project.path_with_namespace
expect(page).to have_content project.path_with_namespace
@@ -84,13 +84,13 @@ feature 'Create New Merge Request', feature: true, js: true do
end
it 'populates source branch button' do
- visit new_namespace_project_merge_request_path(project.namespace, project, change_branches: true, merge_request: { target_branch: 'master', source_branch: 'fix' })
+ visit namespace_project_new_merge_request_path(project.namespace, project, change_branches: true, merge_request: { target_branch: 'master', source_branch: 'fix' })
expect(find('.js-source-branch')).to have_content('fix')
end
it 'allows to change the diff view' do
- visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { target_branch: 'master', source_branch: 'fix' })
+ visit namespace_project_new_merge_request_path(project.namespace, project, merge_request: { target_branch: 'master', source_branch: 'fix' })
click_link 'Changes'
@@ -106,7 +106,7 @@ feature 'Create New Merge Request', feature: true, js: true do
end
it 'does not allow non-existing branches' do
- visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { target_branch: 'non-exist-target', source_branch: 'non-exist-source' })
+ visit namespace_project_new_merge_request_path(project.namespace, project, merge_request: { target_branch: 'non-exist-target', source_branch: 'non-exist-source' })
expect(page).to have_content('The form contains the following errors')
expect(page).to have_content('Source branch "non-exist-source" does not exist')
@@ -115,7 +115,7 @@ feature 'Create New Merge Request', feature: true, js: true do
context 'when a branch contains commits that both delete and add the same image' do
it 'renders the diff successfully' do
- visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { target_branch: 'master', source_branch: 'deleted-image-test' })
+ visit namespace_project_new_merge_request_path(project.namespace, project, merge_request: { target_branch: 'master', source_branch: 'deleted-image-test' })
click_link "Changes"
@@ -125,7 +125,7 @@ feature 'Create New Merge Request', feature: true, js: true do
# Isolates a regression (see #24627)
it 'does not show error messages on initial form' do
- visit new_namespace_project_merge_request_path(project.namespace, project)
+ visit namespace_project_new_merge_request_path(project.namespace, project)
expect(page).not_to have_selector('#error_explanation')
expect(page).not_to have_content('The form contains the following error')
end
@@ -138,7 +138,7 @@ feature 'Create New Merge Request', feature: true, js: true do
end
it 'shows pipelines for a new merge request' do
- visit new_namespace_project_merge_request_path(
+ visit namespace_project_new_merge_request_path(
project.namespace, project,
merge_request: { target_branch: 'master', source_branch: 'fix' })
diff --git a/spec/features/merge_requests/form_spec.rb b/spec/features/merge_requests/form_spec.rb
index 1996c2fa09a..d03d498ce21 100644
--- a/spec/features/merge_requests/form_spec.rb
+++ b/spec/features/merge_requests/form_spec.rb
@@ -23,7 +23,7 @@ describe 'New/edit merge request', feature: true, js: true do
context 'new merge request' do
before do
- visit new_namespace_project_merge_request_path(
+ visit namespace_project_new_merge_request_path(
project.namespace,
project,
merge_request: {
@@ -182,7 +182,7 @@ describe 'New/edit merge request', feature: true, js: true do
context 'new merge request' do
before do
- visit new_namespace_project_merge_request_path(
+ visit namespace_project_new_merge_request_path(
fork_project.namespace,
fork_project,
merge_request: {
diff --git a/spec/features/merge_requests/user_uses_slash_commands_spec.rb b/spec/features/merge_requests/user_uses_slash_commands_spec.rb
index 71aa71e380e..a1f123f15ec 100644
--- a/spec/features/merge_requests/user_uses_slash_commands_spec.rb
+++ b/spec/features/merge_requests/user_uses_slash_commands_spec.rb
@@ -131,7 +131,7 @@ feature 'Merge Requests > User uses quick actions', feature: true, js: true do
end
it 'changes target_branch in new merge_request' do
- visit new_namespace_project_merge_request_path(another_project.namespace, another_project, new_url_opts)
+ visit namespace_project_new_merge_request_path(another_project.namespace, another_project, new_url_opts)
fill_in "merge_request_title", with: 'My brand new feature'
fill_in "merge_request_description", with: "le feature \n/target_branch fix\nFeature description:"
diff --git a/spec/features/merge_requests/widget_spec.rb b/spec/features/merge_requests/widget_spec.rb
index 3ac1f603de6..d8e9b949204 100644
--- a/spec/features/merge_requests/widget_spec.rb
+++ b/spec/features/merge_requests/widget_spec.rb
@@ -12,7 +12,7 @@ describe 'Merge request', :feature, :js do
context 'new merge request' do
before do
- visit new_namespace_project_merge_request_path(
+ visit namespace_project_new_merge_request_path(
project.namespace,
project,
merge_request: {
diff --git a/spec/features/merge_requests/wip_message_spec.rb b/spec/features/merge_requests/wip_message_spec.rb
index 72d001bf408..0e304ba50af 100644
--- a/spec/features/merge_requests/wip_message_spec.rb
+++ b/spec/features/merge_requests/wip_message_spec.rb
@@ -11,7 +11,7 @@ feature 'Work In Progress help message', feature: true do
context 'with WIP commits' do
it 'shows a specific WIP hint' do
- visit new_namespace_project_merge_request_path(
+ visit namespace_project_new_merge_request_path(
project.namespace,
project,
merge_request: {
@@ -32,7 +32,7 @@ feature 'Work In Progress help message', feature: true do
context 'without WIP commits' do
it 'shows the regular WIP message' do
- visit new_namespace_project_merge_request_path(
+ visit namespace_project_new_merge_request_path(
project.namespace,
project,
merge_request: {
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 8694366de35..0050864d305 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -21,10 +21,48 @@ describe 'Branches', feature: true do
it 'shows all the branches' do
visit namespace_project_branches_path(project.namespace, project)
- repository.branches { |branch| expect(page).to have_content("#{branch.name}") }
+ repository.branches_sorted_by(:name).first(20).each do |branch|
+ expect(page).to have_content("#{branch.name}")
+ end
expect(page).to have_content("Protected branches can be managed in project settings")
end
+ it 'sorts the branches by name' do
+ visit namespace_project_branches_path(project.namespace, project)
+
+ click_button "Name" # Open sorting dropdown
+ click_link "Name"
+
+ sorted = repository.branches_sorted_by(:name).first(20).map do |branch|
+ Regexp.escape(branch.name)
+ end
+ expect(page).to have_content(/#{sorted.join(".*")}/)
+ end
+
+ it 'sorts the branches by last updated' do
+ visit namespace_project_branches_path(project.namespace, project)
+
+ click_button "Name" # Open sorting dropdown
+ click_link "Last updated"
+
+ sorted = repository.branches_sorted_by(:updated_desc).first(20).map do |branch|
+ Regexp.escape(branch.name)
+ end
+ expect(page).to have_content(/#{sorted.join(".*")}/)
+ end
+
+ it 'sorts the branches by oldest updated' do
+ visit namespace_project_branches_path(project.namespace, project)
+
+ click_button "Name" # Open sorting dropdown
+ click_link "Oldest updated"
+
+ sorted = repository.branches_sorted_by(:updated_asc).first(20).map do |branch|
+ Regexp.escape(branch.name)
+ end
+ expect(page).to have_content(/#{sorted.join(".*")}/)
+ end
+
it 'avoids a N+1 query in branches index' do
control_count = ActiveRecord::QueryRecorder.new { visit namespace_project_branches_path(project.namespace, project) }.count
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
index b48dcf6c774..a98a69a0fd6 100644
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ b/spec/features/projects/environments/environment_metrics_spec.rb
@@ -27,7 +27,7 @@ feature 'Environment > Metrics', :feature do
scenario 'shows metrics' do
click_link('See metrics')
- expect(page).to have_css('svg.prometheus-graph')
+ expect(page).to have_css('div#prometheus-graphs')
end
end
diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb
index a111aa87c52..3f8d2255298 100644
--- a/spec/features/projects/import_export/import_file_spec.rb
+++ b/spec/features/projects/import_export/import_file_spec.rb
@@ -98,6 +98,6 @@ feature 'Import/Export - project import integration test', feature: true, js: tr
end
def project_hook_exists?(project)
- Gitlab::Git::Hook.new('post-receive', project.repository.path).exists?
+ Gitlab::Git::Hook.new('post-receive', project).exists?
end
end
diff --git a/spec/features/projects/merge_request_button_spec.rb b/spec/features/projects/merge_request_button_spec.rb
index 6de8855016d..58054bbbbed 100644
--- a/spec/features/projects/merge_request_button_spec.rb
+++ b/spec/features/projects/merge_request_button_spec.rb
@@ -23,7 +23,7 @@ feature 'Merge Request button', feature: true do
end
it 'shows Create merge request button' do
- href = new_namespace_project_merge_request_path(project.namespace,
+ href = namespace_project_new_merge_request_path(project.namespace,
project,
merge_request: { source_branch: 'feature',
target_branch: 'master' })
@@ -67,7 +67,7 @@ feature 'Merge Request button', feature: true do
let(:user) { forked_project.owner }
it 'shows Create merge request button' do
- href = new_namespace_project_merge_request_path(forked_project.namespace,
+ href = namespace_project_new_merge_request_path(forked_project.namespace,
forked_project,
merge_request: { source_branch: 'feature',
target_branch: 'master' })
diff --git a/spec/features/projects/merge_requests/list_spec.rb b/spec/features/projects/merge_requests/list_spec.rb
index f2a2fd0311f..7ce3156215a 100644
--- a/spec/features/projects/merge_requests/list_spec.rb
+++ b/spec/features/projects/merge_requests/list_spec.rb
@@ -27,7 +27,7 @@ feature 'Merge Requests List' do
it 'empty state should have a create merge request button' do
visit namespace_project_merge_requests_path(project.namespace, project)
- expect(page).to have_link 'New merge request', href: new_namespace_project_merge_request_path(project.namespace, project)
+ expect(page).to have_link 'New merge request', href: namespace_project_new_merge_request_path(project.namespace, project)
end
context 'if there are merge requests' do
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 37d9a97033b..22fb1223739 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -1,13 +1,27 @@
-require "spec_helper"
+require 'spec_helper'
-feature "New project", feature: true do
+feature 'New project' do
let(:user) { create(:admin) }
before do
- gitlab_sign_in(user)
+ sign_in(user)
end
- context "Visibility level selector" do
+ it 'shows "New project" page' do
+ visit new_project_path
+
+ expect(page).to have_content('Project path')
+ expect(page).to have_content('Project name')
+
+ expect(page).to have_link('GitHub')
+ expect(page).to have_link('Bitbucket')
+ expect(page).to have_link('GitLab.com')
+ expect(page).to have_link('Google Code')
+ expect(page).to have_button('Repo by URL')
+ expect(page).to have_link('GitLab export')
+ end
+
+ context 'Visibility level selector' do
Gitlab::VisibilityLevel.options.each do |key, level|
it "sets selector to #{key}" do
stub_application_setting(default_project_visibility: level)
@@ -28,20 +42,20 @@ feature "New project", feature: true do
end
end
- context "Namespace selector" do
- context "with user namespace" do
+ context 'Namespace selector' do
+ context 'with user namespace' do
before do
visit new_project_path
end
- it "selects the user namespace" do
- namespace = find("#project_namespace_id")
+ it 'selects the user namespace' do
+ namespace = find('#project_namespace_id')
expect(namespace.text).to eq user.username
end
end
- context "with group namespace" do
+ context 'with group namespace' do
let(:group) { create(:group, :private, owner: user) }
before do
@@ -49,13 +63,13 @@ feature "New project", feature: true do
visit new_project_path(namespace_id: group.id)
end
- it "selects the group namespace" do
- namespace = find("#project_namespace_id option[selected]")
+ it 'selects the group namespace' do
+ namespace = find('#project_namespace_id option[selected]')
expect(namespace.text).to eq group.name
end
- context "on validation error" do
+ context 'on validation error' do
before do
fill_in('project_path', with: 'private-group-project')
choose('Internal')
@@ -64,15 +78,15 @@ feature "New project", feature: true do
expect(page).to have_css '.project-edit-errors .alert.alert-danger'
end
- it "selects the group namespace" do
- namespace = find("#project_namespace_id option[selected]")
+ it 'selects the group namespace' do
+ namespace = find('#project_namespace_id option[selected]')
expect(namespace.text).to eq group.name
end
end
end
- context "with subgroup namespace" do
+ context 'with subgroup namespace' do
let(:group) { create(:group, :private, owner: user) }
let(:subgroup) { create(:group, parent: group) }
@@ -81,8 +95,8 @@ feature "New project", feature: true do
visit new_project_path(namespace_id: subgroup.id)
end
- it "selects the group namespace" do
- namespace = find("#project_namespace_id option[selected]")
+ it 'selects the group namespace' do
+ namespace = find('#project_namespace_id option[selected]')
expect(namespace.text).to eq subgroup.full_path
end
@@ -94,10 +108,45 @@ feature "New project", feature: true do
visit new_project_path
end
- it 'does not autocomplete sensitive git repo URL' do
- autocomplete = find('#project_import_url')['autocomplete']
+ context 'from git repository url' do
+ before do
+ first('.import_git').click
+ end
+
+ it 'does not autocomplete sensitive git repo URL' do
+ autocomplete = find('#project_import_url')['autocomplete']
+
+ expect(autocomplete).to eq('off')
+ end
+
+ it 'shows import instructions' do
+ git_import_instructions = first('.js-toggle-content')
- expect(autocomplete).to eq('off')
+ expect(git_import_instructions).to be_visible
+ expect(git_import_instructions).to have_content 'Git repository URL'
+ end
+ end
+
+ context 'from GitHub' do
+ before do
+ first('.import_github').click
+ end
+
+ it 'shows import instructions' do
+ expect(page).to have_content('Import Projects from GitHub')
+ expect(current_path).to eq new_import_github_path
+ end
+ end
+
+ context 'from Google Code' do
+ before do
+ first('.import_google_code').click
+ end
+
+ it 'shows import instructions' do
+ expect(page).to have_content('Import projects from Google Code')
+ expect(current_path).to eq new_import_google_code_path
+ end
end
end
end
diff --git a/spec/features/projects/user_create_dir_spec.rb b/spec/features/projects/user_create_dir_spec.rb
index f375e1215db..5d0acad3832 100644
--- a/spec/features/projects/user_create_dir_spec.rb
+++ b/spec/features/projects/user_create_dir_spec.rb
@@ -51,7 +51,7 @@ feature 'New directory creation', feature: true, js: true do
expect(page).to have_content 'New Merge Request'
expect(page).to have_content "From #{new_branch_name} into master"
expect(page).to have_content 'Add new directory'
- expect(current_path).to eq(new_namespace_project_merge_request_path(project.namespace, project))
+ expect(current_path).to eq(namespace_project_new_merge_request_path(project.namespace, project))
end
end
end
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index f33406a40a7..5e26b8bbed6 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -239,7 +239,7 @@ describe "Internal Project Access", feature: true do
end
describe "GET /:project_path/merge_requests/new" do
- subject { new_namespace_project_merge_request_path(project.namespace, project) }
+ subject { namespace_project_new_merge_request_path(project.namespace, project) }
it { is_expected.to be_allowed_for(:admin) }
it { is_expected.to be_allowed_for(:owner).of(project) }
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index 16a1331b2f3..59655b0c31a 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -452,7 +452,7 @@ describe "Public Project Access", feature: true do
end
describe "GET /:project_path/merge_requests/new" do
- subject { new_namespace_project_merge_request_path(project.namespace, project) }
+ subject { namespace_project_new_merge_request_path(project.namespace, project) }
it { is_expected.to be_allowed_for(:admin) }
it { is_expected.to be_allowed_for(:owner).of(project) }
diff --git a/spec/features/user_can_display_performance_bar_spec.rb b/spec/features/user_can_display_performance_bar_spec.rb
index 1bd7e038939..24fff1a3052 100644
--- a/spec/features/user_can_display_performance_bar_spec.rb
+++ b/spec/features/user_can_display_performance_bar_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'User can display performacne bar', :js do
+describe 'User can display performance bar', :js do
shared_examples 'performance bar is disabled' do
it 'does not show the performance bar by default' do
expect(page).not_to have_css('#peek')
@@ -27,8 +27,8 @@ describe 'User can display performacne bar', :js do
find('body').native.send_keys('pb')
end
- it 'does not show the performance bar by default' do
- expect(page).not_to have_css('#peek')
+ it 'shows the performance bar' do
+ expect(page).to have_css('#peek')
end
end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 8ace1fb5751..4a52f0d5c58 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -295,22 +295,121 @@ describe IssuesFinder do
end
end
- describe '.not_restricted_by_confidentiality' do
- let(:authorized_user) { create(:user) }
- let(:project) { create(:empty_project, namespace: authorized_user.namespace) }
- let!(:public_issue) { create(:issue, project: project) }
- let!(:confidential_issue) { create(:issue, project: project, confidential: true) }
-
- it 'returns non confidential issues for nil user' do
- expect(described_class.send(:not_restricted_by_confidentiality, nil)).to include(public_issue)
- end
+ describe '#with_confidentiality_access_check' do
+ let(:guest) { create(:user) }
+ set(:authorized_user) { create(:user) }
+ set(:project) { create(:empty_project, namespace: authorized_user.namespace) }
+ set(:public_issue) { create(:issue, project: project) }
+ set(:confidential_issue) { create(:issue, project: project, confidential: true) }
+
+ context 'when no project filter is given' do
+ let(:params) { {} }
+
+ context 'for an anonymous user' do
+ subject { described_class.new(nil, params).with_confidentiality_access_check }
+
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
+ end
+
+ context 'for a user without project membership' do
+ subject { described_class.new(user, params).with_confidentiality_access_check }
+
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
+ end
+
+ context 'for a guest user' do
+ subject { described_class.new(guest, params).with_confidentiality_access_check }
+
+ before do
+ project.add_guest(guest)
+ end
+
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
+ end
+
+ context 'for a project member with access to view confidential issues' do
+ subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
- it 'returns non confidential issues for user not authorized for the issues projects' do
- expect(described_class.send(:not_restricted_by_confidentiality, user)).to include(public_issue)
+ it 'returns all issues' do
+ expect(subject).to include(public_issue, confidential_issue)
+ end
+ end
end
- it 'returns all issues for user authorized for the issues projects' do
- expect(described_class.send(:not_restricted_by_confidentiality, authorized_user)).to include(public_issue, confidential_issue)
+ context 'when searching within a specific project' do
+ let(:params) { { project_id: project.id } }
+
+ context 'for an anonymous user' do
+ subject { described_class.new(nil, params).with_confidentiality_access_check }
+
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
+
+ it 'does not filter by confidentiality' do
+ expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
+ end
+
+ context 'for a user without project membership' do
+ subject { described_class.new(user, params).with_confidentiality_access_check }
+
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
+
+ it 'filters by confidentiality' do
+ expect(Issue).to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
+ end
+
+ context 'for a guest user' do
+ subject { described_class.new(guest, params).with_confidentiality_access_check }
+
+ before do
+ project.add_guest(guest)
+ end
+
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
+
+ it 'filters by confidentiality' do
+ expect(Issue).to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
+ end
+
+ context 'for a project member with access to view confidential issues' do
+ subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
+
+ it 'returns all issues' do
+ expect(subject).to include(public_issue, confidential_issue)
+ end
+
+ it 'does not filter by confidentiality' do
+ expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/finders/labels_finder_spec.rb b/spec/finders/labels_finder_spec.rb
index 1724cdba830..95d96354b77 100644
--- a/spec/finders/labels_finder_spec.rb
+++ b/spec/finders/labels_finder_spec.rb
@@ -49,12 +49,12 @@ describe LabelsFinder do
end
context 'filtering by group_id' do
- it 'returns labels available for any project within the group' do
+ it 'returns labels available for any non-archived project within the group' do
group_1.add_developer(user)
-
+ project_1.archive!
finder = described_class.new(user, group_id: group_1.id)
- expect(finder.execute).to eq [group_label_2, project_label_1, group_label_1, project_label_5]
+ expect(finder.execute).to eq [group_label_2, group_label_1, project_label_5]
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 8da22dc78fa..e3f9d9db9eb 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -91,7 +91,7 @@ describe GroupsHelper do
let!(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
it 'outputs the groups in the correct order' do
- expect(group_title(very_deep_nested_group)).to match(/>#{group.name}<\/a>.*>#{nested_group.name}<\/a>.*>#{deep_nested_group.name}<\/a>/)
+ expect(helper.group_title(very_deep_nested_group)).to match(/>#{group.name}<\/a>.*>#{nested_group.name}<\/a>.*>#{deep_nested_group.name}<\/a>/)
end
end
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 15cb620199d..d2e918ef014 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -77,54 +77,89 @@ describe IssuablesHelper do
}.with_indifferent_access
end
+ let(:issues_finder) { IssuesFinder.new(nil, params) }
+ let(:merge_requests_finder) { MergeRequestsFinder.new(nil, params) }
+
+ before do
+ allow(helper).to receive(:issues_finder).and_return(issues_finder)
+ allow(helper).to receive(:merge_requests_finder).and_return(merge_requests_finder)
+ end
+
it 'returns the cached value when called for the same issuable type & with the same params' do
- expect(helper).to receive(:params).twice.and_return(params)
- expect(helper).to receive(:issuables_count_for_state).with(:issues, :opened).and_return(42)
+ expect(issues_finder).to receive(:count_by_state).and_return(opened: 42)
expect(helper.issuables_state_counter_text(:issues, :opened))
.to eq('<span>Open</span> <span class="badge">42</span>')
- expect(helper).not_to receive(:issuables_count_for_state)
+ expect(issues_finder).not_to receive(:count_by_state)
expect(helper.issuables_state_counter_text(:issues, :opened))
.to eq('<span>Open</span> <span class="badge">42</span>')
end
+ it 'takes confidential status into account when searching for issues' do
+ expect(issues_finder).to receive(:count_by_state).and_return(opened: 42)
+
+ expect(helper.issuables_state_counter_text(:issues, :opened))
+ .to include('42')
+
+ expect(issues_finder).to receive(:user_cannot_see_confidential_issues?).twice.and_return(false)
+ expect(issues_finder).to receive(:count_by_state).and_return(opened: 40)
+
+ expect(helper.issuables_state_counter_text(:issues, :opened))
+ .to include('40')
+
+ expect(issues_finder).to receive(:user_can_see_all_confidential_issues?).and_return(true)
+ expect(issues_finder).to receive(:count_by_state).and_return(opened: 45)
+
+ expect(helper.issuables_state_counter_text(:issues, :opened))
+ .to include('45')
+ end
+
+ it 'does not take confidential status into account when searching for merge requests' do
+ expect(merge_requests_finder).to receive(:count_by_state).and_return(opened: 42)
+ expect(merge_requests_finder).not_to receive(:user_cannot_see_confidential_issues?)
+ expect(merge_requests_finder).not_to receive(:user_can_see_all_confidential_issues?)
+
+ expect(helper.issuables_state_counter_text(:merge_requests, :opened))
+ .to include('42')
+ end
+
it 'does not take some keys into account in the cache key' do
- expect(helper).to receive(:params).and_return({
+ expect(issues_finder).to receive(:count_by_state).and_return(opened: 42)
+ expect(issues_finder).to receive(:params).and_return({
author_id: '11',
state: 'foo',
sort: 'foo',
utf8: 'foo',
page: 'foo'
}.with_indifferent_access)
- expect(helper).to receive(:issuables_count_for_state).with(:issues, :opened).and_return(42)
expect(helper.issuables_state_counter_text(:issues, :opened))
.to eq('<span>Open</span> <span class="badge">42</span>')
- expect(helper).to receive(:params).and_return({
+ expect(issues_finder).not_to receive(:count_by_state)
+ expect(issues_finder).to receive(:params).and_return({
author_id: '11',
state: 'bar',
sort: 'bar',
utf8: 'bar',
page: 'bar'
}.with_indifferent_access)
- expect(helper).not_to receive(:issuables_count_for_state)
expect(helper.issuables_state_counter_text(:issues, :opened))
.to eq('<span>Open</span> <span class="badge">42</span>')
end
it 'does not take params order into account in the cache key' do
- expect(helper).to receive(:params).and_return('author_id' => '11', 'state' => 'opened')
- expect(helper).to receive(:issuables_count_for_state).with(:issues, :opened).and_return(42)
+ expect(issues_finder).to receive(:params).and_return('author_id' => '11', 'state' => 'opened')
+ expect(issues_finder).to receive(:count_by_state).and_return(opened: 42)
expect(helper.issuables_state_counter_text(:issues, :opened))
.to eq('<span>Open</span> <span class="badge">42</span>')
- expect(helper).to receive(:params).and_return('state' => 'opened', 'author_id' => '11')
- expect(helper).not_to receive(:issuables_count_for_state)
+ expect(issues_finder).to receive(:params).and_return('state' => 'opened', 'author_id' => '11')
+ expect(issues_finder).not_to receive(:count_by_state)
expect(helper.issuables_state_counter_text(:issues, :opened))
.to eq('<span>Open</span> <span class="badge">42</span>')
diff --git a/spec/helpers/milestones_helper_spec.rb b/spec/helpers/milestones_helper_spec.rb
index 3cb809d42b5..24d4f1b4938 100644
--- a/spec/helpers/milestones_helper_spec.rb
+++ b/spec/helpers/milestones_helper_spec.rb
@@ -1,6 +1,42 @@
require 'spec_helper'
describe MilestonesHelper do
+ describe '#milestones_filter_dropdown_path' do
+ let(:project) { create(:empty_project) }
+ let(:project2) { create(:empty_project) }
+ let(:group) { create(:group) }
+
+ context 'when @project present' do
+ it 'returns project milestones JSON URL' do
+ assign(:project, project)
+
+ expect(helper.milestones_filter_dropdown_path).to eq(namespace_project_milestones_path(project.namespace, project, :json))
+ end
+ end
+
+ context 'when @target_project present' do
+ it 'returns targeted project milestones JSON URL' do
+ assign(:target_project, project2)
+
+ expect(helper.milestones_filter_dropdown_path).to eq(namespace_project_milestones_path(project2.namespace, project2, :json))
+ end
+ end
+
+ context 'when @group present' do
+ it 'returns group milestones JSON URL' do
+ assign(:group, group)
+
+ expect(helper.milestones_filter_dropdown_path).to eq(group_milestones_path(group, :json))
+ end
+ end
+
+ context 'when neither of @project/@target_project/@group present' do
+ it 'returns dashboard milestones JSON URL' do
+ expect(helper.milestones_filter_dropdown_path).to eq(dashboard_milestones_path(:json))
+ end
+ end
+ end
+
describe "#milestone_date_range" do
def result_for(*args)
milestone_date_range(build(:milestone, *args))
diff --git a/spec/helpers/submodule_helper_spec.rb b/spec/helpers/submodule_helper_spec.rb
index cb727430117..9e561d0f191 100644
--- a/spec/helpers/submodule_helper_spec.rb
+++ b/spec/helpers/submodule_helper_spec.rb
@@ -170,6 +170,11 @@ describe SubmoduleHelper do
expect(result).to eq(["/#{group.path}/test", "/#{group.path}/test/tree/#{commit_id}"])
end
+ it 'with trailing whitespace' do
+ result = relative_self_links('../test.git ', commit_id)
+ expect(result).to eq(["/#{group.path}/test", "/#{group.path}/test/tree/#{commit_id}"])
+ end
+
it 'two levels down' do
result = relative_self_links('../../test.git', commit_id)
expect(result).to eq(["/#{group.path}/test", "/#{group.path}/test/tree/#{commit_id}"])
diff --git a/spec/javascripts/awards_handler_spec.js b/spec/javascripts/awards_handler_spec.js
index 3fc03324d16..8e056882108 100644
--- a/spec/javascripts/awards_handler_spec.js
+++ b/spec/javascripts/awards_handler_spec.js
@@ -1,7 +1,7 @@
/* eslint-disable space-before-function-paren, no-var, one-var, one-var-declaration-per-line, no-unused-expressions, comma-dangle, new-parens, no-unused-vars, quotes, jasmine/no-spec-dupes, prefer-template, max-len */
import Cookies from 'js-cookie';
-import AwardsHandler from '~/awards_handler';
+import loadAwardsHandler from '~/awards_handler';
import '~/lib/utils/common_utils';
@@ -26,14 +26,13 @@ import '~/lib/utils/common_utils';
describe('AwardsHandler', function() {
preloadFixtures('issues/issue_with_comment.html.raw');
- beforeEach(function() {
+ beforeEach(function(done) {
loadFixtures('issues/issue_with_comment.html.raw');
- awardsHandler = new AwardsHandler;
- spyOn(awardsHandler, 'postEmoji').and.callFake((function(_this) {
- return function(button, url, emoji, cb) {
- return cb();
- };
- })(this));
+ loadAwardsHandler(true).then((obj) => {
+ awardsHandler = obj;
+ spyOn(awardsHandler, 'postEmoji').and.callFake((button, url, emoji, cb) => cb());
+ done();
+ }).catch(fail);
let isEmojiMenuBuilt = false;
openAndWaitForEmojiMenu = function() {
diff --git a/spec/javascripts/filtered_search/dropdown_user_spec.js b/spec/javascripts/filtered_search/dropdown_user_spec.js
index f7708301b6e..0132f4b7c93 100644
--- a/spec/javascripts/filtered_search/dropdown_user_spec.js
+++ b/spec/javascripts/filtered_search/dropdown_user_spec.js
@@ -66,4 +66,38 @@ describe('Dropdown User', () => {
window.gon = {};
});
});
+
+ describe('hideCurrentUser', () => {
+ const fixtureTemplate = 'issues/issue_list.html.raw';
+ preloadFixtures(fixtureTemplate);
+
+ let dropdown;
+ let authorFilterDropdownElement;
+
+ beforeEach(() => {
+ loadFixtures(fixtureTemplate);
+ authorFilterDropdownElement = document.querySelector('#js-dropdown-author');
+ const dummyInput = document.createElement('div');
+ dropdown = new gl.DropdownUser(null, authorFilterDropdownElement, dummyInput);
+ });
+
+ const findCurrentUserElement = () => authorFilterDropdownElement.querySelector('.js-current-user');
+
+ it('hides the current user from dropdown', () => {
+ const currentUserElement = findCurrentUserElement();
+ expect(currentUserElement).not.toBe(null);
+
+ dropdown.hideCurrentUser();
+
+ expect(currentUserElement.classList).toContain('hidden');
+ });
+
+ it('does nothing if no user is logged in', () => {
+ const currentUserElement = findCurrentUserElement();
+ currentUserElement.parentNode.removeChild(currentUserElement);
+ expect(findCurrentUserElement()).toBe(null);
+
+ dropdown.hideCurrentUser();
+ });
+ });
});
diff --git a/spec/javascripts/fixtures/merge_requests.rb b/spec/javascripts/fixtures/merge_requests.rb
index daaddd8f390..7e2f364ffa4 100644
--- a/spec/javascripts/fixtures/merge_requests.rb
+++ b/spec/javascripts/fixtures/merge_requests.rb
@@ -55,27 +55,14 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
render_merge_request(example.description, merge_request)
end
- it 'merge_requests/inline_changes_tab_with_comments.json' do |example|
- create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request)
- create(:note_on_merge_request, author: admin, project: project, noteable: merge_request)
- render_merge_request(example.description, merge_request, action: :diffs, format: :json)
- end
-
- it 'merge_requests/parallel_changes_tab_with_comments.json' do |example|
- create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request)
- create(:note_on_merge_request, author: admin, project: project, noteable: merge_request)
- render_merge_request(example.description, merge_request, action: :diffs, format: :json, view: 'parallel')
- end
-
private
- def render_merge_request(fixture_file_name, merge_request, action: :show, format: :html, view: 'inline')
- get action,
+ def render_merge_request(fixture_file_name, merge_request)
+ get :show,
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.to_param,
- format: format,
- view: view
+ format: :html
expect(response).to be_success
store_frontend_fixture(response, fixture_file_name)
diff --git a/spec/javascripts/fixtures/merge_requests_diffs.rb b/spec/javascripts/fixtures/merge_requests_diffs.rb
new file mode 100644
index 00000000000..ac5b06ace6d
--- /dev/null
+++ b/spec/javascripts/fixtures/merge_requests_diffs.rb
@@ -0,0 +1,57 @@
+
+require 'spec_helper'
+
+describe Projects::MergeRequests::DiffsController, '(JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ let(:admin) { create(:admin) }
+ let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
+ let(:project) { create(:project, namespace: namespace, path: 'merge-requests-project') }
+ let(:merge_request) { create(:merge_request, :with_diffs, source_project: project, target_project: project, description: '- [ ] Task List Item') }
+ let(:path) { "files/ruby/popen.rb" }
+ let(:position) do
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ old_line: nil,
+ new_line: 14,
+ diff_refs: merge_request.diff_refs
+ )
+ end
+
+ render_views
+
+ before(:all) do
+ clean_frontend_fixtures('merge_request_diffs/')
+ end
+
+ before(:each) do
+ sign_in(admin)
+ end
+
+ it 'merge_request_diffs/inline_changes_tab_with_comments.json' do |example|
+ create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request)
+ create(:note_on_merge_request, author: admin, project: project, noteable: merge_request)
+ render_merge_request(example.description, merge_request)
+ end
+
+ it 'merge_request_diffs/parallel_changes_tab_with_comments.json' do |example|
+ create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request)
+ create(:note_on_merge_request, author: admin, project: project, noteable: merge_request)
+ render_merge_request(example.description, merge_request, view: 'parallel')
+ end
+
+ private
+
+ def render_merge_request(fixture_file_name, merge_request, view: 'inline')
+ get :show,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.to_param,
+ format: :json,
+ view: view
+
+ expect(response).to be_success
+ store_frontend_fixture(response, fixture_file_name)
+ end
+end
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index 9df92318864..bc13373a27e 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -42,9 +42,6 @@ describe('Issuable output', () => {
}).$mount();
});
- afterEach(() => {
- });
-
it('should render a title/description/edited and update title/description/edited on update', (done) => {
vm.poll.options.successCallback({
json() {
diff --git a/spec/javascripts/lib/utils/dom_utils_spec.js b/spec/javascripts/lib/utils/dom_utils_spec.js
new file mode 100644
index 00000000000..867bf5912d1
--- /dev/null
+++ b/spec/javascripts/lib/utils/dom_utils_spec.js
@@ -0,0 +1,35 @@
+import { addClassIfElementExists } from '~/lib/utils/dom_utils';
+
+describe('DOM Utils', () => {
+ describe('addClassIfElementExists', () => {
+ const className = 'biology';
+ const fixture = `
+ <div class="parent">
+ <div class="child"></div>
+ </div>
+ `;
+
+ let parentElement;
+
+ beforeEach(() => {
+ setFixtures(fixture);
+ parentElement = document.querySelector('.parent');
+ });
+
+ it('adds class if element exists', () => {
+ const childElement = parentElement.querySelector('.child');
+ expect(childElement).not.toBe(null);
+
+ addClassIfElementExists(childElement, className);
+
+ expect(childElement.classList).toContain(className);
+ });
+
+ it('does not throw if element does not exist', () => {
+ const childElement = parentElement.querySelector('.other-child');
+ expect(childElement).toBe(null);
+
+ addClassIfElementExists(childElement, className);
+ });
+ });
+});
diff --git a/spec/javascripts/merge_request_notes_spec.js b/spec/javascripts/merge_request_notes_spec.js
index 9e9eb17d439..395dc560671 100644
--- a/spec/javascripts/merge_request_notes_spec.js
+++ b/spec/javascripts/merge_request_notes_spec.js
@@ -15,7 +15,7 @@ describe('Merge request notes', () => {
gl.utils = gl.utils || {};
const discussionTabFixture = 'merge_requests/diff_comment.html.raw';
- const changesTabJsonFixture = 'merge_requests/inline_changes_tab_with_comments.json';
+ const changesTabJsonFixture = 'merge_request_diffs/inline_changes_tab_with_comments.json';
preloadFixtures(discussionTabFixture, changesTabJsonFixture);
describe('Discussion tab with diff comments', () => {
diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js
index bb6b5d852d3..49ef21f75de 100644
--- a/spec/javascripts/merge_request_tabs_spec.js
+++ b/spec/javascripts/merge_request_tabs_spec.js
@@ -23,8 +23,8 @@ import 'vendor/jquery.scrollTo';
$.extend(stubLocation, defaults, stubs || {});
};
- const inlineChangesTabJsonFixture = 'merge_requests/inline_changes_tab_with_comments.json';
- const parallelChangesTabJsonFixture = 'merge_requests/parallel_changes_tab_with_comments.json';
+ const inlineChangesTabJsonFixture = 'merge_request_diffs/inline_changes_tab_with_comments.json';
+ const parallelChangesTabJsonFixture = 'merge_request_diffs/parallel_changes_tab_with_comments.json';
preloadFixtures(
'merge_requests/merge_request_with_task_list.html.raw',
'merge_requests/diff_comment.html.raw',
@@ -52,14 +52,10 @@ import 'vendor/jquery.scrollTo';
loadFixtures('merge_requests/merge_request_with_task_list.html.raw');
this.subject = this.class.activateTab;
});
- it('shows the first tab when action is show', function () {
+ it('shows the notes tab when action is show', function () {
this.subject('show');
expect($('#notes')).toHaveClass('active');
});
- it('shows the notes tab when action is notes', function () {
- this.subject('notes');
- expect($('#notes')).toHaveClass('active');
- });
it('shows the commits tab when action is commits', function () {
this.subject('commits');
expect($('#commits')).toHaveClass('active');
@@ -161,7 +157,7 @@ import 'vendor/jquery.scrollTo';
setLocation({
pathname: '/foo/bar/merge_requests/1/commits'
});
- expect(this.subject('notes')).toBe('/foo/bar/merge_requests/1');
+ expect(this.subject('show')).toBe('/foo/bar/merge_requests/1');
expect(this.subject('diffs')).toBe('/foo/bar/merge_requests/1/diffs');
});
@@ -170,7 +166,7 @@ import 'vendor/jquery.scrollTo';
pathname: '/foo/bar/merge_requests/1/diffs'
});
- expect(this.subject('notes')).toBe('/foo/bar/merge_requests/1');
+ expect(this.subject('show')).toBe('/foo/bar/merge_requests/1');
expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits');
});
@@ -178,7 +174,7 @@ import 'vendor/jquery.scrollTo';
setLocation({
pathname: '/foo/bar/merge_requests/1/diffs.html'
});
- expect(this.subject('notes')).toBe('/foo/bar/merge_requests/1');
+ expect(this.subject('show')).toBe('/foo/bar/merge_requests/1');
expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits');
});
diff --git a/spec/javascripts/monitoring/deployments_spec.js b/spec/javascripts/monitoring/deployments_spec.js
deleted file mode 100644
index 19bc11d0f24..00000000000
--- a/spec/javascripts/monitoring/deployments_spec.js
+++ /dev/null
@@ -1,133 +0,0 @@
-import d3 from 'd3';
-import PrometheusGraph from '~/monitoring/prometheus_graph';
-import Deployments from '~/monitoring/deployments';
-import { prometheusMockData } from './prometheus_mock_data';
-
-describe('Metrics deployments', () => {
- const fixtureName = 'environments/metrics/metrics.html.raw';
- let deployment;
- let prometheusGraph;
-
- const graphElement = () => document.querySelector('.prometheus-graph');
-
- preloadFixtures(fixtureName);
-
- beforeEach((done) => {
- // Setup the view
- loadFixtures(fixtureName);
-
- d3.selectAll('.prometheus-graph')
- .append('g')
- .attr('class', 'graph-container');
-
- prometheusGraph = new PrometheusGraph();
- deployment = new Deployments(1000, 500);
-
- spyOn(prometheusGraph, 'init');
- spyOn($, 'ajax').and.callFake(() => {
- const d = $.Deferred();
- d.resolve({
- deployments: [{
- id: 1,
- created_at: deployment.chartData[10].time,
- sha: 'testing',
- tag: false,
- ref: {
- name: 'testing',
- },
- }, {
- id: 2,
- created_at: deployment.chartData[15].time,
- sha: '',
- tag: true,
- ref: {
- name: 'tag',
- },
- }],
- });
-
- setTimeout(done);
-
- return d.promise();
- });
-
- prometheusGraph.configureGraph();
- prometheusGraph.transformData(prometheusMockData.metrics);
-
- deployment.init(prometheusGraph.graphSpecificProperties.memory_values.data);
- });
-
- it('creates line on graph for deployment', () => {
- expect(
- graphElement().querySelectorAll('.deployment-line').length,
- ).toBe(2);
- });
-
- it('creates hidden deploy boxes', () => {
- expect(
- graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box').length,
- ).toBe(2);
- });
-
- it('hides the info boxes by default', () => {
- expect(
- graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box.hidden').length,
- ).toBe(2);
- });
-
- it('shows sha short code when tag is false', () => {
- expect(
- graphElement().querySelector('.deploy-info-1-cpu_values .js-deploy-info-box').textContent.trim(),
- ).toContain('testin');
- });
-
- it('shows ref name when tag is true', () => {
- expect(
- graphElement().querySelector('.deploy-info-2-cpu_values .js-deploy-info-box').textContent.trim(),
- ).toContain('tag');
- });
-
- it('shows info box when moving mouse over line', () => {
- deployment.mouseOverDeployInfo(deployment.data[0].xPos, 'cpu_values');
-
- expect(
- graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box.hidden').length,
- ).toBe(1);
-
- expect(
- graphElement().querySelector('.deploy-info-1-cpu_values .js-deploy-info-box.hidden'),
- ).toBeNull();
- });
-
- it('hides previously visible info box when moving mouse away', () => {
- deployment.mouseOverDeployInfo(500, 'cpu_values');
-
- expect(
- graphElement().querySelectorAll('.prometheus-graph .js-deploy-info-box.hidden').length,
- ).toBe(2);
-
- expect(
- graphElement().querySelector('.deploy-info-1-cpu_values .js-deploy-info-box.hidden'),
- ).not.toBeNull();
- });
-
- describe('refText', () => {
- it('returns shortened SHA', () => {
- expect(
- Deployments.refText({
- tag: false,
- sha: '123456789',
- }),
- ).toBe('123456');
- });
-
- it('returns tag name', () => {
- expect(
- Deployments.refText({
- tag: true,
- ref: 'v1.0',
- }),
- ).toBe('v1.0');
- });
- });
-});
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
new file mode 100644
index 00000000000..56d938e1fbe
--- /dev/null
+++ b/spec/javascripts/monitoring/mock_data.js
@@ -0,0 +1,4229 @@
+/* eslint-disable quote-props, indent, comma-dangle */
+
+const metricsGroupsAPIResponse = {
+ 'success': true,
+ 'data': [
+ {
+ 'group': 'Kubernetes',
+ 'priority': 1,
+ 'metrics': [
+ {
+ 'title': 'Memory usage',
+ 'weight': 1,
+ 'queries': [
+ {
+ 'query_range': 'avg(container_memory_usage_bytes{%{environment_filter}}) / 2^20',
+ 'y_label': 'Memory',
+ 'unit': 'MiB',
+ 'result': [
+ {
+ 'metric': {},
+ 'values': [
+ [
+ 1495700554.925,
+ '8.0390625'
+ ],
+ [
+ 1495700614.925,
+ '8.0390625'
+ ],
+ [
+ 1495700674.925,
+ '8.0390625'
+ ],
+ [
+ 1495700734.925,
+ '8.0390625'
+ ],
+ [
+ 1495700794.925,
+ '8.0390625'
+ ],
+ [
+ 1495700854.925,
+ '8.0390625'
+ ],
+ [
+ 1495700914.925,
+ '8.0390625'
+ ],
+ [
+ 1495700974.925,
+ '8.0390625'
+ ],
+ [
+ 1495701034.925,
+ '8.0390625'
+ ],
+ [
+ 1495701094.925,
+ '8.0390625'
+ ],
+ [
+ 1495701154.925,
+ '8.0390625'
+ ],
+ [
+ 1495701214.925,
+ '8.0390625'
+ ],
+ [
+ 1495701274.925,
+ '8.0390625'
+ ],
+ [
+ 1495701334.925,
+ '8.0390625'
+ ],
+ [
+ 1495701394.925,
+ '8.0390625'
+ ],
+ [
+ 1495701454.925,
+ '8.0390625'
+ ],
+ [
+ 1495701514.925,
+ '8.0390625'
+ ],
+ [
+ 1495701574.925,
+ '8.0390625'
+ ],
+ [
+ 1495701634.925,
+ '8.0390625'
+ ],
+ [
+ 1495701694.925,
+ '8.0390625'
+ ],
+ [
+ 1495701754.925,
+ '8.0390625'
+ ],
+ [
+ 1495701814.925,
+ '8.0390625'
+ ],
+ [
+ 1495701874.925,
+ '8.0390625'
+ ],
+ [
+ 1495701934.925,
+ '8.0390625'
+ ],
+ [
+ 1495701994.925,
+ '8.0390625'
+ ],
+ [
+ 1495702054.925,
+ '8.0390625'
+ ],
+ [
+ 1495702114.925,
+ '8.0390625'
+ ],
+ [
+ 1495702174.925,
+ '8.0390625'
+ ],
+ [
+ 1495702234.925,
+ '8.0390625'
+ ],
+ [
+ 1495702294.925,
+ '8.0390625'
+ ],
+ [
+ 1495702354.925,
+ '8.0390625'
+ ],
+ [
+ 1495702414.925,
+ '8.0390625'
+ ],
+ [
+ 1495702474.925,
+ '8.0390625'
+ ],
+ [
+ 1495702534.925,
+ '8.0390625'
+ ],
+ [
+ 1495702594.925,
+ '8.0390625'
+ ],
+ [
+ 1495702654.925,
+ '8.0390625'
+ ],
+ [
+ 1495702714.925,
+ '8.0390625'
+ ],
+ [
+ 1495702774.925,
+ '8.0390625'
+ ],
+ [
+ 1495702834.925,
+ '8.0390625'
+ ],
+ [
+ 1495702894.925,
+ '8.0390625'
+ ],
+ [
+ 1495702954.925,
+ '8.0390625'
+ ],
+ [
+ 1495703014.925,
+ '8.0390625'
+ ],
+ [
+ 1495703074.925,
+ '8.0390625'
+ ],
+ [
+ 1495703134.925,
+ '8.0390625'
+ ],
+ [
+ 1495703194.925,
+ '8.0390625'
+ ],
+ [
+ 1495703254.925,
+ '8.03515625'
+ ],
+ [
+ 1495703314.925,
+ '8.03515625'
+ ],
+ [
+ 1495703374.925,
+ '8.03515625'
+ ],
+ [
+ 1495703434.925,
+ '8.03515625'
+ ],
+ [
+ 1495703494.925,
+ '8.03515625'
+ ],
+ [
+ 1495703554.925,
+ '8.03515625'
+ ],
+ [
+ 1495703614.925,
+ '8.03515625'
+ ],
+ [
+ 1495703674.925,
+ '8.03515625'
+ ],
+ [
+ 1495703734.925,
+ '8.03515625'
+ ],
+ [
+ 1495703794.925,
+ '8.03515625'
+ ],
+ [
+ 1495703854.925,
+ '8.03515625'
+ ],
+ [
+ 1495703914.925,
+ '8.03515625'
+ ],
+ [
+ 1495703974.925,
+ '8.03515625'
+ ],
+ [
+ 1495704034.925,
+ '8.03515625'
+ ],
+ [
+ 1495704094.925,
+ '8.03515625'
+ ],
+ [
+ 1495704154.925,
+ '8.03515625'
+ ],
+ [
+ 1495704214.925,
+ '7.9296875'
+ ],
+ [
+ 1495704274.925,
+ '7.9296875'
+ ],
+ [
+ 1495704334.925,
+ '7.9296875'
+ ],
+ [
+ 1495704394.925,
+ '7.9296875'
+ ],
+ [
+ 1495704454.925,
+ '7.9296875'
+ ],
+ [
+ 1495704514.925,
+ '7.9296875'
+ ],
+ [
+ 1495704574.925,
+ '7.9296875'
+ ],
+ [
+ 1495704634.925,
+ '7.9296875'
+ ],
+ [
+ 1495704694.925,
+ '7.9296875'
+ ],
+ [
+ 1495704754.925,
+ '7.9296875'
+ ],
+ [
+ 1495704814.925,
+ '7.9296875'
+ ],
+ [
+ 1495704874.925,
+ '7.9296875'
+ ],
+ [
+ 1495704934.925,
+ '7.9296875'
+ ],
+ [
+ 1495704994.925,
+ '7.9296875'
+ ],
+ [
+ 1495705054.925,
+ '7.9296875'
+ ],
+ [
+ 1495705114.925,
+ '7.9296875'
+ ],
+ [
+ 1495705174.925,
+ '7.9296875'
+ ],
+ [
+ 1495705234.925,
+ '7.9296875'
+ ],
+ [
+ 1495705294.925,
+ '7.9296875'
+ ],
+ [
+ 1495705354.925,
+ '7.9296875'
+ ],
+ [
+ 1495705414.925,
+ '7.9296875'
+ ],
+ [
+ 1495705474.925,
+ '7.9296875'
+ ],
+ [
+ 1495705534.925,
+ '7.9296875'
+ ],
+ [
+ 1495705594.925,
+ '7.9296875'
+ ],
+ [
+ 1495705654.925,
+ '7.9296875'
+ ],
+ [
+ 1495705714.925,
+ '7.9296875'
+ ],
+ [
+ 1495705774.925,
+ '7.9296875'
+ ],
+ [
+ 1495705834.925,
+ '7.9296875'
+ ],
+ [
+ 1495705894.925,
+ '7.9296875'
+ ],
+ [
+ 1495705954.925,
+ '7.9296875'
+ ],
+ [
+ 1495706014.925,
+ '7.9296875'
+ ],
+ [
+ 1495706074.925,
+ '7.9296875'
+ ],
+ [
+ 1495706134.925,
+ '7.9296875'
+ ],
+ [
+ 1495706194.925,
+ '7.9296875'
+ ],
+ [
+ 1495706254.925,
+ '7.9296875'
+ ],
+ [
+ 1495706314.925,
+ '7.9296875'
+ ],
+ [
+ 1495706374.925,
+ '7.9296875'
+ ],
+ [
+ 1495706434.925,
+ '7.9296875'
+ ],
+ [
+ 1495706494.925,
+ '7.9296875'
+ ],
+ [
+ 1495706554.925,
+ '7.9296875'
+ ],
+ [
+ 1495706614.925,
+ '7.9296875'
+ ],
+ [
+ 1495706674.925,
+ '7.9296875'
+ ],
+ [
+ 1495706734.925,
+ '7.9296875'
+ ],
+ [
+ 1495706794.925,
+ '7.9296875'
+ ],
+ [
+ 1495706854.925,
+ '7.9296875'
+ ],
+ [
+ 1495706914.925,
+ '7.9296875'
+ ],
+ [
+ 1495706974.925,
+ '7.9296875'
+ ],
+ [
+ 1495707034.925,
+ '7.9296875'
+ ],
+ [
+ 1495707094.925,
+ '7.9296875'
+ ],
+ [
+ 1495707154.925,
+ '7.9296875'
+ ],
+ [
+ 1495707214.925,
+ '7.9296875'
+ ],
+ [
+ 1495707274.925,
+ '7.9296875'
+ ],
+ [
+ 1495707334.925,
+ '7.9296875'
+ ],
+ [
+ 1495707394.925,
+ '7.9296875'
+ ],
+ [
+ 1495707454.925,
+ '7.9296875'
+ ],
+ [
+ 1495707514.925,
+ '7.9296875'
+ ],
+ [
+ 1495707574.925,
+ '7.9296875'
+ ],
+ [
+ 1495707634.925,
+ '7.9296875'
+ ],
+ [
+ 1495707694.925,
+ '7.9296875'
+ ],
+ [
+ 1495707754.925,
+ '7.9296875'
+ ],
+ [
+ 1495707814.925,
+ '7.9296875'
+ ],
+ [
+ 1495707874.925,
+ '7.9296875'
+ ],
+ [
+ 1495707934.925,
+ '7.9296875'
+ ],
+ [
+ 1495707994.925,
+ '7.9296875'
+ ],
+ [
+ 1495708054.925,
+ '7.9296875'
+ ],
+ [
+ 1495708114.925,
+ '7.9296875'
+ ],
+ [
+ 1495708174.925,
+ '7.9296875'
+ ],
+ [
+ 1495708234.925,
+ '7.9296875'
+ ],
+ [
+ 1495708294.925,
+ '7.9296875'
+ ],
+ [
+ 1495708354.925,
+ '7.9296875'
+ ],
+ [
+ 1495708414.925,
+ '7.9296875'
+ ],
+ [
+ 1495708474.925,
+ '7.9296875'
+ ],
+ [
+ 1495708534.925,
+ '7.9296875'
+ ],
+ [
+ 1495708594.925,
+ '7.9296875'
+ ],
+ [
+ 1495708654.925,
+ '7.9296875'
+ ],
+ [
+ 1495708714.925,
+ '7.9296875'
+ ],
+ [
+ 1495708774.925,
+ '7.9296875'
+ ],
+ [
+ 1495708834.925,
+ '7.9296875'
+ ],
+ [
+ 1495708894.925,
+ '7.9296875'
+ ],
+ [
+ 1495708954.925,
+ '7.8984375'
+ ],
+ [
+ 1495709014.925,
+ '7.8984375'
+ ],
+ [
+ 1495709074.925,
+ '7.8984375'
+ ],
+ [
+ 1495709134.925,
+ '7.8984375'
+ ],
+ [
+ 1495709194.925,
+ '7.8984375'
+ ],
+ [
+ 1495709254.925,
+ '7.89453125'
+ ],
+ [
+ 1495709314.925,
+ '7.89453125'
+ ],
+ [
+ 1495709374.925,
+ '7.89453125'
+ ],
+ [
+ 1495709434.925,
+ '7.89453125'
+ ],
+ [
+ 1495709494.925,
+ '7.89453125'
+ ],
+ [
+ 1495709554.925,
+ '7.89453125'
+ ],
+ [
+ 1495709614.925,
+ '7.89453125'
+ ],
+ [
+ 1495709674.925,
+ '7.89453125'
+ ],
+ [
+ 1495709734.925,
+ '7.89453125'
+ ],
+ [
+ 1495709794.925,
+ '7.89453125'
+ ],
+ [
+ 1495709854.925,
+ '7.89453125'
+ ],
+ [
+ 1495709914.925,
+ '7.89453125'
+ ],
+ [
+ 1495709974.925,
+ '7.89453125'
+ ],
+ [
+ 1495710034.925,
+ '7.89453125'
+ ],
+ [
+ 1495710094.925,
+ '7.89453125'
+ ],
+ [
+ 1495710154.925,
+ '7.89453125'
+ ],
+ [
+ 1495710214.925,
+ '7.89453125'
+ ],
+ [
+ 1495710274.925,
+ '7.89453125'
+ ],
+ [
+ 1495710334.925,
+ '7.89453125'
+ ],
+ [
+ 1495710394.925,
+ '7.89453125'
+ ],
+ [
+ 1495710454.925,
+ '7.89453125'
+ ],
+ [
+ 1495710514.925,
+ '7.89453125'
+ ],
+ [
+ 1495710574.925,
+ '7.89453125'
+ ],
+ [
+ 1495710634.925,
+ '7.89453125'
+ ],
+ [
+ 1495710694.925,
+ '7.89453125'
+ ],
+ [
+ 1495710754.925,
+ '7.89453125'
+ ],
+ [
+ 1495710814.925,
+ '7.89453125'
+ ],
+ [
+ 1495710874.925,
+ '7.89453125'
+ ],
+ [
+ 1495710934.925,
+ '7.89453125'
+ ],
+ [
+ 1495710994.925,
+ '7.89453125'
+ ],
+ [
+ 1495711054.925,
+ '7.89453125'
+ ],
+ [
+ 1495711114.925,
+ '7.89453125'
+ ],
+ [
+ 1495711174.925,
+ '7.8515625'
+ ],
+ [
+ 1495711234.925,
+ '7.8515625'
+ ],
+ [
+ 1495711294.925,
+ '7.8515625'
+ ],
+ [
+ 1495711354.925,
+ '7.8515625'
+ ],
+ [
+ 1495711414.925,
+ '7.8515625'
+ ],
+ [
+ 1495711474.925,
+ '7.8515625'
+ ],
+ [
+ 1495711534.925,
+ '7.8515625'
+ ],
+ [
+ 1495711594.925,
+ '7.8515625'
+ ],
+ [
+ 1495711654.925,
+ '7.8515625'
+ ],
+ [
+ 1495711714.925,
+ '7.8515625'
+ ],
+ [
+ 1495711774.925,
+ '7.8515625'
+ ],
+ [
+ 1495711834.925,
+ '7.8515625'
+ ],
+ [
+ 1495711894.925,
+ '7.8515625'
+ ],
+ [
+ 1495711954.925,
+ '7.8515625'
+ ],
+ [
+ 1495712014.925,
+ '7.8515625'
+ ],
+ [
+ 1495712074.925,
+ '7.8515625'
+ ],
+ [
+ 1495712134.925,
+ '7.8515625'
+ ],
+ [
+ 1495712194.925,
+ '7.8515625'
+ ],
+ [
+ 1495712254.925,
+ '7.8515625'
+ ],
+ [
+ 1495712314.925,
+ '7.8515625'
+ ],
+ [
+ 1495712374.925,
+ '7.8515625'
+ ],
+ [
+ 1495712434.925,
+ '7.83203125'
+ ],
+ [
+ 1495712494.925,
+ '7.83203125'
+ ],
+ [
+ 1495712554.925,
+ '7.83203125'
+ ],
+ [
+ 1495712614.925,
+ '7.83203125'
+ ],
+ [
+ 1495712674.925,
+ '7.83203125'
+ ],
+ [
+ 1495712734.925,
+ '7.83203125'
+ ],
+ [
+ 1495712794.925,
+ '7.83203125'
+ ],
+ [
+ 1495712854.925,
+ '7.83203125'
+ ],
+ [
+ 1495712914.925,
+ '7.83203125'
+ ],
+ [
+ 1495712974.925,
+ '7.83203125'
+ ],
+ [
+ 1495713034.925,
+ '7.83203125'
+ ],
+ [
+ 1495713094.925,
+ '7.83203125'
+ ],
+ [
+ 1495713154.925,
+ '7.83203125'
+ ],
+ [
+ 1495713214.925,
+ '7.83203125'
+ ],
+ [
+ 1495713274.925,
+ '7.83203125'
+ ],
+ [
+ 1495713334.925,
+ '7.83203125'
+ ],
+ [
+ 1495713394.925,
+ '7.8125'
+ ],
+ [
+ 1495713454.925,
+ '7.8125'
+ ],
+ [
+ 1495713514.925,
+ '7.8125'
+ ],
+ [
+ 1495713574.925,
+ '7.8125'
+ ],
+ [
+ 1495713634.925,
+ '7.8125'
+ ],
+ [
+ 1495713694.925,
+ '7.8125'
+ ],
+ [
+ 1495713754.925,
+ '7.8125'
+ ],
+ [
+ 1495713814.925,
+ '7.8125'
+ ],
+ [
+ 1495713874.925,
+ '7.8125'
+ ],
+ [
+ 1495713934.925,
+ '7.8125'
+ ],
+ [
+ 1495713994.925,
+ '7.8125'
+ ],
+ [
+ 1495714054.925,
+ '7.8125'
+ ],
+ [
+ 1495714114.925,
+ '7.8125'
+ ],
+ [
+ 1495714174.925,
+ '7.8125'
+ ],
+ [
+ 1495714234.925,
+ '7.8125'
+ ],
+ [
+ 1495714294.925,
+ '7.8125'
+ ],
+ [
+ 1495714354.925,
+ '7.80859375'
+ ],
+ [
+ 1495714414.925,
+ '7.80859375'
+ ],
+ [
+ 1495714474.925,
+ '7.80859375'
+ ],
+ [
+ 1495714534.925,
+ '7.80859375'
+ ],
+ [
+ 1495714594.925,
+ '7.80859375'
+ ],
+ [
+ 1495714654.925,
+ '7.80859375'
+ ],
+ [
+ 1495714714.925,
+ '7.80859375'
+ ],
+ [
+ 1495714774.925,
+ '7.80859375'
+ ],
+ [
+ 1495714834.925,
+ '7.80859375'
+ ],
+ [
+ 1495714894.925,
+ '7.80859375'
+ ],
+ [
+ 1495714954.925,
+ '7.80859375'
+ ],
+ [
+ 1495715014.925,
+ '7.80859375'
+ ],
+ [
+ 1495715074.925,
+ '7.80859375'
+ ],
+ [
+ 1495715134.925,
+ '7.80859375'
+ ],
+ [
+ 1495715194.925,
+ '7.80859375'
+ ],
+ [
+ 1495715254.925,
+ '7.80859375'
+ ],
+ [
+ 1495715314.925,
+ '7.80859375'
+ ],
+ [
+ 1495715374.925,
+ '7.80859375'
+ ],
+ [
+ 1495715434.925,
+ '7.80859375'
+ ],
+ [
+ 1495715494.925,
+ '7.80859375'
+ ],
+ [
+ 1495715554.925,
+ '7.80859375'
+ ],
+ [
+ 1495715614.925,
+ '7.80859375'
+ ],
+ [
+ 1495715674.925,
+ '7.80859375'
+ ],
+ [
+ 1495715734.925,
+ '7.80859375'
+ ],
+ [
+ 1495715794.925,
+ '7.80859375'
+ ],
+ [
+ 1495715854.925,
+ '7.80859375'
+ ],
+ [
+ 1495715914.925,
+ '7.80078125'
+ ],
+ [
+ 1495715974.925,
+ '7.80078125'
+ ],
+ [
+ 1495716034.925,
+ '7.80078125'
+ ],
+ [
+ 1495716094.925,
+ '7.80078125'
+ ],
+ [
+ 1495716154.925,
+ '7.80078125'
+ ],
+ [
+ 1495716214.925,
+ '7.796875'
+ ],
+ [
+ 1495716274.925,
+ '7.796875'
+ ],
+ [
+ 1495716334.925,
+ '7.796875'
+ ],
+ [
+ 1495716394.925,
+ '7.796875'
+ ],
+ [
+ 1495716454.925,
+ '7.796875'
+ ],
+ [
+ 1495716514.925,
+ '7.796875'
+ ],
+ [
+ 1495716574.925,
+ '7.796875'
+ ],
+ [
+ 1495716634.925,
+ '7.796875'
+ ],
+ [
+ 1495716694.925,
+ '7.796875'
+ ],
+ [
+ 1495716754.925,
+ '7.796875'
+ ],
+ [
+ 1495716814.925,
+ '7.796875'
+ ],
+ [
+ 1495716874.925,
+ '7.79296875'
+ ],
+ [
+ 1495716934.925,
+ '7.79296875'
+ ],
+ [
+ 1495716994.925,
+ '7.79296875'
+ ],
+ [
+ 1495717054.925,
+ '7.79296875'
+ ],
+ [
+ 1495717114.925,
+ '7.79296875'
+ ],
+ [
+ 1495717174.925,
+ '7.7890625'
+ ],
+ [
+ 1495717234.925,
+ '7.7890625'
+ ],
+ [
+ 1495717294.925,
+ '7.7890625'
+ ],
+ [
+ 1495717354.925,
+ '7.7890625'
+ ],
+ [
+ 1495717414.925,
+ '7.7890625'
+ ],
+ [
+ 1495717474.925,
+ '7.7890625'
+ ],
+ [
+ 1495717534.925,
+ '7.7890625'
+ ],
+ [
+ 1495717594.925,
+ '7.7890625'
+ ],
+ [
+ 1495717654.925,
+ '7.7890625'
+ ],
+ [
+ 1495717714.925,
+ '7.7890625'
+ ],
+ [
+ 1495717774.925,
+ '7.7890625'
+ ],
+ [
+ 1495717834.925,
+ '7.77734375'
+ ],
+ [
+ 1495717894.925,
+ '7.77734375'
+ ],
+ [
+ 1495717954.925,
+ '7.77734375'
+ ],
+ [
+ 1495718014.925,
+ '7.77734375'
+ ],
+ [
+ 1495718074.925,
+ '7.77734375'
+ ],
+ [
+ 1495718134.925,
+ '7.7421875'
+ ],
+ [
+ 1495718194.925,
+ '7.7421875'
+ ],
+ [
+ 1495718254.925,
+ '7.7421875'
+ ],
+ [
+ 1495718314.925,
+ '7.7421875'
+ ]
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ {
+ 'title': 'CPU usage',
+ 'weight': 1,
+ 'queries': [
+ {
+ 'query_range': 'avg(rate(container_cpu_usage_seconds_total{%{environment_filter}}[2m])) * 100',
+ 'result': [
+ {
+ 'metric': {},
+ 'values': [
+ [
+ 1495700554.925,
+ '0.0010794445585559514'
+ ],
+ [
+ 1495700614.925,
+ '0.003927214935433527'
+ ],
+ [
+ 1495700674.925,
+ '0.0053045219047619975'
+ ],
+ [
+ 1495700734.925,
+ '0.0048892095238097155'
+ ],
+ [
+ 1495700794.925,
+ '0.005827140952381137'
+ ],
+ [
+ 1495700854.925,
+ '0.00569846906219937'
+ ],
+ [
+ 1495700914.925,
+ '0.004972616802849382'
+ ],
+ [
+ 1495700974.925,
+ '0.005117509523809902'
+ ],
+ [
+ 1495701034.925,
+ '0.00512389061919564'
+ ],
+ [
+ 1495701094.925,
+ '0.005199100501890691'
+ ],
+ [
+ 1495701154.925,
+ '0.005415746394885837'
+ ],
+ [
+ 1495701214.925,
+ '0.005607682788146286'
+ ],
+ [
+ 1495701274.925,
+ '0.005641300000000118'
+ ],
+ [
+ 1495701334.925,
+ '0.0071166279368766495'
+ ],
+ [
+ 1495701394.925,
+ '0.0063242138095234044'
+ ],
+ [
+ 1495701454.925,
+ '0.005793314698235304'
+ ],
+ [
+ 1495701514.925,
+ '0.00703934942237556'
+ ],
+ [
+ 1495701574.925,
+ '0.006357007076123191'
+ ],
+ [
+ 1495701634.925,
+ '0.003753167300126738'
+ ],
+ [
+ 1495701694.925,
+ '0.005018469678430698'
+ ],
+ [
+ 1495701754.925,
+ '0.0045217153371887'
+ ],
+ [
+ 1495701814.925,
+ '0.006140104285714119'
+ ],
+ [
+ 1495701874.925,
+ '0.004818684285714102'
+ ],
+ [
+ 1495701934.925,
+ '0.005079509718955242'
+ ],
+ [
+ 1495701994.925,
+ '0.005059981142498263'
+ ],
+ [
+ 1495702054.925,
+ '0.005269098389538773'
+ ],
+ [
+ 1495702114.925,
+ '0.005269954285714175'
+ ],
+ [
+ 1495702174.925,
+ '0.014199241435795856'
+ ],
+ [
+ 1495702234.925,
+ '0.01511936843111017'
+ ],
+ [
+ 1495702294.925,
+ '0.0060933692920682875'
+ ],
+ [
+ 1495702354.925,
+ '0.004945682380952493'
+ ],
+ [
+ 1495702414.925,
+ '0.005641266666666565'
+ ],
+ [
+ 1495702474.925,
+ '0.005223752857142996'
+ ],
+ [
+ 1495702534.925,
+ '0.005743098505699831'
+ ],
+ [
+ 1495702594.925,
+ '0.00538493380952391'
+ ],
+ [
+ 1495702654.925,
+ '0.005507793883751339'
+ ],
+ [
+ 1495702714.925,
+ '0.005666705714285466'
+ ],
+ [
+ 1495702774.925,
+ '0.006231530000000112'
+ ],
+ [
+ 1495702834.925,
+ '0.006570768635394899'
+ ],
+ [
+ 1495702894.925,
+ '0.005551146666666895'
+ ],
+ [
+ 1495702954.925,
+ '0.005602604737098058'
+ ],
+ [
+ 1495703014.925,
+ '0.00613993580402159'
+ ],
+ [
+ 1495703074.925,
+ '0.004770258764368832'
+ ],
+ [
+ 1495703134.925,
+ '0.005512376671364914'
+ ],
+ [
+ 1495703194.925,
+ '0.005254436666666674'
+ ],
+ [
+ 1495703254.925,
+ '0.0050109839141320505'
+ ],
+ [
+ 1495703314.925,
+ '0.0049478019256960016'
+ ],
+ [
+ 1495703374.925,
+ '0.0037666860965123463'
+ ],
+ [
+ 1495703434.925,
+ '0.004813526061656314'
+ ],
+ [
+ 1495703494.925,
+ '0.005047748095238278'
+ ],
+ [
+ 1495703554.925,
+ '0.00386494081008772'
+ ],
+ [
+ 1495703614.925,
+ '0.004304037408111405'
+ ],
+ [
+ 1495703674.925,
+ '0.004999466661587168'
+ ],
+ [
+ 1495703734.925,
+ '0.004689140476190834'
+ ],
+ [
+ 1495703794.925,
+ '0.004746126153582475'
+ ],
+ [
+ 1495703854.925,
+ '0.004482706382572302'
+ ],
+ [
+ 1495703914.925,
+ '0.004032808931864524'
+ ],
+ [
+ 1495703974.925,
+ '0.005728319047618988'
+ ],
+ [
+ 1495704034.925,
+ '0.004436139179627006'
+ ],
+ [
+ 1495704094.925,
+ '0.004553455714285617'
+ ],
+ [
+ 1495704154.925,
+ '0.003455244285714341'
+ ],
+ [
+ 1495704214.925,
+ '0.004742244761904621'
+ ],
+ [
+ 1495704274.925,
+ '0.005366978571428422'
+ ],
+ [
+ 1495704334.925,
+ '0.004257954837665058'
+ ],
+ [
+ 1495704394.925,
+ '0.005431603259831257'
+ ],
+ [
+ 1495704454.925,
+ '0.0052009214498621986'
+ ],
+ [
+ 1495704514.925,
+ '0.004317201904761618'
+ ],
+ [
+ 1495704574.925,
+ '0.004307384285714157'
+ ],
+ [
+ 1495704634.925,
+ '0.004789801146644822'
+ ],
+ [
+ 1495704694.925,
+ '0.0051429795906706485'
+ ],
+ [
+ 1495704754.925,
+ '0.005322495714285479'
+ ],
+ [
+ 1495704814.925,
+ '0.004512809333244233'
+ ],
+ [
+ 1495704874.925,
+ '0.004953843582568726'
+ ],
+ [
+ 1495704934.925,
+ '0.005812690120858119'
+ ],
+ [
+ 1495704994.925,
+ '0.004997024285714838'
+ ],
+ [
+ 1495705054.925,
+ '0.005246216154439592'
+ ],
+ [
+ 1495705114.925,
+ '0.0063494966618726795'
+ ],
+ [
+ 1495705174.925,
+ '0.005306004342898225'
+ ],
+ [
+ 1495705234.925,
+ '0.005081412857142978'
+ ],
+ [
+ 1495705294.925,
+ '0.00511409523809522'
+ ],
+ [
+ 1495705354.925,
+ '0.0047861001481192'
+ ],
+ [
+ 1495705414.925,
+ '0.005107688228042962'
+ ],
+ [
+ 1495705474.925,
+ '0.005271929582294012'
+ ],
+ [
+ 1495705534.925,
+ '0.004453254502681249'
+ ],
+ [
+ 1495705594.925,
+ '0.005799134293959226'
+ ],
+ [
+ 1495705654.925,
+ '0.005340865929502478'
+ ],
+ [
+ 1495705714.925,
+ '0.004911654761904942'
+ ],
+ [
+ 1495705774.925,
+ '0.005888234873953261'
+ ],
+ [
+ 1495705834.925,
+ '0.005565283333332954'
+ ],
+ [
+ 1495705894.925,
+ '0.005522869047618869'
+ ],
+ [
+ 1495705954.925,
+ '0.005177549737621646'
+ ],
+ [
+ 1495706014.925,
+ '0.0053145810232096465'
+ ],
+ [
+ 1495706074.925,
+ '0.004751095238095275'
+ ],
+ [
+ 1495706134.925,
+ '0.006242077142856976'
+ ],
+ [
+ 1495706194.925,
+ '0.00621034406957871'
+ ],
+ [
+ 1495706254.925,
+ '0.006887592738978596'
+ ],
+ [
+ 1495706314.925,
+ '0.006328128779726213'
+ ],
+ [
+ 1495706374.925,
+ '0.007488363809523927'
+ ],
+ [
+ 1495706434.925,
+ '0.006193758571428157'
+ ],
+ [
+ 1495706494.925,
+ '0.0068798371839706935'
+ ],
+ [
+ 1495706554.925,
+ '0.005757034340423128'
+ ],
+ [
+ 1495706614.925,
+ '0.004571388497294698'
+ ],
+ [
+ 1495706674.925,
+ '0.00620283044923395'
+ ],
+ [
+ 1495706734.925,
+ '0.005607562380952455'
+ ],
+ [
+ 1495706794.925,
+ '0.005506969933620308'
+ ],
+ [
+ 1495706854.925,
+ '0.005621118095238131'
+ ],
+ [
+ 1495706914.925,
+ '0.004876606098698849'
+ ],
+ [
+ 1495706974.925,
+ '0.0047871205988517206'
+ ],
+ [
+ 1495707034.925,
+ '0.00526405939458784'
+ ],
+ [
+ 1495707094.925,
+ '0.005716323800605852'
+ ],
+ [
+ 1495707154.925,
+ '0.005301459523809575'
+ ],
+ [
+ 1495707214.925,
+ '0.0051613042857144905'
+ ],
+ [
+ 1495707274.925,
+ '0.005384792857142714'
+ ],
+ [
+ 1495707334.925,
+ '0.005259719047619222'
+ ],
+ [
+ 1495707394.925,
+ '0.00584101142857182'
+ ],
+ [
+ 1495707454.925,
+ '0.0060066121920326326'
+ ],
+ [
+ 1495707514.925,
+ '0.006359978571428453'
+ ],
+ [
+ 1495707574.925,
+ '0.006315876322151109'
+ ],
+ [
+ 1495707634.925,
+ '0.005590012517198831'
+ ],
+ [
+ 1495707694.925,
+ '0.005517419877137072'
+ ],
+ [
+ 1495707754.925,
+ '0.006089813430348506'
+ ],
+ [
+ 1495707814.925,
+ '0.00466754476190479'
+ ],
+ [
+ 1495707874.925,
+ '0.006059954380517721'
+ ],
+ [
+ 1495707934.925,
+ '0.005085657142856972'
+ ],
+ [
+ 1495707994.925,
+ '0.005897665238095296'
+ ],
+ [
+ 1495708054.925,
+ '0.0062282023199555885'
+ ],
+ [
+ 1495708114.925,
+ '0.00526214553236979'
+ ],
+ [
+ 1495708174.925,
+ '0.0044803300000000644'
+ ],
+ [
+ 1495708234.925,
+ '0.005421443333333592'
+ ],
+ [
+ 1495708294.925,
+ '0.005694326244512144'
+ ],
+ [
+ 1495708354.925,
+ '0.005527721904761457'
+ ],
+ [
+ 1495708414.925,
+ '0.005988819523809819'
+ ],
+ [
+ 1495708474.925,
+ '0.005484704285714448'
+ ],
+ [
+ 1495708534.925,
+ '0.005041123649230085'
+ ],
+ [
+ 1495708594.925,
+ '0.005717767639612059'
+ ],
+ [
+ 1495708654.925,
+ '0.005412954417342863'
+ ],
+ [
+ 1495708714.925,
+ '0.005833343333333254'
+ ],
+ [
+ 1495708774.925,
+ '0.005448135238094969'
+ ],
+ [
+ 1495708834.925,
+ '0.005117341428571432'
+ ],
+ [
+ 1495708894.925,
+ '0.005888345825277833'
+ ],
+ [
+ 1495708954.925,
+ '0.005398543809524135'
+ ],
+ [
+ 1495709014.925,
+ '0.005325611428571416'
+ ],
+ [
+ 1495709074.925,
+ '0.005848668571428527'
+ ],
+ [
+ 1495709134.925,
+ '0.005135003105145044'
+ ],
+ [
+ 1495709194.925,
+ '0.0054551400000003'
+ ],
+ [
+ 1495709254.925,
+ '0.005319472937322171'
+ ],
+ [
+ 1495709314.925,
+ '0.00585677857142792'
+ ],
+ [
+ 1495709374.925,
+ '0.0062146261904759215'
+ ],
+ [
+ 1495709434.925,
+ '0.0067105060904182265'
+ ],
+ [
+ 1495709494.925,
+ '0.005829691904762108'
+ ],
+ [
+ 1495709554.925,
+ '0.005719280952381261'
+ ],
+ [
+ 1495709614.925,
+ '0.005682603793416407'
+ ],
+ [
+ 1495709674.925,
+ '0.0055272846277326934'
+ ],
+ [
+ 1495709734.925,
+ '0.0057123680952386735'
+ ],
+ [
+ 1495709794.925,
+ '0.00520597958075818'
+ ],
+ [
+ 1495709854.925,
+ '0.005584358957263837'
+ ],
+ [
+ 1495709914.925,
+ '0.005601104275197466'
+ ],
+ [
+ 1495709974.925,
+ '0.005991657142857066'
+ ],
+ [
+ 1495710034.925,
+ '0.00553722238095218'
+ ],
+ [
+ 1495710094.925,
+ '0.005127883122696293'
+ ],
+ [
+ 1495710154.925,
+ '0.005498111927534584'
+ ],
+ [
+ 1495710214.925,
+ '0.005609934069084202'
+ ],
+ [
+ 1495710274.925,
+ '0.00459206285714307'
+ ],
+ [
+ 1495710334.925,
+ '0.0047910828571428084'
+ ],
+ [
+ 1495710394.925,
+ '0.0056014671288845685'
+ ],
+ [
+ 1495710454.925,
+ '0.005686936791078528'
+ ],
+ [
+ 1495710514.925,
+ '0.00444480476190448'
+ ],
+ [
+ 1495710574.925,
+ '0.005780394696738921'
+ ],
+ [
+ 1495710634.925,
+ '0.0053107227550210365'
+ ],
+ [
+ 1495710694.925,
+ '0.005096031495761817'
+ ],
+ [
+ 1495710754.925,
+ '0.005451377979091524'
+ ],
+ [
+ 1495710814.925,
+ '0.005328136666667083'
+ ],
+ [
+ 1495710874.925,
+ '0.006020612857143043'
+ ],
+ [
+ 1495710934.925,
+ '0.0061063585714285365'
+ ],
+ [
+ 1495710994.925,
+ '0.006018346015752312'
+ ],
+ [
+ 1495711054.925,
+ '0.005069130952381193'
+ ],
+ [
+ 1495711114.925,
+ '0.005458406190476052'
+ ],
+ [
+ 1495711174.925,
+ '0.00577219190476179'
+ ],
+ [
+ 1495711234.925,
+ '0.005760814645658314'
+ ],
+ [
+ 1495711294.925,
+ '0.005371875716579101'
+ ],
+ [
+ 1495711354.925,
+ '0.0064232666666665834'
+ ],
+ [
+ 1495711414.925,
+ '0.009369806836906667'
+ ],
+ [
+ 1495711474.925,
+ '0.008956864761904692'
+ ],
+ [
+ 1495711534.925,
+ '0.005266849368559271'
+ ],
+ [
+ 1495711594.925,
+ '0.005335111364934262'
+ ],
+ [
+ 1495711654.925,
+ '0.006461778319586945'
+ ],
+ [
+ 1495711714.925,
+ '0.004687939890762393'
+ ],
+ [
+ 1495711774.925,
+ '0.004438831245760684'
+ ],
+ [
+ 1495711834.925,
+ '0.005142786666666613'
+ ],
+ [
+ 1495711894.925,
+ '0.007257734212054963'
+ ],
+ [
+ 1495711954.925,
+ '0.005621991904761494'
+ ],
+ [
+ 1495712014.925,
+ '0.007868689999999862'
+ ],
+ [
+ 1495712074.925,
+ '0.00910970215275738'
+ ],
+ [
+ 1495712134.925,
+ '0.006151004285714278'
+ ],
+ [
+ 1495712194.925,
+ '0.005447120924961522'
+ ],
+ [
+ 1495712254.925,
+ '0.005150705153929503'
+ ],
+ [
+ 1495712314.925,
+ '0.006358108714969314'
+ ],
+ [
+ 1495712374.925,
+ '0.0057725354795696475'
+ ],
+ [
+ 1495712434.925,
+ '0.005232139047619015'
+ ],
+ [
+ 1495712494.925,
+ '0.004932809617949037'
+ ],
+ [
+ 1495712554.925,
+ '0.004511607508499662'
+ ],
+ [
+ 1495712614.925,
+ '0.00440487701522666'
+ ],
+ [
+ 1495712674.925,
+ '0.005479113333333174'
+ ],
+ [
+ 1495712734.925,
+ '0.004726317619047547'
+ ],
+ [
+ 1495712794.925,
+ '0.005582041102958029'
+ ],
+ [
+ 1495712854.925,
+ '0.006381481216082099'
+ ],
+ [
+ 1495712914.925,
+ '0.005474260014095208'
+ ],
+ [
+ 1495712974.925,
+ '0.00567597142857188'
+ ],
+ [
+ 1495713034.925,
+ '0.0064741233333332985'
+ ],
+ [
+ 1495713094.925,
+ '0.005467475714285271'
+ ],
+ [
+ 1495713154.925,
+ '0.004868648393824457'
+ ],
+ [
+ 1495713214.925,
+ '0.005254923286444893'
+ ],
+ [
+ 1495713274.925,
+ '0.005599217150312865'
+ ],
+ [
+ 1495713334.925,
+ '0.005105413720618919'
+ ],
+ [
+ 1495713394.925,
+ '0.007246073333333279'
+ ],
+ [
+ 1495713454.925,
+ '0.005990312380952272'
+ ],
+ [
+ 1495713514.925,
+ '0.005594601853351101'
+ ],
+ [
+ 1495713574.925,
+ '0.004739258673727054'
+ ],
+ [
+ 1495713634.925,
+ '0.003932121428571783'
+ ],
+ [
+ 1495713694.925,
+ '0.005018188268459395'
+ ],
+ [
+ 1495713754.925,
+ '0.004538238095237985'
+ ],
+ [
+ 1495713814.925,
+ '0.00561816643265435'
+ ],
+ [
+ 1495713874.925,
+ '0.0063132584495033586'
+ ],
+ [
+ 1495713934.925,
+ '0.00442385238095213'
+ ],
+ [
+ 1495713994.925,
+ '0.004181795887658453'
+ ],
+ [
+ 1495714054.925,
+ '0.004437759047619037'
+ ],
+ [
+ 1495714114.925,
+ '0.006421748157178241'
+ ],
+ [
+ 1495714174.925,
+ '0.006525143809523842'
+ ],
+ [
+ 1495714234.925,
+ '0.004715904935144247'
+ ],
+ [
+ 1495714294.925,
+ '0.005966040152763461'
+ ],
+ [
+ 1495714354.925,
+ '0.005614535466921674'
+ ],
+ [
+ 1495714414.925,
+ '0.004934375119415906'
+ ],
+ [
+ 1495714474.925,
+ '0.0054122933333327385'
+ ],
+ [
+ 1495714534.925,
+ '0.004926540699612279'
+ ],
+ [
+ 1495714594.925,
+ '0.006124649517134237'
+ ],
+ [
+ 1495714654.925,
+ '0.004629427092013995'
+ ],
+ [
+ 1495714714.925,
+ '0.005117951257607005'
+ ],
+ [
+ 1495714774.925,
+ '0.004868774512685422'
+ ],
+ [
+ 1495714834.925,
+ '0.005310093333333399'
+ ],
+ [
+ 1495714894.925,
+ '0.0054907752286127345'
+ ],
+ [
+ 1495714954.925,
+ '0.004597678117351089'
+ ],
+ [
+ 1495715014.925,
+ '0.0059622552380952'
+ ],
+ [
+ 1495715074.925,
+ '0.005352457072655368'
+ ],
+ [
+ 1495715134.925,
+ '0.005491630952381143'
+ ],
+ [
+ 1495715194.925,
+ '0.006391770078379791'
+ ],
+ [
+ 1495715254.925,
+ '0.005933472857142518'
+ ],
+ [
+ 1495715314.925,
+ '0.005301314285714163'
+ ],
+ [
+ 1495715374.925,
+ '0.0058352959724814165'
+ ],
+ [
+ 1495715434.925,
+ '0.006154755147867044'
+ ],
+ [
+ 1495715494.925,
+ '0.009391935637482038'
+ ],
+ [
+ 1495715554.925,
+ '0.007846462857142592'
+ ],
+ [
+ 1495715614.925,
+ '0.00477608215316353'
+ ],
+ [
+ 1495715674.925,
+ '0.006132865238094998'
+ ],
+ [
+ 1495715734.925,
+ '0.006159762457649516'
+ ],
+ [
+ 1495715794.925,
+ '0.005957307073265968'
+ ],
+ [
+ 1495715854.925,
+ '0.006652319091792501'
+ ],
+ [
+ 1495715914.925,
+ '0.005493557402895287'
+ ],
+ [
+ 1495715974.925,
+ '0.0058652434829145166'
+ ],
+ [
+ 1495716034.925,
+ '0.005627400430468021'
+ ],
+ [
+ 1495716094.925,
+ '0.006240656190475609'
+ ],
+ [
+ 1495716154.925,
+ '0.006305997676168624'
+ ],
+ [
+ 1495716214.925,
+ '0.005388057732783248'
+ ],
+ [
+ 1495716274.925,
+ '0.0052814916048421244'
+ ],
+ [
+ 1495716334.925,
+ '0.00699498614272497'
+ ],
+ [
+ 1495716394.925,
+ '0.00627768693035141'
+ ],
+ [
+ 1495716454.925,
+ '0.0042411487048161145'
+ ],
+ [
+ 1495716514.925,
+ '0.005348647473627653'
+ ],
+ [
+ 1495716574.925,
+ '0.0047176657142853975'
+ ],
+ [
+ 1495716634.925,
+ '0.004437898571428686'
+ ],
+ [
+ 1495716694.925,
+ '0.004923527366927261'
+ ],
+ [
+ 1495716754.925,
+ '0.005131935066048421'
+ ],
+ [
+ 1495716814.925,
+ '0.005046949523809611'
+ ],
+ [
+ 1495716874.925,
+ '0.00547184095238092'
+ ],
+ [
+ 1495716934.925,
+ '0.005224140016380444'
+ ],
+ [
+ 1495716994.925,
+ '0.005297991171665292'
+ ],
+ [
+ 1495717054.925,
+ '0.005492965995623498'
+ ],
+ [
+ 1495717114.925,
+ '0.005754660000000403'
+ ],
+ [
+ 1495717174.925,
+ '0.005949557138639285'
+ ],
+ [
+ 1495717234.925,
+ '0.006091816112534666'
+ ],
+ [
+ 1495717294.925,
+ '0.005554210080192063'
+ ],
+ [
+ 1495717354.925,
+ '0.006411504395279871'
+ ],
+ [
+ 1495717414.925,
+ '0.006319643996609606'
+ ],
+ [
+ 1495717474.925,
+ '0.005539174405717675'
+ ],
+ [
+ 1495717534.925,
+ '0.0053157078842772255'
+ ],
+ [
+ 1495717594.925,
+ '0.005247480952381066'
+ ],
+ [
+ 1495717654.925,
+ '0.004820141620396252'
+ ],
+ [
+ 1495717714.925,
+ '0.005906173868322844'
+ ],
+ [
+ 1495717774.925,
+ '0.006173117219570961'
+ ],
+ [
+ 1495717834.925,
+ '0.005963340952380661'
+ ],
+ [
+ 1495717894.925,
+ '0.005698976627681527'
+ ],
+ [
+ 1495717954.925,
+ '0.004751279096346378'
+ ],
+ [
+ 1495718014.925,
+ '0.005733142379359711'
+ ],
+ [
+ 1495718074.925,
+ '0.004831689010348035'
+ ],
+ [
+ 1495718134.925,
+ '0.005188370476191092'
+ ],
+ [
+ 1495718194.925,
+ '0.004793227554547938'
+ ],
+ [
+ 1495718254.925,
+ '0.003997442857142731'
+ ],
+ [
+ 1495718314.925,
+ '0.004386040132951264'
+ ]
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ 'last_update': '2017-05-25T13:18:34.949Z'
+};
+
+export default metricsGroupsAPIResponse;
+
+const responseMockData = {
+ 'GET': {
+ '/root/hello-prometheus/environments/30/additional_metrics.json': metricsGroupsAPIResponse,
+    'http://test.host/frontend-fixtures/environments-project/environments/1/additional_metrics.json': metricsGroupsAPIResponse, // TODO: Make sure this works in the monitoring_bundle_spec
+ },
+};
+
+export const deploymentData = [
+ {
+ id: 111,
+ iid: 3,
+ sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ ref: {
+ name: 'master'
+ },
+ created_at: '2017-05-31T21:23:37.881Z',
+ tag: false,
+ 'last?': true
+ },
+ {
+ id: 110,
+ iid: 2,
+ sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ ref: {
+ name: 'master'
+ },
+ created_at: '2017-05-30T20:08:04.629Z',
+ tag: false,
+ 'last?': false
+ },
+ {
+ id: 109,
+ iid: 1,
+ sha: '6511e58faafaa7ad2228990ec57f19d66f7db7c2',
+ ref: {
+ name: 'update2-readme'
+ },
+ created_at: '2017-05-30T17:42:38.409Z',
+ tag: false,
+ 'last?': false
+ }
+];
+
+export const statePaths = {
+ settingsPath: '/root/hello-prometheus/services/prometheus/edit',
+ documentationPath: '/help/administration/monitoring/prometheus/index.md',
+};
+
+export const singleRowMetrics = [
+ {
+ 'title': 'CPU usage',
+ 'weight': 1,
+ 'y_label': 'Memory',
+ 'queries': [
+ {
+ 'query_range': 'avg(rate(container_cpu_usage_seconds_total{%{environment_filter}}[2m])) * 100',
+ 'result': [
+ {
+ 'metric': {
+
+ },
+ 'values': [
+ {
+ 'time': '2017-06-04T21:22:59.508Z',
+ 'value': '0.06335544298150002'
+ },
+ {
+ 'time': '2017-06-04T21:23:59.508Z',
+ 'value': '0.0420347312480917'
+ },
+ {
+ 'time': '2017-06-04T21:24:59.508Z',
+ 'value': '0.0023175131665412706'
+ },
+ {
+ 'time': '2017-06-04T21:25:59.508Z',
+ 'value': '0.002315870476190476'
+ },
+ {
+ 'time': '2017-06-04T21:26:59.508Z',
+ 'value': '0.0025005961904761894'
+ },
+ {
+ 'time': '2017-06-04T21:27:59.508Z',
+ 'value': '0.0024612605834341264'
+ },
+ {
+ 'time': '2017-06-04T21:28:59.508Z',
+ 'value': '0.002313129398767631'
+ },
+ {
+ 'time': '2017-06-04T21:29:59.508Z',
+ 'value': '0.002411067353663882'
+ },
+ {
+ 'time': '2017-06-04T21:30:59.508Z',
+ 'value': '0.002577309263721303'
+ },
+ {
+ 'time': '2017-06-04T21:31:59.508Z',
+ 'value': '0.00242688307730403'
+ },
+ {
+ 'time': '2017-06-04T21:32:59.508Z',
+ 'value': '0.0024168360301330457'
+ },
+ {
+ 'time': '2017-06-04T21:33:59.508Z',
+ 'value': '0.0020449528090743714'
+ },
+ {
+ 'time': '2017-06-04T21:34:59.508Z',
+ 'value': '0.0019149619047619036'
+ },
+ {
+ 'time': '2017-06-04T21:35:59.508Z',
+ 'value': '0.0024491714364625094'
+ },
+ {
+ 'time': '2017-06-04T21:36:59.508Z',
+ 'value': '0.002728773131172677'
+ },
+ {
+ 'time': '2017-06-04T21:37:59.508Z',
+ 'value': '0.0028439119047618997'
+ },
+ {
+ 'time': '2017-06-04T21:38:59.508Z',
+ 'value': '0.0026307480952380917'
+ },
+ {
+ 'time': '2017-06-04T21:39:59.508Z',
+ 'value': '0.0025024842620546446'
+ },
+ {
+ 'time': '2017-06-04T21:40:59.508Z',
+ 'value': '0.002300662387260825'
+ },
+ {
+ 'time': '2017-06-04T21:41:59.508Z',
+ 'value': '0.002052890924848337'
+ },
+ {
+ 'time': '2017-06-04T21:42:59.508Z',
+ 'value': '0.0023711195238095275'
+ },
+ {
+ 'time': '2017-06-04T21:43:59.508Z',
+ 'value': '0.002513477619047618'
+ },
+ {
+ 'time': '2017-06-04T21:44:59.508Z',
+ 'value': '0.0023489776287844897'
+ },
+ {
+ 'time': '2017-06-04T21:45:59.508Z',
+ 'value': '0.002542572310212481'
+ },
+ {
+ 'time': '2017-06-04T21:46:59.508Z',
+ 'value': '0.0024579470671707952'
+ },
+ {
+ 'time': '2017-06-04T21:47:59.508Z',
+ 'value': '0.0028725150236664403'
+ },
+ {
+ 'time': '2017-06-04T21:48:59.508Z',
+ 'value': '0.0024356089105610525'
+ },
+ {
+ 'time': '2017-06-04T21:49:59.508Z',
+ 'value': '0.002544015828269929'
+ },
+ {
+ 'time': '2017-06-04T21:50:59.508Z',
+ 'value': '0.0029595013380824906'
+ },
+ {
+ 'time': '2017-06-04T21:51:59.508Z',
+ 'value': '0.0023084015085858'
+ },
+ {
+ 'time': '2017-06-04T21:52:59.508Z',
+ 'value': '0.0021070500000000083'
+ },
+ {
+ 'time': '2017-06-04T21:53:59.508Z',
+ 'value': '0.0022950066191106617'
+ },
+ {
+ 'time': '2017-06-04T21:54:59.508Z',
+ 'value': '0.002492719454470995'
+ },
+ {
+ 'time': '2017-06-04T21:55:59.508Z',
+ 'value': '0.00244312761904762'
+ },
+ {
+ 'time': '2017-06-04T21:56:59.508Z',
+ 'value': '0.0023495500000000028'
+ },
+ {
+ 'time': '2017-06-04T21:57:59.508Z',
+ 'value': '0.0020597072353070005'
+ },
+ {
+ 'time': '2017-06-04T21:58:59.508Z',
+ 'value': '0.0021482352044800866'
+ },
+ {
+ 'time': '2017-06-04T21:59:59.508Z',
+ 'value': '0.002333490000000004'
+ },
+ {
+ 'time': '2017-06-04T22:00:59.508Z',
+ 'value': '0.0025899442857142815'
+ },
+ {
+ 'time': '2017-06-04T22:01:59.508Z',
+ 'value': '0.002430299999999999'
+ },
+ {
+ 'time': '2017-06-04T22:02:59.508Z',
+ 'value': '0.0023550328092113476'
+ },
+ {
+ 'time': '2017-06-04T22:03:59.508Z',
+ 'value': '0.0026521871636872793'
+ },
+ {
+ 'time': '2017-06-04T22:04:59.508Z',
+ 'value': '0.0023080671428571398'
+ },
+ {
+ 'time': '2017-06-04T22:05:59.508Z',
+ 'value': '0.0024108401032390896'
+ },
+ {
+ 'time': '2017-06-04T22:06:59.508Z',
+ 'value': '0.002433249366678738'
+ },
+ {
+ 'time': '2017-06-04T22:07:59.508Z',
+ 'value': '0.0023242202306688682'
+ },
+ {
+ 'time': '2017-06-04T22:08:59.508Z',
+ 'value': '0.002388222857142859'
+ },
+ {
+ 'time': '2017-06-04T22:09:59.508Z',
+ 'value': '0.002115974914046794'
+ },
+ {
+ 'time': '2017-06-04T22:10:59.508Z',
+ 'value': '0.0025090043331269917'
+ },
+ {
+ 'time': '2017-06-04T22:11:59.508Z',
+ 'value': '0.002445507057277277'
+ },
+ {
+ 'time': '2017-06-04T22:12:59.508Z',
+ 'value': '0.0026348773751130976'
+ },
+ {
+ 'time': '2017-06-04T22:13:59.508Z',
+ 'value': '0.0025616258583088104'
+ },
+ {
+ 'time': '2017-06-04T22:14:59.508Z',
+ 'value': '0.0021544093415751505'
+ },
+ {
+ 'time': '2017-06-04T22:15:59.508Z',
+ 'value': '0.002649394767668881'
+ },
+ {
+ 'time': '2017-06-04T22:16:59.508Z',
+ 'value': '0.0024023332666685705'
+ },
+ {
+ 'time': '2017-06-04T22:17:59.508Z',
+ 'value': '0.0025444105294235306'
+ },
+ {
+ 'time': '2017-06-04T22:18:59.508Z',
+ 'value': '0.0027298872305772806'
+ },
+ {
+ 'time': '2017-06-04T22:19:59.508Z',
+ 'value': '0.0022880104956379287'
+ },
+ {
+ 'time': '2017-06-04T22:20:59.508Z',
+ 'value': '0.002473246666666661'
+ },
+ {
+ 'time': '2017-06-04T22:21:59.508Z',
+ 'value': '0.002259948381935587'
+ },
+ {
+ 'time': '2017-06-04T22:22:59.508Z',
+ 'value': '0.0025778470886268835'
+ },
+ {
+ 'time': '2017-06-04T22:23:59.508Z',
+ 'value': '0.002246127910852894'
+ },
+ {
+ 'time': '2017-06-04T22:24:59.508Z',
+ 'value': '0.0020697466666666758'
+ },
+ {
+ 'time': '2017-06-04T22:25:59.508Z',
+ 'value': '0.00225859722473547'
+ },
+ {
+ 'time': '2017-06-04T22:26:59.508Z',
+ 'value': '0.0026466728254554814'
+ },
+ {
+ 'time': '2017-06-04T22:27:59.508Z',
+ 'value': '0.002151247619047619'
+ },
+ {
+ 'time': '2017-06-04T22:28:59.508Z',
+ 'value': '0.002324161444543914'
+ },
+ {
+ 'time': '2017-06-04T22:29:59.508Z',
+ 'value': '0.002476474313796452'
+ },
+ {
+ 'time': '2017-06-04T22:30:59.508Z',
+ 'value': '0.0023922184232080517'
+ },
+ {
+ 'time': '2017-06-04T22:31:59.508Z',
+ 'value': '0.0025094934237468933'
+ },
+ {
+ 'time': '2017-06-04T22:32:59.508Z',
+ 'value': '0.0025665311098200883'
+ },
+ {
+ 'time': '2017-06-04T22:33:59.508Z',
+ 'value': '0.0024154900681661374'
+ },
+ {
+ 'time': '2017-06-04T22:34:59.508Z',
+ 'value': '0.0023267450166192037'
+ },
+ {
+ 'time': '2017-06-04T22:35:59.508Z',
+ 'value': '0.002156521904761904'
+ },
+ {
+ 'time': '2017-06-04T22:36:59.508Z',
+ 'value': '0.0025474356898637007'
+ },
+ {
+ 'time': '2017-06-04T22:37:59.508Z',
+ 'value': '0.0025989409624670233'
+ },
+ {
+ 'time': '2017-06-04T22:38:59.508Z',
+ 'value': '0.002348336664762987'
+ },
+ {
+ 'time': '2017-06-04T22:39:59.508Z',
+ 'value': '0.002665888246554726'
+ },
+ {
+ 'time': '2017-06-04T22:40:59.508Z',
+ 'value': '0.002652684787474174'
+ },
+ {
+ 'time': '2017-06-04T22:41:59.508Z',
+ 'value': '0.002472620430865355'
+ },
+ {
+ 'time': '2017-06-04T22:42:59.508Z',
+ 'value': '0.0020616469210110247'
+ },
+ {
+ 'time': '2017-06-04T22:43:59.508Z',
+ 'value': '0.0022434546372311934'
+ },
+ {
+ 'time': '2017-06-04T22:44:59.508Z',
+ 'value': '0.0024469386784827982'
+ },
+ {
+ 'time': '2017-06-04T22:45:59.508Z',
+ 'value': '0.0026192823809523787'
+ },
+ {
+ 'time': '2017-06-04T22:46:59.508Z',
+ 'value': '0.003451999542852798'
+ },
+ {
+ 'time': '2017-06-04T22:47:59.508Z',
+ 'value': '0.0031780314285714288'
+ },
+ {
+ 'time': '2017-06-04T22:48:59.508Z',
+ 'value': '0.0024403352380952415'
+ },
+ {
+ 'time': '2017-06-04T22:49:59.508Z',
+ 'value': '0.001998824761904764'
+ },
+ {
+ 'time': '2017-06-04T22:50:59.508Z',
+ 'value': '0.0023792404761904806'
+ },
+ {
+ 'time': '2017-06-04T22:51:59.508Z',
+ 'value': '0.002725906190476185'
+ },
+ {
+ 'time': '2017-06-04T22:52:59.508Z',
+ 'value': '0.0020989528671155624'
+ },
+ {
+ 'time': '2017-06-04T22:53:59.508Z',
+ 'value': '0.00228808226745016'
+ },
+ {
+ 'time': '2017-06-04T22:54:59.508Z',
+ 'value': '0.0019860807413192147'
+ },
+ {
+ 'time': '2017-06-04T22:55:59.508Z',
+ 'value': '0.0022698085714285897'
+ },
+ {
+ 'time': '2017-06-04T22:56:59.508Z',
+ 'value': '0.0022839098467604415'
+ },
+ {
+ 'time': '2017-06-04T22:57:59.508Z',
+ 'value': '0.002531114761904749'
+ },
+ {
+ 'time': '2017-06-04T22:58:59.508Z',
+ 'value': '0.0028941072550999016'
+ },
+ {
+ 'time': '2017-06-04T22:59:59.508Z',
+ 'value': '0.002547169523809506'
+ },
+ {
+ 'time': '2017-06-04T23:00:59.508Z',
+ 'value': '0.0024062999999999958'
+ },
+ {
+ 'time': '2017-06-04T23:01:59.508Z',
+ 'value': '0.0026939518471604386'
+ },
+ {
+ 'time': '2017-06-04T23:02:59.508Z',
+ 'value': '0.002362901428571429'
+ },
+ {
+ 'time': '2017-06-04T23:03:59.508Z',
+ 'value': '0.002663927142857154'
+ },
+ {
+ 'time': '2017-06-04T23:04:59.508Z',
+ 'value': '0.0026173314285714354'
+ },
+ {
+ 'time': '2017-06-04T23:05:59.508Z',
+ 'value': '0.002326527366406044'
+ },
+ {
+ 'time': '2017-06-04T23:06:59.508Z',
+ 'value': '0.002035313809523809'
+ },
+ {
+ 'time': '2017-06-04T23:07:59.508Z',
+ 'value': '0.002421447414786533'
+ },
+ {
+ 'time': '2017-06-04T23:08:59.508Z',
+ 'value': '0.002898313809523804'
+ },
+ {
+ 'time': '2017-06-04T23:09:59.508Z',
+ 'value': '0.002544891856112907'
+ },
+ {
+ 'time': '2017-06-04T23:10:59.508Z',
+ 'value': '0.002290625356938882'
+ },
+ {
+ 'time': '2017-06-04T23:11:59.508Z',
+ 'value': '0.002483028095238096'
+ },
+ {
+ 'time': '2017-06-04T23:12:59.508Z',
+ 'value': '0.0023396832350784237'
+ },
+ {
+ 'time': '2017-06-04T23:13:59.508Z',
+ 'value': '0.002085529248176153'
+ },
+ {
+ 'time': '2017-06-04T23:14:59.508Z',
+ 'value': '0.0022417815068428012'
+ },
+ {
+ 'time': '2017-06-04T23:15:59.508Z',
+ 'value': '0.002660293333333341'
+ },
+ {
+ 'time': '2017-06-04T23:16:59.508Z',
+ 'value': '0.0029845149093818226'
+ },
+ {
+ 'time': '2017-06-04T23:17:59.508Z',
+ 'value': '0.0027716655079475464'
+ },
+ {
+ 'time': '2017-06-04T23:18:59.508Z',
+ 'value': '0.0025217708908741128'
+ },
+ {
+ 'time': '2017-06-04T23:19:59.508Z',
+ 'value': '0.0025811235131094055'
+ },
+ {
+ 'time': '2017-06-04T23:20:59.508Z',
+ 'value': '0.002209904761904762'
+ },
+ {
+ 'time': '2017-06-04T23:21:59.508Z',
+ 'value': '0.0025053322926383344'
+ },
+ {
+ 'time': '2017-06-04T23:22:59.508Z',
+ 'value': '0.002350917636526411'
+ },
+ {
+ 'time': '2017-06-04T23:23:59.508Z',
+ 'value': '0.0018477500000000078'
+ },
+ {
+ 'time': '2017-06-04T23:24:59.508Z',
+ 'value': '0.002427629523809527'
+ },
+ {
+ 'time': '2017-06-04T23:25:59.508Z',
+ 'value': '0.0019305498147601655'
+ },
+ {
+ 'time': '2017-06-04T23:26:59.508Z',
+ 'value': '0.002097250000000006'
+ },
+ {
+ 'time': '2017-06-04T23:27:59.508Z',
+ 'value': '0.002675020952780041'
+ },
+ {
+ 'time': '2017-06-04T23:28:59.508Z',
+ 'value': '0.0023142214285714374'
+ },
+ {
+ 'time': '2017-06-04T23:29:59.508Z',
+ 'value': '0.0023644723809523737'
+ },
+ {
+ 'time': '2017-06-04T23:30:59.508Z',
+ 'value': '0.002108696190476198'
+ },
+ {
+ 'time': '2017-06-04T23:31:59.508Z',
+ 'value': '0.0019918289697997194'
+ },
+ {
+ 'time': '2017-06-04T23:32:59.508Z',
+ 'value': '0.001583584285714283'
+ },
+ {
+ 'time': '2017-06-04T23:33:59.508Z',
+ 'value': '0.002073770226383112'
+ },
+ {
+ 'time': '2017-06-04T23:34:59.508Z',
+ 'value': '0.0025877664234966818'
+ },
+ {
+ 'time': '2017-06-04T23:35:59.508Z',
+ 'value': '0.0021138238095238147'
+ },
+ {
+ 'time': '2017-06-04T23:36:59.508Z',
+ 'value': '0.0022140838095238303'
+ },
+ {
+ 'time': '2017-06-04T23:37:59.508Z',
+ 'value': '0.0018592674425248847'
+ },
+ {
+ 'time': '2017-06-04T23:38:59.508Z',
+ 'value': '0.0020461969533657016'
+ },
+ {
+ 'time': '2017-06-04T23:39:59.508Z',
+ 'value': '0.0021593628571428543'
+ },
+ {
+ 'time': '2017-06-04T23:40:59.508Z',
+ 'value': '0.0024330682564928188'
+ },
+ {
+ 'time': '2017-06-04T23:41:59.508Z',
+ 'value': '0.0021501804779093174'
+ },
+ {
+ 'time': '2017-06-04T23:42:59.508Z',
+ 'value': '0.0025787493928397945'
+ },
+ {
+ 'time': '2017-06-04T23:43:59.508Z',
+ 'value': '0.002593657082448396'
+ },
+ {
+ 'time': '2017-06-04T23:44:59.508Z',
+ 'value': '0.0021316752380952306'
+ },
+ {
+ 'time': '2017-06-04T23:45:59.508Z',
+ 'value': '0.0026972905019952086'
+ },
+ {
+ 'time': '2017-06-04T23:46:59.508Z',
+ 'value': '0.002580250764292983'
+ },
+ {
+ 'time': '2017-06-04T23:47:59.508Z',
+ 'value': '0.00227103000000001'
+ },
+ {
+ 'time': '2017-06-04T23:48:59.508Z',
+ 'value': '0.0023678515647321146'
+ },
+ {
+ 'time': '2017-06-04T23:49:59.508Z',
+ 'value': '0.002371472857142866'
+ },
+ {
+ 'time': '2017-06-04T23:50:59.508Z',
+ 'value': '0.0026181353688500978'
+ },
+ {
+ 'time': '2017-06-04T23:51:59.508Z',
+ 'value': '0.0025609667711121217'
+ },
+ {
+ 'time': '2017-06-04T23:52:59.508Z',
+ 'value': '0.0027145308139922557'
+ },
+ {
+ 'time': '2017-06-04T23:53:59.508Z',
+ 'value': '0.0024249397613310512'
+ },
+ {
+ 'time': '2017-06-04T23:54:59.508Z',
+ 'value': '0.002399907142857147'
+ },
+ {
+ 'time': '2017-06-04T23:55:59.508Z',
+ 'value': '0.0024753357142857195'
+ },
+ {
+ 'time': '2017-06-04T23:56:59.508Z',
+ 'value': '0.0026179149325231575'
+ },
+ {
+ 'time': '2017-06-04T23:57:59.508Z',
+ 'value': '0.0024261340368186956'
+ },
+ {
+ 'time': '2017-06-04T23:58:59.508Z',
+ 'value': '0.0021061071428571517'
+ },
+ {
+ 'time': '2017-06-04T23:59:59.508Z',
+ 'value': '0.0024033971105037015'
+ },
+ {
+ 'time': '2017-06-05T00:00:59.508Z',
+ 'value': '0.0028287676190475956'
+ },
+ {
+ 'time': '2017-06-05T00:01:59.508Z',
+ 'value': '0.002499719050294778'
+ },
+ {
+ 'time': '2017-06-05T00:02:59.508Z',
+ 'value': '0.0026726102153353856'
+ },
+ {
+ 'time': '2017-06-05T00:03:59.508Z',
+ 'value': '0.00262582619047618'
+ },
+ {
+ 'time': '2017-06-05T00:04:59.508Z',
+ 'value': '0.002280473147363316'
+ },
+ {
+ 'time': '2017-06-05T00:05:59.508Z',
+ 'value': '0.002095581470652675'
+ },
+ {
+ 'time': '2017-06-05T00:06:59.508Z',
+ 'value': '0.002270768490828408'
+ },
+ {
+ 'time': '2017-06-05T00:07:59.508Z',
+ 'value': '0.002728577415023017'
+ },
+ {
+ 'time': '2017-06-05T00:08:59.508Z',
+ 'value': '0.002652512857142863'
+ },
+ {
+ 'time': '2017-06-05T00:09:59.508Z',
+ 'value': '0.0022781033924455674'
+ },
+ {
+ 'time': '2017-06-05T00:10:59.508Z',
+ 'value': '0.0025345038095238234'
+ },
+ {
+ 'time': '2017-06-05T00:11:59.508Z',
+ 'value': '0.002376050020000397'
+ },
+ {
+ 'time': '2017-06-05T00:12:59.508Z',
+ 'value': '0.002455068143506122'
+ },
+ {
+ 'time': '2017-06-05T00:13:59.508Z',
+ 'value': '0.002826705714285719'
+ },
+ {
+ 'time': '2017-06-05T00:14:59.508Z',
+ 'value': '0.002343833692070314'
+ },
+ {
+ 'time': '2017-06-05T00:15:59.508Z',
+ 'value': '0.00264853297122164'
+ },
+ {
+ 'time': '2017-06-05T00:16:59.508Z',
+ 'value': '0.0027656335117426257'
+ },
+ {
+ 'time': '2017-06-05T00:17:59.508Z',
+ 'value': '0.0025896543842439564'
+ },
+ {
+ 'time': '2017-06-05T00:18:59.508Z',
+ 'value': '0.002180053237081201'
+ },
+ {
+ 'time': '2017-06-05T00:19:59.508Z',
+ 'value': '0.002475245002333342'
+ },
+ {
+ 'time': '2017-06-05T00:20:59.508Z',
+ 'value': '0.0027559767805101065'
+ },
+ {
+ 'time': '2017-06-05T00:21:59.508Z',
+ 'value': '0.0022294836141296607'
+ },
+ {
+ 'time': '2017-06-05T00:22:59.508Z',
+ 'value': '0.0021383590476190643'
+ },
+ {
+ 'time': '2017-06-05T00:23:59.508Z',
+ 'value': '0.002085417956361494'
+ },
+ {
+ 'time': '2017-06-05T00:24:59.508Z',
+ 'value': '0.0024140319047619013'
+ },
+ {
+ 'time': '2017-06-05T00:25:59.508Z',
+ 'value': '0.0024513114285714304'
+ },
+ {
+ 'time': '2017-06-05T00:26:59.508Z',
+ 'value': '0.0026932152380952446'
+ },
+ {
+ 'time': '2017-06-05T00:27:59.508Z',
+ 'value': '0.0022656844350898517'
+ },
+ {
+ 'time': '2017-06-05T00:28:59.508Z',
+ 'value': '0.0024483785714285704'
+ },
+ {
+ 'time': '2017-06-05T00:29:59.508Z',
+ 'value': '0.002559505804817207'
+ },
+ {
+ 'time': '2017-06-05T00:30:59.508Z',
+ 'value': '0.0019485681088751649'
+ },
+ {
+ 'time': '2017-06-05T00:31:59.508Z',
+ 'value': '0.00228367984456996'
+ },
+ {
+ 'time': '2017-06-05T00:32:59.508Z',
+ 'value': '0.002522149047619049'
+ },
+ {
+ 'time': '2017-06-05T00:33:59.508Z',
+ 'value': '0.0026860117715406737'
+ },
+ {
+ 'time': '2017-06-05T00:34:59.508Z',
+ 'value': '0.002679669523809523'
+ },
+ {
+ 'time': '2017-06-05T00:35:59.508Z',
+ 'value': '0.0022201920970675937'
+ },
+ {
+ 'time': '2017-06-05T00:36:59.508Z',
+ 'value': '0.0022917647619047615'
+ },
+ {
+ 'time': '2017-06-05T00:37:59.508Z',
+ 'value': '0.0021774059294673576'
+ },
+ {
+ 'time': '2017-06-05T00:38:59.508Z',
+ 'value': '0.0024637766666666763'
+ },
+ {
+ 'time': '2017-06-05T00:39:59.508Z',
+ 'value': '0.002470468290174195'
+ },
+ {
+ 'time': '2017-06-05T00:40:59.508Z',
+ 'value': '0.0022188616082057812'
+ },
+ {
+ 'time': '2017-06-05T00:41:59.508Z',
+ 'value': '0.002421840744373875'
+ },
+ {
+ 'time': '2017-06-05T00:42:59.508Z',
+ 'value': '0.0023918266666666547'
+ },
+ {
+ 'time': '2017-06-05T00:43:59.508Z',
+ 'value': '0.002195743809523809'
+ },
+ {
+ 'time': '2017-06-05T00:44:59.508Z',
+ 'value': '0.0025514828571428687'
+ },
+ {
+ 'time': '2017-06-05T00:45:59.508Z',
+ 'value': '0.0027981709349612694'
+ },
+ {
+ 'time': '2017-06-05T00:46:59.508Z',
+ 'value': '0.002557977142857146'
+ },
+ {
+ 'time': '2017-06-05T00:47:59.508Z',
+ 'value': '0.002213244285714286'
+ },
+ {
+ 'time': '2017-06-05T00:48:59.508Z',
+ 'value': '0.0025706738095238046'
+ },
+ {
+ 'time': '2017-06-05T00:49:59.508Z',
+ 'value': '0.002210976666666671'
+ },
+ {
+ 'time': '2017-06-05T00:50:59.508Z',
+ 'value': '0.002055377091646749'
+ },
+ {
+ 'time': '2017-06-05T00:51:59.508Z',
+ 'value': '0.002308368095238119'
+ },
+ {
+ 'time': '2017-06-05T00:52:59.508Z',
+ 'value': '0.0024687939885141615'
+ },
+ {
+ 'time': '2017-06-05T00:53:59.508Z',
+ 'value': '0.002563018571428578'
+ },
+ {
+ 'time': '2017-06-05T00:54:59.508Z',
+ 'value': '0.00240563291078959'
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ {
+ 'title': 'Memory usage',
+ 'weight': 1,
+ 'y_label': 'Values',
+ 'queries': [
+ {
+ 'query_range': 'avg(container_memory_usage_bytes{%{environment_filter}}) / 2^20',
+ 'label': 'Container memory',
+ 'unit': 'MiB',
+ 'result': [
+ {
+ 'metric': {
+
+ },
+ 'values': [
+ {
+ 'time': '2017-06-04T21:22:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:23:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:24:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:25:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:26:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:27:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:28:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:29:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:30:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:31:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:32:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:33:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:34:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:35:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:36:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:37:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:38:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:39:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:40:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:41:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:42:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:43:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:44:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:45:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:46:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:47:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:48:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:49:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:50:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:51:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:52:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:53:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:54:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:55:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:56:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:57:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:58:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T21:59:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:00:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:01:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:02:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:03:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:04:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:05:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:06:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:07:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:08:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:09:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:10:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:11:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:12:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:13:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:14:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:15:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:16:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:17:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:18:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:19:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:20:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:21:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:22:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:23:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:24:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:25:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:26:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:27:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:28:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:29:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:30:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:31:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:32:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:33:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:34:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:35:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:36:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:37:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:38:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:39:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:40:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:41:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:42:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:43:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:44:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:45:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:46:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:47:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:48:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:49:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:50:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:51:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:52:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:53:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:54:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:55:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:56:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:57:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:58:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T22:59:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:00:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:01:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:02:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:03:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:04:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:05:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:06:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:07:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:08:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:09:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:10:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:11:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:12:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:13:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:14:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:15:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:16:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:17:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:18:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:19:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:20:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:21:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:22:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:23:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:24:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:25:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:26:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:27:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:28:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:29:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:30:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:31:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:32:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:33:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:34:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:35:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:36:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:37:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:38:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:39:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:40:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:41:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:42:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:43:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:44:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:45:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:46:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:47:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:48:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:49:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:50:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:51:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:52:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:53:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:54:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:55:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:56:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:57:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:58:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-04T23:59:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:00:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:01:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:02:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:03:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:04:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:05:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:06:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:07:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:08:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:09:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:10:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:11:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:12:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:13:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:14:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:15:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:16:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:17:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:18:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:19:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:20:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:21:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:22:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:23:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:24:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:25:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:26:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:27:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:28:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:29:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:30:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:31:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:32:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:33:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:34:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:35:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:36:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:37:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:38:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:39:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:40:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:41:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:42:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:43:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:44:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:45:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:46:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:47:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:48:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:49:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:50:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:51:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:52:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:53:59.508Z',
+ 'value': '15.0859375'
+ },
+ {
+ 'time': '2017-06-05T00:54:59.508Z',
+ 'value': '15.0859375'
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+];
+
+export function MonitorMockInterceptor(request, next) {
+ const body = responseMockData[request.method.toUpperCase()][request.url];
+
+ next(request.respondWith(JSON.stringify(body), {
+ status: 200,
+ }));
+}
diff --git a/spec/javascripts/monitoring/monitoring_column_spec.js b/spec/javascripts/monitoring/monitoring_column_spec.js
new file mode 100644
index 00000000000..b3bc97adc9f
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_column_spec.js
@@ -0,0 +1,108 @@
+import Vue from 'vue';
+import _ from 'underscore';
+import MonitoringColumn from '~/monitoring/components/monitoring_column.vue';
+import MonitoringMixins from '~/monitoring/mixins/monitoring_mixins';
+import eventHub from '~/monitoring/event_hub';
+import { deploymentData, singleRowMetrics } from './mock_data';
+
+const createComponent = (propsData) => {
+ const Component = Vue.extend(MonitoringColumn);
+
+ return new Component({
+ propsData,
+ }).$mount();
+};
+
+describe('MonitoringColumn', () => {
+ beforeEach(() => {
+ spyOn(MonitoringMixins.methods, 'formatDeployments').and.callFake(function fakeFormat() {
+ return {};
+ });
+ });
+
+ it('has a title', () => {
+ const component = createComponent({
+ columnData: singleRowMetrics[0],
+ classType: 'col-md-6',
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ expect(component.$el.querySelector('.text-center').innerText.trim()).toBe(component.columnData.title);
+ });
+
+ it('creates a path for the line and area of the graph', (done) => {
+ const component = createComponent({
+ columnData: singleRowMetrics[0],
+ classType: 'col-md-6',
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ Vue.nextTick(() => {
+ expect(component.area).toBeDefined();
+ expect(component.line).toBeDefined();
+ expect(typeof component.area).toEqual('string');
+ expect(typeof component.line).toEqual('string');
+ expect(_.isFunction(component.xScale)).toBe(true);
+ expect(_.isFunction(component.yScale)).toBe(true);
+ done();
+ });
+ });
+
+ describe('Computed props', () => {
+ it('axisTransform translates an element Y position depending of its height', () => {
+ const component = createComponent({
+ columnData: singleRowMetrics[0],
+ classType: 'col-md-6',
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ const transformedHeight = `${component.graphHeight - 100}`;
+ expect(component.axisTransform.indexOf(transformedHeight))
+ .not.toEqual(-1);
+ });
+
+ it('outterViewBox gets a width and height property based on the DOM size of the element', () => {
+ const component = createComponent({
+ columnData: singleRowMetrics[0],
+ classType: 'col-md-6',
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ const viewBoxArray = component.outterViewBox.split(' ');
+ expect(typeof component.outterViewBox).toEqual('string');
+ expect(viewBoxArray[2]).toEqual(component.graphWidth.toString());
+ expect(viewBoxArray[3]).toEqual(component.graphHeight.toString());
+ });
+ });
+
+ it('sends an event to the eventhub when it has finished resizing', (done) => {
+ const component = createComponent({
+ columnData: singleRowMetrics[0],
+ classType: 'col-md-6',
+ updateAspectRatio: false,
+ deploymentData,
+ });
+ spyOn(eventHub, '$emit');
+
+ component.updateAspectRatio = true;
+ Vue.nextTick(() => {
+ expect(eventHub.$emit).toHaveBeenCalled();
+ done();
+ });
+ });
+
+ it('has a title for the y-axis that comes from the backend', () => {
+ const component = createComponent({
+ columnData: singleRowMetrics[0],
+ classType: 'col-md-6',
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ expect(component.yAxisLabel).toEqual(component.columnData.y_label);
+ });
+});
diff --git a/spec/javascripts/monitoring/monitoring_deployment_spec.js b/spec/javascripts/monitoring/monitoring_deployment_spec.js
new file mode 100644
index 00000000000..5cc5b514824
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_deployment_spec.js
@@ -0,0 +1,137 @@
+import Vue from 'vue';
+import MonitoringState from '~/monitoring/components/monitoring_deployment.vue';
+import { deploymentData } from './mock_data';
+
+const createComponent = (propsData) => {
+ const Component = Vue.extend(MonitoringState);
+
+ return new Component({
+ propsData,
+ }).$mount();
+};
+
+describe('MonitoringDeployment', () => {
+ const reducedDeploymentData = [deploymentData[0]];
+ reducedDeploymentData[0].ref = reducedDeploymentData[0].ref.name;
+ reducedDeploymentData[0].xPos = 10;
+ reducedDeploymentData[0].time = new Date(reducedDeploymentData[0].created_at);
+ describe('Methods', () => {
+ it('refText shows the ref when a tag is available', () => {
+ reducedDeploymentData[0].tag = '1.0';
+ const component = createComponent({
+ showDeployInfo: false,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(
+ component.refText(reducedDeploymentData[0]),
+ ).toEqual(reducedDeploymentData[0].ref);
+ });
+
+ it('refText shows the sha when no tag is available', () => {
+ reducedDeploymentData[0].tag = null;
+ const component = createComponent({
+ showDeployInfo: false,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(
+ component.refText(reducedDeploymentData[0]),
+ ).toContain('f5bcd1');
+ });
+
+ it('nameDeploymentClass creates a class with the prefix deploy-info-', () => {
+ const component = createComponent({
+ showDeployInfo: false,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(
+ component.nameDeploymentClass(reducedDeploymentData[0]),
+ ).toContain('deploy-info');
+ });
+
+ it('transformDeploymentGroup translates an available deployment', () => {
+ const component = createComponent({
+ showDeployInfo: false,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(
+ component.transformDeploymentGroup(reducedDeploymentData[0]),
+ ).toContain('translate(11, 20)');
+ });
+
+ it('hides the deployment flag', () => {
+ reducedDeploymentData[0].showDeploymentFlag = false;
+ const component = createComponent({
+ showDeployInfo: true,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(component.$el.querySelector('.js-deploy-info-box')).toBeNull();
+ });
+
+ it('shows the deployment flag', () => {
+ reducedDeploymentData[0].showDeploymentFlag = true;
+ const component = createComponent({
+ showDeployInfo: true,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(
+ component.$el.querySelector('.js-deploy-info-box').style.display,
+ ).not.toEqual('display: none;');
+ });
+
+ it('shows the refText inside a text element with the deploy-info-text class', () => {
+ reducedDeploymentData[0].showDeploymentFlag = true;
+ const component = createComponent({
+ showDeployInfo: true,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(
+ component.$el.querySelector('.deploy-info-text').firstChild.nodeValue.trim(),
+ ).toEqual(component.refText(reducedDeploymentData[0]));
+ });
+
+ it('should contain a hidden gradient', () => {
+ const component = createComponent({
+ showDeployInfo: true,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(component.$el.querySelector('#shadow-gradient')).not.toBeNull();
+ });
+
+ describe('Computed props', () => {
+ it('calculatedHeight', () => {
+ const component = createComponent({
+ showDeployInfo: true,
+ deploymentData: reducedDeploymentData,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(component.calculatedHeight).toEqual(180);
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/monitoring_flag_spec.js b/spec/javascripts/monitoring/monitoring_flag_spec.js
new file mode 100644
index 00000000000..3861a95ff07
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_flag_spec.js
@@ -0,0 +1,76 @@
+import Vue from 'vue';
+import MonitoringFlag from '~/monitoring/components/monitoring_flag.vue';
+
+const createComponent = (propsData) => {
+ const Component = Vue.extend(MonitoringFlag);
+
+ return new Component({
+ propsData,
+ }).$mount();
+};
+
+function getCoordinate(component, selector, coordinate) {
+ const coordinateVal = component.$el.querySelector(selector).getAttribute(coordinate);
+ return parseInt(coordinateVal, 10);
+}
+
+describe('MonitoringFlag', () => {
+ it('has a line and a circle located at the currentXCoordinate and currentYCoordinate', () => {
+ const component = createComponent({
+ currentXCoordinate: 200,
+ currentYCoordinate: 100,
+ currentFlagPosition: 100,
+ currentData: {
+ time: new Date('2017-06-04T18:17:33.501Z'),
+ value: '1.49609375',
+ },
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(getCoordinate(component, '.selected-metric-line', 'x1'))
+ .toEqual(component.currentXCoordinate);
+ expect(getCoordinate(component, '.selected-metric-line', 'x2'))
+ .toEqual(component.currentXCoordinate);
+ expect(getCoordinate(component, '.circle-metric', 'cx'))
+ .toEqual(component.currentXCoordinate);
+ expect(getCoordinate(component, '.circle-metric', 'cy'))
+ .toEqual(component.currentYCoordinate);
+ });
+
+ it('has a SVG with the class rect-text-metric at the currentFlagPosition', () => {
+ const component = createComponent({
+ currentXCoordinate: 200,
+ currentYCoordinate: 100,
+ currentFlagPosition: 100,
+ currentData: {
+ time: new Date('2017-06-04T18:17:33.501Z'),
+ value: '1.49609375',
+ },
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ const svg = component.$el.querySelector('.rect-text-metric');
+ expect(svg.tagName).toEqual('svg');
+ expect(parseInt(svg.getAttribute('x'), 10)).toEqual(component.currentFlagPosition);
+ });
+
+ describe('Computed props', () => {
+ it('calculatedHeight', () => {
+ const component = createComponent({
+ currentXCoordinate: 200,
+ currentYCoordinate: 100,
+ currentFlagPosition: 100,
+ currentData: {
+ time: new Date('2017-06-04T18:17:33.501Z'),
+ value: '1.49609375',
+ },
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(component.calculatedHeight).toEqual(180);
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/monitoring_legends_spec.js b/spec/javascripts/monitoring/monitoring_legends_spec.js
new file mode 100644
index 00000000000..4c69b81e650
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_legends_spec.js
@@ -0,0 +1,111 @@
+import Vue from 'vue';
+import MonitoringLegends from '~/monitoring/components/monitoring_legends.vue';
+import measurements from '~/monitoring/utils/measurements';
+
+const createComponent = (propsData) => {
+ const Component = Vue.extend(MonitoringLegends);
+
+ return new Component({
+ propsData,
+ }).$mount();
+};
+
+function getTextFromNode(component, selector) {
+ return component.$el.querySelector(selector).firstChild.nodeValue.trim();
+}
+
+describe('MonitoringLegends', () => {
+ describe('Computed props', () => {
+ it('textTransform', () => {
+ const component = createComponent({
+ graphWidth: 500,
+ graphHeight: 300,
+ margin: measurements.large.margin,
+ measurements: measurements.large,
+ areaColorRgb: '#f0f0f0',
+ legendTitle: 'Title',
+ yAxisLabel: 'Values',
+ metricUsage: 'Value',
+ });
+
+ expect(component.textTransform).toContain('translate(15, 120) rotate(-90)');
+ });
+
+ it('xPosition', () => {
+ const component = createComponent({
+ graphWidth: 500,
+ graphHeight: 300,
+ margin: measurements.large.margin,
+ measurements: measurements.large,
+ areaColorRgb: '#f0f0f0',
+ legendTitle: 'Title',
+ yAxisLabel: 'Values',
+ metricUsage: 'Value',
+ });
+
+ expect(component.xPosition).toEqual(180);
+ });
+
+ it('yPosition', () => {
+ const component = createComponent({
+ graphWidth: 500,
+ graphHeight: 300,
+ margin: measurements.large.margin,
+ measurements: measurements.large,
+ areaColorRgb: '#f0f0f0',
+ legendTitle: 'Title',
+ yAxisLabel: 'Values',
+ metricUsage: 'Value',
+ });
+
+ expect(component.yPosition).toEqual(240);
+ });
+
+ it('rectTransform', () => {
+ const component = createComponent({
+ graphWidth: 500,
+ graphHeight: 300,
+ margin: measurements.large.margin,
+ measurements: measurements.large,
+ areaColorRgb: '#f0f0f0',
+ legendTitle: 'Title',
+ yAxisLabel: 'Values',
+ metricUsage: 'Value',
+ });
+
+ expect(component.rectTransform).toContain('translate(0, 120) rotate(-90)');
+ });
+ });
+
+ it('has 2 rect-axis-text rect svg elements', () => {
+ const component = createComponent({
+ graphWidth: 500,
+ graphHeight: 300,
+ margin: measurements.large.margin,
+ measurements: measurements.large,
+ areaColorRgb: '#f0f0f0',
+ legendTitle: 'Title',
+ yAxisLabel: 'Values',
+ metricUsage: 'Value',
+ });
+
+ expect(component.$el.querySelectorAll('.rect-axis-text').length).toEqual(2);
+ });
+
+ it('contains text to signal the usage, title and time', () => {
+ const component = createComponent({
+ graphWidth: 500,
+ graphHeight: 300,
+ margin: measurements.large.margin,
+ measurements: measurements.large,
+ areaColorRgb: '#f0f0f0',
+ legendTitle: 'Title',
+ yAxisLabel: 'Values',
+ metricUsage: 'Value',
+ });
+
+ expect(getTextFromNode(component, '.text-metric-title')).toEqual(component.legendTitle);
+ expect(getTextFromNode(component, '.text-metric-usage')).toEqual(component.metricUsage);
+ expect(getTextFromNode(component, '.label-axis-text')).toEqual(component.yAxisLabel);
+ });
+});
diff --git a/spec/javascripts/monitoring/monitoring_row_spec.js b/spec/javascripts/monitoring/monitoring_row_spec.js
new file mode 100644
index 00000000000..a82480e8342
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_row_spec.js
@@ -0,0 +1,57 @@
+import Vue from 'vue';
+import MonitoringRow from '~/monitoring/components/monitoring_row.vue';
+import { deploymentData, singleRowMetrics } from './mock_data';
+
+const createComponent = (propsData) => {
+ const Component = Vue.extend(MonitoringRow);
+
+ return new Component({
+ propsData,
+ }).$mount();
+};
+
+describe('MonitoringRow', () => {
+ describe('Computed props', () => {
+ it('bootstrapClass is set to col-md-6 when rowData is higher/equal to 2', () => {
+ const component = createComponent({
+ rowData: singleRowMetrics,
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ expect(component.bootstrapClass).toEqual('col-md-6');
+ });
+
+ it('bootstrapClass is set to col-md-12 when rowData is lower than 2', () => {
+ const component = createComponent({
+ rowData: [singleRowMetrics[0]],
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ expect(component.bootstrapClass).toEqual('col-md-12');
+ });
+ });
+
+ it('has one column', () => {
+ const component = createComponent({
+ rowData: singleRowMetrics,
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ expect(component.$el.querySelectorAll('.prometheus-svg-container').length)
+ .toEqual(component.rowData.length);
+ });
+
+ it('has two columns', () => {
+ const component = createComponent({
+ rowData: singleRowMetrics,
+ updateAspectRatio: false,
+ deploymentData,
+ });
+
+ expect(component.$el.querySelectorAll('.col-md-6').length)
+ .toEqual(component.rowData.length);
+ });
+});
diff --git a/spec/javascripts/monitoring/monitoring_spec.js b/spec/javascripts/monitoring/monitoring_spec.js
new file mode 100644
index 00000000000..6c7b691baa4
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_spec.js
@@ -0,0 +1,49 @@
+import Vue from 'vue';
+import Monitoring from '~/monitoring/components/monitoring.vue';
+import { MonitorMockInterceptor } from './mock_data';
+
+describe('Monitoring', () => {
+ const fixtureName = 'environments/metrics/metrics.html.raw';
+ let MonitoringComponent;
+ let component;
+ preloadFixtures(fixtureName);
+
+ beforeEach(() => {
+ loadFixtures(fixtureName);
+ MonitoringComponent = Vue.extend(Monitoring);
+ });
+
+ describe('no metrics are available yet', () => {
+ it('shows a getting started empty state when no metrics are present', () => {
+ component = new MonitoringComponent({
+ el: document.querySelector('#prometheus-graphs'),
+ });
+
+ component.$mount();
+ expect(component.$el.querySelector('#prometheus-graphs')).toBe(null);
+ expect(component.state).toEqual('gettingStarted');
+ });
+ });
+
+ describe('requests information to the server', () => {
+ beforeEach(() => {
+ document.querySelector('#prometheus-graphs').setAttribute('data-has-metrics', 'true');
+ Vue.http.interceptors.push(MonitorMockInterceptor);
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, MonitorMockInterceptor);
+ });
+
+ it('shows up a loading state', (done) => {
+ component = new MonitoringComponent({
+ el: document.querySelector('#prometheus-graphs'),
+ });
+ component.$mount();
+ Vue.nextTick(() => {
+ expect(component.state).toEqual('loading');
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/monitoring_state_spec.js b/spec/javascripts/monitoring/monitoring_state_spec.js
new file mode 100644
index 00000000000..4c0c558502f
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_state_spec.js
@@ -0,0 +1,110 @@
+import Vue from 'vue';
+import MonitoringState from '~/monitoring/components/monitoring_state.vue';
+import { statePaths } from './mock_data';
+
+const createComponent = (propsData) => {
+ const Component = Vue.extend(MonitoringState);
+
+ return new Component({
+ propsData,
+ }).$mount();
+};
+
+function getTextFromNode(component, selector) {
+ return component.$el.querySelector(selector).firstChild.nodeValue.trim();
+}
+
+describe('MonitoringState', () => {
+ describe('Computed props', () => {
+ it('currentState', () => {
+ const component = createComponent({
+ selectedState: 'gettingStarted',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.currentState).toBe(component.states.gettingStarted);
+ });
+
+ it('buttonPath returns settings path for the state "gettingStarted"', () => {
+ const component = createComponent({
+ selectedState: 'gettingStarted',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.buttonPath).toEqual(statePaths.settingsPath);
+ expect(component.buttonPath).not.toEqual(statePaths.documentationPath);
+ });
+
+ it('buttonPath returns documentation path for any of the other states', () => {
+ const component = createComponent({
+ selectedState: 'loading',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.buttonPath).toEqual(statePaths.documentationPath);
+ expect(component.buttonPath).not.toEqual(statePaths.settingsPath);
+ });
+
+ it('showButtonDescription returns a description with a link for the unableToConnect state', () => {
+ const component = createComponent({
+ selectedState: 'unableToConnect',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.showButtonDescription).toEqual(true);
+ });
+
+ it('showButtonDescription returns the description without a link for any other state', () => {
+ const component = createComponent({
+ selectedState: 'loading',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.showButtonDescription).toEqual(false);
+ });
+ });
+
+ it('should show the gettingStarted state', () => {
+ const component = createComponent({
+ selectedState: 'gettingStarted',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.$el.querySelector('svg')).toBeDefined();
+ expect(getTextFromNode(component, '.state-title')).toEqual(component.states.gettingStarted.title);
+ expect(getTextFromNode(component, '.state-description')).toEqual(component.states.gettingStarted.description);
+ expect(getTextFromNode(component, '.btn-success')).toEqual(component.states.gettingStarted.buttonText);
+ });
+
+ it('should show the loading state', () => {
+ const component = createComponent({
+ selectedState: 'loading',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.$el.querySelector('svg')).toBeDefined();
+ expect(getTextFromNode(component, '.state-title')).toEqual(component.states.loading.title);
+ expect(getTextFromNode(component, '.state-description')).toEqual(component.states.loading.description);
+ expect(getTextFromNode(component, '.btn-success')).toEqual(component.states.loading.buttonText);
+ });
+
+ it('should show the unableToConnect state', () => {
+ const component = createComponent({
+ selectedState: 'unableToConnect',
+ settingsPath: statePaths.settingsPath,
+ documentationPath: statePaths.documentationPath,
+ });
+
+ expect(component.$el.querySelector('svg')).toBeDefined();
+ expect(getTextFromNode(component, '.state-title')).toEqual(component.states.unableToConnect.title);
+ expect(component.$el.querySelector('.state-description a')).toBeDefined();
+ expect(getTextFromNode(component, '.btn-success')).toEqual(component.states.unableToConnect.buttonText);
+ });
+});
diff --git a/spec/javascripts/monitoring/monitoring_store_spec.js b/spec/javascripts/monitoring/monitoring_store_spec.js
new file mode 100644
index 00000000000..20c1e6a0005
--- /dev/null
+++ b/spec/javascripts/monitoring/monitoring_store_spec.js
@@ -0,0 +1,24 @@
+import MonitoringStore from '~/monitoring/stores/monitoring_store';
+import MonitoringMock, { deploymentData } from './mock_data';
+
+describe('MonitoringStore', () => {
+ this.store = new MonitoringStore();
+ this.store.storeMetrics(MonitoringMock.data);
+
+ it('contains one group that contains two queries sorted by priority in one row', () => {
+ expect(this.store.groups).toBeDefined();
+ expect(this.store.groups.length).toEqual(1);
+ expect(this.store.groups[0].metrics.length).toEqual(1);
+ });
+
+ it('gets the metrics count for every group', () => {
+ expect(this.store.getMetricsCount()).toEqual(2);
+ });
+
+ it('contains deployment data', () => {
+ this.store.storeDeploymentData(deploymentData);
+ expect(this.store.deploymentData).toBeDefined();
+ expect(this.store.deploymentData.length).toEqual(3);
+ expect(typeof this.store.deploymentData[0]).toEqual('object');
+ });
+});
diff --git a/spec/javascripts/monitoring/prometheus_graph_spec.js b/spec/javascripts/monitoring/prometheus_graph_spec.js
deleted file mode 100644
index 25578bf1c6e..00000000000
--- a/spec/javascripts/monitoring/prometheus_graph_spec.js
+++ /dev/null
@@ -1,98 +0,0 @@
-import 'jquery';
-import PrometheusGraph from '~/monitoring/prometheus_graph';
-import { prometheusMockData } from './prometheus_mock_data';
-
-describe('PrometheusGraph', () => {
- const fixtureName = 'environments/metrics/metrics.html.raw';
- const prometheusGraphContainer = '.prometheus-graph';
- const prometheusGraphContents = `${prometheusGraphContainer}[graph-type=cpu_values]`;
-
- preloadFixtures(fixtureName);
-
- beforeEach(() => {
- loadFixtures(fixtureName);
- $('.prometheus-container').data('has-metrics', 'true');
- this.prometheusGraph = new PrometheusGraph();
- const self = this;
- const fakeInit = (metricsResponse) => {
- self.prometheusGraph.transformData(metricsResponse);
- self.prometheusGraph.createGraph();
- };
- spyOn(this.prometheusGraph, 'init').and.callFake(fakeInit);
- });
-
- it('initializes graph properties', () => {
- // Test for the measurements
- expect(this.prometheusGraph.margin).toBeDefined();
- expect(this.prometheusGraph.marginLabelContainer).toBeDefined();
- expect(this.prometheusGraph.originalWidth).toBeDefined();
- expect(this.prometheusGraph.originalHeight).toBeDefined();
- expect(this.prometheusGraph.height).toBeDefined();
- expect(this.prometheusGraph.width).toBeDefined();
- expect(this.prometheusGraph.backOffRequestCounter).toBeDefined();
- // Test for the graph properties (colors, radius, etc.)
- expect(this.prometheusGraph.graphSpecificProperties).toBeDefined();
- expect(this.prometheusGraph.commonGraphProperties).toBeDefined();
- });
-
- it('transforms the data', () => {
- this.prometheusGraph.init(prometheusMockData.metrics);
- Object.keys(this.prometheusGraph.graphSpecificProperties, (key) => {
- const graphProps = this.prometheusGraph.graphSpecificProperties[key];
- expect(graphProps.data).toBeDefined();
- expect(graphProps.data.length).toBe(121);
- });
- });
-
- it('creates two graphs', () => {
- this.prometheusGraph.init(prometheusMockData.metrics);
- expect($(prometheusGraphContainer).length).toBe(2);
- });
-
- describe('Graph contents', () => {
- beforeEach(() => {
- this.prometheusGraph.init(prometheusMockData.metrics);
- });
-
- it('has axis, an area, a line and a overlay', () => {
- const $graphContainer = $(prometheusGraphContents).find('.x-axis').parent();
- expect($graphContainer.find('.x-axis')).toBeDefined();
- expect($graphContainer.find('.y-axis')).toBeDefined();
- expect($graphContainer.find('.prometheus-graph-overlay')).toBeDefined();
- expect($graphContainer.find('.metric-line')).toBeDefined();
- expect($graphContainer.find('.metric-area')).toBeDefined();
- });
-
- it('has legends, labels and an extra axis that labels the metrics', () => {
- const $prometheusGraphContents = $(prometheusGraphContents);
- const $axisLabelContainer = $(prometheusGraphContents).find('.label-x-axis-line').parent();
- expect($prometheusGraphContents.find('.label-x-axis-line')).toBeDefined();
- expect($prometheusGraphContents.find('.label-y-axis-line')).toBeDefined();
- expect($prometheusGraphContents.find('.label-axis-text')).toBeDefined();
- expect($prometheusGraphContents.find('.rect-axis-text')).toBeDefined();
- expect($axisLabelContainer.find('rect').length).toBe(3);
- expect($axisLabelContainer.find('text').length).toBe(4);
- });
- });
-});
-
-describe('PrometheusGraphs UX states', () => {
- const fixtureName = 'environments/metrics/metrics.html.raw';
- preloadFixtures(fixtureName);
-
- beforeEach(() => {
- loadFixtures(fixtureName);
- this.prometheusGraph = new PrometheusGraph();
- });
-
- it('shows a specified state', () => {
- this.prometheusGraph.state = '.js-getting-started';
- this.prometheusGraph.updateState();
- const $state = $('.js-getting-started');
- expect($state).toBeDefined();
- expect($('.state-title', $state)).toBeDefined();
- expect($('.state-svg', $state)).toBeDefined();
- expect($('.state-description', $state)).toBeDefined();
- expect($('.state-button', $state)).toBeDefined();
- });
-});
diff --git a/spec/javascripts/monitoring/prometheus_mock_data.js b/spec/javascripts/monitoring/prometheus_mock_data.js
deleted file mode 100644
index 1cdc14faaa8..00000000000
--- a/spec/javascripts/monitoring/prometheus_mock_data.js
+++ /dev/null
@@ -1,1014 +0,0 @@
-/* eslint-disable import/prefer-default-export*/
-export const prometheusMockData = {
- status: 200,
- metrics: {
- success: true,
- metrics: {
- memory_values: [
- {
- metric: {
- },
- values: [
- [
- 1488462917.256,
- '10.12890625',
- ],
- [
- 1488462977.256,
- '10.140625',
- ],
- [
- 1488463037.256,
- '10.140625',
- ],
- [
- 1488463097.256,
- '10.14453125',
- ],
- [
- 1488463157.256,
- '10.1484375',
- ],
- [
- 1488463217.256,
- '10.15625',
- ],
- [
- 1488463277.256,
- '10.15625',
- ],
- [
- 1488463337.256,
- '10.15625',
- ],
- [
- 1488463397.256,
- '10.1640625',
- ],
- [
- 1488463457.256,
- '10.171875',
- ],
- [
- 1488463517.256,
- '10.171875',
- ],
- [
- 1488463577.256,
- '10.171875',
- ],
- [
- 1488463637.256,
- '10.18359375',
- ],
- [
- 1488463697.256,
- '10.1953125',
- ],
- [
- 1488463757.256,
- '10.203125',
- ],
- [
- 1488463817.256,
- '10.20703125',
- ],
- [
- 1488463877.256,
- '10.20703125',
- ],
- [
- 1488463937.256,
- '10.20703125',
- ],
- [
- 1488463997.256,
- '10.20703125',
- ],
- [
- 1488464057.256,
- '10.2109375',
- ],
- [
- 1488464117.256,
- '10.2109375',
- ],
- [
- 1488464177.256,
- '10.2109375',
- ],
- [
- 1488464237.256,
- '10.2109375',
- ],
- [
- 1488464297.256,
- '10.21484375',
- ],
- [
- 1488464357.256,
- '10.22265625',
- ],
- [
- 1488464417.256,
- '10.22265625',
- ],
- [
- 1488464477.256,
- '10.2265625',
- ],
- [
- 1488464537.256,
- '10.23046875',
- ],
- [
- 1488464597.256,
- '10.23046875',
- ],
- [
- 1488464657.256,
- '10.234375',
- ],
- [
- 1488464717.256,
- '10.234375',
- ],
- [
- 1488464777.256,
- '10.234375',
- ],
- [
- 1488464837.256,
- '10.234375',
- ],
- [
- 1488464897.256,
- '10.234375',
- ],
- [
- 1488464957.256,
- '10.234375',
- ],
- [
- 1488465017.256,
- '10.23828125',
- ],
- [
- 1488465077.256,
- '10.23828125',
- ],
- [
- 1488465137.256,
- '10.2421875',
- ],
- [
- 1488465197.256,
- '10.2421875',
- ],
- [
- 1488465257.256,
- '10.2421875',
- ],
- [
- 1488465317.256,
- '10.2421875',
- ],
- [
- 1488465377.256,
- '10.2421875',
- ],
- [
- 1488465437.256,
- '10.2421875',
- ],
- [
- 1488465497.256,
- '10.2421875',
- ],
- [
- 1488465557.256,
- '10.2421875',
- ],
- [
- 1488465617.256,
- '10.2421875',
- ],
- [
- 1488465677.256,
- '10.2421875',
- ],
- [
- 1488465737.256,
- '10.2421875',
- ],
- [
- 1488465797.256,
- '10.24609375',
- ],
- [
- 1488465857.256,
- '10.25',
- ],
- [
- 1488465917.256,
- '10.25390625',
- ],
- [
- 1488465977.256,
- '9.98828125',
- ],
- [
- 1488466037.256,
- '9.9921875',
- ],
- [
- 1488466097.256,
- '9.9921875',
- ],
- [
- 1488466157.256,
- '9.99609375',
- ],
- [
- 1488466217.256,
- '10',
- ],
- [
- 1488466277.256,
- '10.00390625',
- ],
- [
- 1488466337.256,
- '10.0078125',
- ],
- [
- 1488466397.256,
- '10.01171875',
- ],
- [
- 1488466457.256,
- '10.0234375',
- ],
- [
- 1488466517.256,
- '10.02734375',
- ],
- [
- 1488466577.256,
- '10.02734375',
- ],
- [
- 1488466637.256,
- '10.03125',
- ],
- [
- 1488466697.256,
- '10.03125',
- ],
- [
- 1488466757.256,
- '10.03125',
- ],
- [
- 1488466817.256,
- '10.03125',
- ],
- [
- 1488466877.256,
- '10.03125',
- ],
- [
- 1488466937.256,
- '10.03125',
- ],
- [
- 1488466997.256,
- '10.03125',
- ],
- [
- 1488467057.256,
- '10.0390625',
- ],
- [
- 1488467117.256,
- '10.0390625',
- ],
- [
- 1488467177.256,
- '10.04296875',
- ],
- [
- 1488467237.256,
- '10.05078125',
- ],
- [
- 1488467297.256,
- '10.05859375',
- ],
- [
- 1488467357.256,
- '10.06640625',
- ],
- [
- 1488467417.256,
- '10.06640625',
- ],
- [
- 1488467477.256,
- '10.0703125',
- ],
- [
- 1488467537.256,
- '10.07421875',
- ],
- [
- 1488467597.256,
- '10.0859375',
- ],
- [
- 1488467657.256,
- '10.0859375',
- ],
- [
- 1488467717.256,
- '10.09765625',
- ],
- [
- 1488467777.256,
- '10.1015625',
- ],
- [
- 1488467837.256,
- '10.10546875',
- ],
- [
- 1488467897.256,
- '10.10546875',
- ],
- [
- 1488467957.256,
- '10.125',
- ],
- [
- 1488468017.256,
- '10.13671875',
- ],
- [
- 1488468077.256,
- '10.1484375',
- ],
- [
- 1488468137.256,
- '10.15625',
- ],
- [
- 1488468197.256,
- '10.16796875',
- ],
- [
- 1488468257.256,
- '10.171875',
- ],
- [
- 1488468317.256,
- '10.171875',
- ],
- [
- 1488468377.256,
- '10.171875',
- ],
- [
- 1488468437.256,
- '10.171875',
- ],
- [
- 1488468497.256,
- '10.171875',
- ],
- [
- 1488468557.256,
- '10.171875',
- ],
- [
- 1488468617.256,
- '10.171875',
- ],
- [
- 1488468677.256,
- '10.17578125',
- ],
- [
- 1488468737.256,
- '10.17578125',
- ],
- [
- 1488468797.256,
- '10.265625',
- ],
- [
- 1488468857.256,
- '10.19921875',
- ],
- [
- 1488468917.256,
- '10.19921875',
- ],
- [
- 1488468977.256,
- '10.19921875',
- ],
- [
- 1488469037.256,
- '10.19921875',
- ],
- [
- 1488469097.256,
- '10.19921875',
- ],
- [
- 1488469157.256,
- '10.203125',
- ],
- [
- 1488469217.256,
- '10.43359375',
- ],
- [
- 1488469277.256,
- '10.20703125',
- ],
- [
- 1488469337.256,
- '10.2109375',
- ],
- [
- 1488469397.256,
- '10.22265625',
- ],
- [
- 1488469457.256,
- '10.21484375',
- ],
- [
- 1488469517.256,
- '10.21484375',
- ],
- [
- 1488469577.256,
- '10.21484375',
- ],
- [
- 1488469637.256,
- '10.22265625',
- ],
- [
- 1488469697.256,
- '10.234375',
- ],
- [
- 1488469757.256,
- '10.234375',
- ],
- [
- 1488469817.256,
- '10.234375',
- ],
- [
- 1488469877.256,
- '10.2421875',
- ],
- [
- 1488469937.256,
- '10.25',
- ],
- [
- 1488469997.256,
- '10.25390625',
- ],
- [
- 1488470057.256,
- '10.26171875',
- ],
- [
- 1488470117.256,
- '10.2734375',
- ],
- ],
- },
- ],
- memory_current: [
- {
- metric: {
- },
- value: [
- 1488470117.737,
- '10.2734375',
- ],
- },
- ],
- cpu_values: [
- {
- metric: {
- },
- values: [
- [
- 1488462918.15,
- '0.0002996458625058103',
- ],
- [
- 1488462978.15,
- '0.0002652382333333314',
- ],
- [
- 1488463038.15,
- '0.0003485461333333421',
- ],
- [
- 1488463098.15,
- '0.0003420421999999886',
- ],
- [
- 1488463158.15,
- '0.00023107150000001297',
- ],
- [
- 1488463218.15,
- '0.00030463981666664826',
- ],
- [
- 1488463278.15,
- '0.0002477177833333677',
- ],
- [
- 1488463338.15,
- '0.00026936656666665115',
- ],
- [
- 1488463398.15,
- '0.000406264750000022',
- ],
- [
- 1488463458.15,
- '0.00029592802026561453',
- ],
- [
- 1488463518.15,
- '0.00023426999683316343',
- ],
- [
- 1488463578.15,
- '0.0003057080666666915',
- ],
- [
- 1488463638.15,
- '0.0003408470500000149',
- ],
- [
- 1488463698.15,
- '0.00025497336666665166',
- ],
- [
- 1488463758.15,
- '0.0003009282833333534',
- ],
- [
- 1488463818.15,
- '0.0003119383499999924',
- ],
- [
- 1488463878.15,
- '0.00028719019999998705',
- ],
- [
- 1488463938.15,
- '0.000327864749999988',
- ],
- [
- 1488463998.15,
- '0.0002514917333333422',
- ],
- [
- 1488464058.15,
- '0.0003614651166666742',
- ],
- [
- 1488464118.15,
- '0.0003221668000000122',
- ],
- [
- 1488464178.15,
- '0.00023323083333330884',
- ],
- [
- 1488464238.15,
- '0.00028531499475009274',
- ],
- [
- 1488464298.15,
- '0.0002627695294921391',
- ],
- [
- 1488464358.15,
- '0.00027145463333333453',
- ],
- [
- 1488464418.15,
- '0.00025669488333335266',
- ],
- [
- 1488464478.15,
- '0.00022307761666665965',
- ],
- [
- 1488464538.15,
- '0.0003307265833333517',
- ],
- [
- 1488464598.15,
- '0.0002817050666666709',
- ],
- [
- 1488464658.15,
- '0.00022357458333332285',
- ],
- [
- 1488464718.15,
- '0.00032648590000000275',
- ],
- [
- 1488464778.15,
- '0.00028410750000000816',
- ],
- [
- 1488464838.15,
- '0.0003038076999999954',
- ],
- [
- 1488464898.15,
- '0.00037568226666667335',
- ],
- [
- 1488464958.15,
- '0.00020160354999999202',
- ],
- [
- 1488465018.15,
- '0.0003229403333333399',
- ],
- [
- 1488465078.15,
- '0.00033516069999999236',
- ],
- [
- 1488465138.15,
- '0.0003365978333333371',
- ],
- [
- 1488465198.15,
- '0.00020262178333331585',
- ],
- [
- 1488465258.15,
- '0.00040567498333331876',
- ],
- [
- 1488465318.15,
- '0.00029114155000001436',
- ],
- [
- 1488465378.15,
- '0.0002498841000000122',
- ],
- [
- 1488465438.15,
- '0.00027296763333331715',
- ],
- [
- 1488465498.15,
- '0.0002958794000000135',
- ],
- [
- 1488465558.15,
- '0.0002922354666666867',
- ],
- [
- 1488465618.15,
- '0.00034186624999999653',
- ],
- [
- 1488465678.15,
- '0.0003397984166666627',
- ],
- [
- 1488465738.15,
- '0.0002658284166666469',
- ],
- [
- 1488465798.15,
- '0.00026221139999999346',
- ],
- [
- 1488465858.15,
- '0.00029467960000001034',
- ],
- [
- 1488465918.15,
- '0.0002634141333333358',
- ],
- [
- 1488465978.15,
- '0.0003202958333333209',
- ],
- [
- 1488466038.15,
- '0.00037890760000000394',
- ],
- [
- 1488466098.15,
- '0.00023453356666666518',
- ],
- [
- 1488466158.15,
- '0.0002866827333333433',
- ],
- [
- 1488466218.15,
- '0.0003335935499999998',
- ],
- [
- 1488466278.15,
- '0.00022787131666666125',
- ],
- [
- 1488466338.15,
- '0.00033821938333333064',
- ],
- [
- 1488466398.15,
- '0.00029233375000001043',
- ],
- [
- 1488466458.15,
- '0.00026562758333333514',
- ],
- [
- 1488466518.15,
- '0.0003142600999999819',
- ],
- [
- 1488466578.15,
- '0.00027392178333333444',
- ],
- [
- 1488466638.15,
- '0.00028178598333334173',
- ],
- [
- 1488466698.15,
- '0.0002463400666666911',
- ],
- [
- 1488466758.15,
- '0.00040234373333332125',
- ],
- [
- 1488466818.15,
- '0.00023677453333332822',
- ],
- [
- 1488466878.15,
- '0.00030852703333333523',
- ],
- [
- 1488466938.15,
- '0.0003582272833333455',
- ],
- [
- 1488466998.15,
- '0.0002176380833332973',
- ],
- [
- 1488467058.15,
- '0.00026180203333335447',
- ],
- [
- 1488467118.15,
- '0.00027862966666667436',
- ],
- [
- 1488467178.15,
- '0.0002769731166666567',
- ],
- [
- 1488467238.15,
- '0.0002832899166666477',
- ],
- [
- 1488467298.15,
- '0.0003446533500000311',
- ],
- [
- 1488467358.15,
- '0.0002691345999999761',
- ],
- [
- 1488467418.15,
- '0.000284919933333357',
- ],
- [
- 1488467478.15,
- '0.0002396026166666528',
- ],
- [
- 1488467538.15,
- '0.00035625295000002075',
- ],
- [
- 1488467598.15,
- '0.00036759816666664946',
- ],
- [
- 1488467658.15,
- '0.00030326608333333855',
- ],
- [
- 1488467718.15,
- '0.00023584972418043393',
- ],
- [
- 1488467778.15,
- '0.00025744508892115107',
- ],
- [
- 1488467838.15,
- '0.00036737541666663395',
- ],
- [
- 1488467898.15,
- '0.00034325741666666094',
- ],
- [
- 1488467958.15,
- '0.00026390046666667407',
- ],
- [
- 1488468018.15,
- '0.0003302534500000102',
- ],
- [
- 1488468078.15,
- '0.00035243794999999527',
- ],
- [
- 1488468138.15,
- '0.00020149738333333407',
- ],
- [
- 1488468198.15,
- '0.0003183469666666679',
- ],
- [
- 1488468258.15,
- '0.0003835329166666845',
- ],
- [
- 1488468318.15,
- '0.0002485075333333124',
- ],
- [
- 1488468378.15,
- '0.0003011457166666768',
- ],
- [
- 1488468438.15,
- '0.00032242785497684965',
- ],
- [
- 1488468498.15,
- '0.0002659713747457531',
- ],
- [
- 1488468558.15,
- '0.0003476860333333202',
- ],
- [
- 1488468618.15,
- '0.00028336403333334794',
- ],
- [
- 1488468678.15,
- '0.00017132354999998728',
- ],
- [
- 1488468738.15,
- '0.0003001915833333276',
- ],
- [
- 1488468798.15,
- '0.0003025715666666725',
- ],
- [
- 1488468858.15,
- '0.0003012370166666815',
- ],
- [
- 1488468918.15,
- '0.00030203619999997025',
- ],
- [
- 1488468978.15,
- '0.0002804355000000314',
- ],
- [
- 1488469038.15,
- '0.00033194884999998564',
- ],
- [
- 1488469098.15,
- '0.00025201496666665455',
- ],
- [
- 1488469158.15,
- '0.0002777531500000189',
- ],
- [
- 1488469218.15,
- '0.0003314885833333392',
- ],
- [
- 1488469278.15,
- '0.0002234891422095589',
- ],
- [
- 1488469338.15,
- '0.000349117355867791',
- ],
- [
- 1488469398.15,
- '0.0004036731333333303',
- ],
- [
- 1488469458.15,
- '0.00024553911666667835',
- ],
- [
- 1488469518.15,
- '0.0003056456833333184',
- ],
- [
- 1488469578.15,
- '0.0002618737166666681',
- ],
- [
- 1488469638.15,
- '0.00022972643333331414',
- ],
- [
- 1488469698.15,
- '0.0003713522500000307',
- ],
- [
- 1488469758.15,
- '0.00018322576666666515',
- ],
- [
- 1488469818.15,
- '0.00034534762753952466',
- ],
- [
- 1488469878.15,
- '0.00028200510008501677',
- ],
- [
- 1488469938.15,
- '0.0002773708499999768',
- ],
- [
- 1488469998.15,
- '0.00027547160000001013',
- ],
- [
- 1488470058.15,
- '0.00031713610000000023',
- ],
- [
- 1488470118.15,
- '0.00035276853333332525',
- ],
- ],
- },
- ],
- cpu_current: [
- {
- metric: {
- },
- value: [
- 1488470118.566,
- '0.00035276853333332525',
- ],
- },
- ],
- last_update: '2017-03-02T15:55:18.981Z',
- },
- },
-};
diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js
index 5ece4ed080b..2c096ed08a8 100644
--- a/spec/javascripts/notes_spec.js
+++ b/spec/javascripts/notes_spec.js
@@ -523,6 +523,51 @@ import '~/notes';
});
});
+ describe('postComment with Slash commands', () => {
+ const sampleComment = '/assign @root\n/award :100:';
+ const note = {
+ commands_changes: {
+ assignee_id: 1,
+ emoji_award: '100'
+ },
+ errors: {
+ commands_only: ['Commands applied']
+ },
+ valid: false
+ };
+ let $form;
+ let $notesContainer;
+
+ beforeEach(() => {
+ this.notes = new Notes('', []);
+ window.gon.current_username = 'root';
+ window.gon.current_user_fullname = 'Administrator';
+ gl.awardsHandler = {
+ addAwardToEmojiBar: () => {},
+ scrollToAwards: () => {}
+ };
+ gl.GfmAutoComplete = {
+ dataSources: {
+ commands: '/root/test-project/autocomplete_sources/commands'
+ }
+ };
+ $form = $('form.js-main-target-form');
+ $notesContainer = $('ul.main-notes-list');
+ $form.find('textarea.js-note-text').val(sampleComment);
+ });
+
+ it('should remove slash command placeholder when comment with slash commands is done posting', () => {
+ const deferred = $.Deferred();
+ spyOn($, 'ajax').and.returnValue(deferred.promise());
+ spyOn(gl.awardsHandler, 'addAwardToEmojiBar').and.callThrough();
+ $('.js-comment-button').click();
+
+ expect($notesContainer.find('.system-note.being-posted').length).toEqual(1); // Placeholder shown
+ deferred.resolve(note);
+ expect($notesContainer.find('.system-note.being-posted').length).toEqual(0); // Placeholder removed
+ });
+ });
+
describe('update comment with script tags', () => {
const sampleComment = '<script></script>';
const updatedComment = '<script></script>';
diff --git a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
index a4bb043f8f1..b7d82c36ddd 100644
--- a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
@@ -88,12 +88,12 @@ describe Banzai::Filter::ExternalIssueReferenceFilter, lib: true do
it 'queries the collection on the first call' do
expect_any_instance_of(Project).to receive(:default_issues_tracker?).once.and_call_original
- expect_any_instance_of(Project).to receive(:issue_reference_pattern).once.and_call_original
+ expect_any_instance_of(Project).to receive(:external_issue_reference_pattern).once.and_call_original
not_cached = reference_filter.call("look for #{reference}", { project: project })
expect_any_instance_of(Project).not_to receive(:default_issues_tracker?)
- expect_any_instance_of(Project).not_to receive(:issue_reference_pattern)
+ expect_any_instance_of(Project).not_to receive(:external_issue_reference_pattern)
cached = reference_filter.call("look for #{reference}", { project: project })
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
index e5c1deb338b..a79d365d6c5 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
@@ -39,13 +39,6 @@ describe Banzai::Filter::IssueReferenceFilter, lib: true do
let(:reference) { "##{issue.iid}" }
- it 'ignores valid references when using non-default tracker' do
- allow(project).to receive(:default_issues_tracker?).and_return(false)
-
- exp = act = "Issue #{reference}"
- expect(reference_filter(act).to_html).to eq exp
- end
-
it 'links to a valid reference' do
doc = reference_filter("Fixed #{reference}")
@@ -340,24 +333,6 @@ describe Banzai::Filter::IssueReferenceFilter, lib: true do
.to eq({ project => { issue.iid => issue } })
end
end
-
- context 'using an external issue tracker' do
- it 'returns a Hash containing the issues per project' do
- doc = Nokogiri::HTML.fragment('')
- filter = described_class.new(doc, project: project)
-
- expect(project).to receive(:default_issues_tracker?).and_return(false)
-
- expect(filter).to receive(:projects_per_reference)
- .and_return({ project.path_with_namespace => project })
-
- expect(filter).to receive(:references_per_project)
- .and_return({ project.path_with_namespace => Set.new([1]) })
-
- expect(filter.issues_per_project[project][1])
- .to be_an_instance_of(ExternalIssue)
- end
- end
end
describe '.references_in' do
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
new file mode 100644
index 00000000000..2b8c76f2bb8
--- /dev/null
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -0,0 +1,33 @@
+require 'rails_helper'
+
+describe Banzai::Pipeline::GfmPipeline do
+ describe 'integration between parsing regular and external issue references' do
+ let(:project) { create(:redmine_project, :public) }
+
+ it 'allows to use shorthand external reference syntax for Redmine' do
+ markdown = '#12'
+
+ result = described_class.call(markdown, project: project)[:output]
+ link = result.css('a').first
+
+ expect(link['href']).to eq 'http://redmine/projects/project_name_in_redmine/issues/12'
+ end
+
+ it 'parses cross-project references to regular issues' do
+ other_project = create(:empty_project, :public)
+ issue = create(:issue, project: other_project)
+ markdown = issue.to_reference(project, full: true)
+
+ result = described_class.call(markdown, project: project)[:output]
+ link = result.css('a').first
+
+ expect(link['href']).to eq(
+ Gitlab::Routing.url_helpers.namespace_project_issue_path(
+ other_project.namespace,
+ other_project,
+ issue
+ )
+ )
+ end
+ end
+end
diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
index 58e1a0c1bc1..acdd23f81f3 100644
--- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
@@ -39,16 +39,6 @@ describe Banzai::ReferenceParser::IssueParser, lib: true do
expect(subject.nodes_visible_to_user(user, [link])).to eq([])
end
end
-
- context 'when the project uses an external issue tracker' do
- it 'returns all nodes' do
- link = double(:link)
-
- expect(project).to receive(:external_issue_tracker).and_return(true)
-
- expect(subject.nodes_visible_to_user(user, [link])).to eq([link])
- end
- end
end
describe '#referenced_by' do
diff --git a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
index af0e7855a9b..482f03aa0cc 100644
--- a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
+++ b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
@@ -598,8 +598,10 @@ module Ci
describe "Image and service handling" do
context "when extended docker configuration is used" do
it "returns image and service when defined" do
- config = YAML.dump({ image: { name: "ruby:2.1" },
- services: ["mysql", { name: "docker:dind", alias: "docker" }],
+ config = YAML.dump({ image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: ["mysql", { name: "docker:dind", alias: "docker",
+ entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] }],
before_script: ["pwd"],
rspec: { script: "rspec" } })
@@ -614,8 +616,10 @@ module Ci
coverage_regex: nil,
tag_list: [],
options: {
- image: { name: "ruby:2.1" },
- services: [{ name: "mysql" }, { name: "docker:dind", alias: "docker" }]
+ image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: [{ name: "mysql" },
+ { name: "docker:dind", alias: "docker", entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] }]
},
allow_failure: false,
when: "on_success",
@@ -628,8 +632,11 @@ module Ci
config = YAML.dump({ image: "ruby:2.1",
services: ["mysql"],
before_script: ["pwd"],
- rspec: { image: { name: "ruby:2.5" },
- services: [{ name: "postgresql", alias: "db-pg" }, "docker:dind"], script: "rspec" } })
+ rspec: { image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: [{ name: "postgresql", alias: "db-pg",
+ entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
+ script: "rspec" } })
config_processor = GitlabCiYamlProcessor.new(config, path)
@@ -642,8 +649,10 @@ module Ci
coverage_regex: nil,
tag_list: [],
options: {
- image: { name: "ruby:2.5" },
- services: [{ name: "postgresql", alias: "db-pg" }, { name: "docker:dind" }]
+ image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: [{ name: "postgresql", alias: "db-pg", entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] },
+ { name: "docker:dind" }]
},
allow_failure: false,
when: "on_success",
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index bca22e39500..1a4d9ed5517 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -38,7 +38,7 @@ describe Gitlab::Ci::Config::Entry::Image do
end
context 'when configuration is a hash' do
- let(:config) { { name: 'ruby:2.2', entrypoint: '/bin/sh' } }
+ let(:config) { { name: 'ruby:2.2', entrypoint: %w(/bin/sh run) } }
describe '#value' do
it 'returns image hash' do
@@ -66,7 +66,7 @@ describe Gitlab::Ci::Config::Entry::Image do
describe '#entrypoint' do
it "returns image's entrypoint" do
- expect(entry.entrypoint).to eq '/bin/sh'
+ expect(entry.entrypoint).to eq %w(/bin/sh run)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 7202fe525e4..9ebf947a751 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -43,7 +43,7 @@ describe Gitlab::Ci::Config::Entry::Service do
context 'when configuration is a hash' do
let(:config) do
- { name: 'postgresql:9.5', alias: 'db', command: 'cmd', entrypoint: '/bin/sh' }
+ { name: 'postgresql:9.5', alias: 'db', command: %w(cmd run), entrypoint: %w(/bin/sh run) }
end
describe '#valid?' do
@@ -72,13 +72,13 @@ describe Gitlab::Ci::Config::Entry::Service do
describe '#command' do
it "returns service's command" do
- expect(entry.command).to eq 'cmd'
+ expect(entry.command).to eq %w(cmd run)
end
end
describe '#entrypoint' do
it "returns service's entrypoint" do
- expect(entry.entrypoint).to eq '/bin/sh'
+ expect(entry.entrypoint).to eq %w(/bin/sh run)
end
end
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 5653cfee686..8813f129ef5 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -6,6 +6,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :trunca
before do
allow(migration).to receive(:say)
+ TestEnv.clean_test_path
end
def migration_namespace(namespace)
@@ -153,6 +154,30 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :trunca
end
end
+ describe '#perform_rename' do
+ describe 'for namespaces' do
+ let(:namespace) { create(:namespace, path: 'the-path') }
+ it 'renames the path' do
+ subject.perform_rename(migration_namespace(namespace), 'the-path', 'renamed')
+
+ expect(namespace.reload.path).to eq('renamed')
+ end
+
+ it 'renames all the routes for the namespace' do
+ child = create(:group, path: 'child', parent: namespace)
+ project = create(:project, namespace: child, path: 'the-project')
+ other_one = create(:namespace, path: 'the-path-is-similar')
+
+ subject.perform_rename(migration_namespace(namespace), 'the-path', 'renamed')
+
+ expect(namespace.reload.route.path).to eq('renamed')
+ expect(child.reload.route.path).to eq('renamed/child')
+ expect(project.reload.route.path).to eq('renamed/child/the-project')
+ expect(other_one.reload.route.path).to eq('the-path-is-similar')
+ end
+ end
+ end
+
describe '#move_pages' do
it 'moves the pages directory' do
expect(subject).to receive(:move_folders)
@@ -203,4 +228,53 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :trunca
expect(File.exist?(expected_file)).to be(true)
end
end
+
+ describe '#track_rename', redis: true do
+ it 'tracks a rename in redis' do
+ key = 'rename:FakeRenameReservedPathMigrationV1:namespace'
+
+ subject.track_rename('namespace', 'path/to/namespace', 'path/to/renamed')
+
+ old_path, new_path = [nil, nil]
+ Gitlab::Redis.with do |redis|
+ rename_info = redis.lpop(key)
+ old_path, new_path = JSON.parse(rename_info)
+ end
+
+ expect(old_path).to eq('path/to/namespace')
+ expect(new_path).to eq('path/to/renamed')
+ end
+ end
+
+ describe '#reverts_for_type', redis: true do
+ it 'yields for each tracked rename' do
+ subject.track_rename('project', 'old_path', 'new_path')
+ subject.track_rename('project', 'old_path2', 'new_path2')
+ subject.track_rename('namespace', 'namespace_path', 'new_namespace_path')
+
+ expect { |b| subject.reverts_for_type('project', &b) }
+ .to yield_successive_args(%w(old_path2 new_path2), %w(old_path new_path))
+ expect { |b| subject.reverts_for_type('namespace', &b) }
+ .to yield_with_args('namespace_path', 'new_namespace_path')
+ end
+
+ it 'keeps the revert in redis if it failed' do
+ subject.track_rename('project', 'old_path', 'new_path')
+
+ subject.reverts_for_type('project') do
+ raise 'whatever happens, keep going!'
+ end
+
+ key = 'rename:FakeRenameReservedPathMigrationV1:project'
+ stored_renames = nil
+ rename_count = 0
+ Gitlab::Redis.with do |redis|
+ stored_renames = redis.lrange(key, 0, 1)
+ rename_count = redis.llen(key)
+ end
+
+ expect(rename_count).to eq(1)
+ expect(JSON.parse(stored_renames.first)).to eq(%w(old_path new_path))
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index 8125dedd3fc..803e923b4a5 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -3,9 +3,11 @@ require 'spec_helper'
describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :truncate do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
+ let(:namespace) { create(:group, name: 'the-path') }
before do
allow(migration).to receive(:say)
+ TestEnv.clean_test_path
end
def migration_namespace(namespace)
@@ -137,8 +139,6 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
end
describe "#rename_namespace" do
- let(:namespace) { create(:group, name: 'the-path') }
-
it 'renames paths & routes for the namespace' do
expect(subject).to receive(:rename_path_for_routable)
.with(namespace)
@@ -149,11 +149,27 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
expect(namespace.reload.path).to eq('the-path0')
end
+ it 'tracks the rename' do
+ expect(subject).to receive(:track_rename)
+ .with('namespace', 'the-path', 'the-path0')
+
+ subject.rename_namespace(namespace)
+ end
+
+ it 'renames things related to the namespace' do
+ expect(subject).to receive(:rename_namespace_dependencies)
+ .with(namespace, 'the-path', 'the-path0')
+
+ subject.rename_namespace(namespace)
+ end
+ end
+
+ describe '#rename_namespace_dependencies' do
it "moves the the repository for a project in the namespace" do
create(:project, namespace: namespace, path: "the-path-project")
expected_repo = File.join(TestEnv.repos_path, "the-path0", "the-path-project.git")
- subject.rename_namespace(namespace)
+ subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
expect(File.directory?(expected_repo)).to be(true)
end
@@ -161,13 +177,13 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
it "moves the uploads for the namespace" do
expect(subject).to receive(:move_uploads).with("the-path", "the-path0")
- subject.rename_namespace(namespace)
+ subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
end
it "moves the pages for the namespace" do
expect(subject).to receive(:move_pages).with("the-path", "the-path0")
- subject.rename_namespace(namespace)
+ subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
end
it 'invalidates the markdown cache of related projects' do
@@ -175,13 +191,13 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
expect(subject).to receive(:remove_cached_html_for_projects).with([project.id])
- subject.rename_namespace(namespace)
+ subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
end
it "doesn't rename users for other namespaces" do
expect(subject).not_to receive(:rename_user)
- subject.rename_namespace(namespace)
+ subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
end
it 'renames the username of a namespace for a user' do
@@ -189,7 +205,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
expect(subject).to receive(:rename_user).with('the-path', 'the-path0')
- subject.rename_namespace(user.namespace)
+ subject.rename_namespace_dependencies(user.namespace, 'the-path', 'the-path0')
end
end
@@ -224,4 +240,50 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
subject.rename_namespaces(type: :child)
end
end
+
+ describe '#revert_renames', redis: true do
+ it 'renames the routes back to the previous values' do
+ project = create(:project, path: 'a-project', namespace: namespace)
+ subject.rename_namespace(namespace)
+
+ expect(subject).to receive(:perform_rename)
+ .with(
+ kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Namespace),
+ 'the-path0',
+ 'the-path'
+ ).and_call_original
+
+ subject.revert_renames
+
+ expect(namespace.reload.path).to eq('the-path')
+ expect(namespace.reload.route.path).to eq('the-path')
+ expect(project.reload.route.path).to eq('the-path/a-project')
+ end
+
+ it 'moves the repositories back to their original place' do
+ project = create(:project, path: 'a-project', namespace: namespace)
+ project.create_repository
+ subject.rename_namespace(namespace)
+
+ expected_path = File.join(TestEnv.repos_path, 'the-path', 'a-project.git')
+
+ expect(subject).to receive(:rename_namespace_dependencies)
+ .with(
+ kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Namespace),
+ 'the-path0',
+ 'the-path'
+ ).and_call_original
+
+ subject.revert_renames
+
+ expect(File.directory?(expected_path)).to be_truthy
+ end
+
+ it "doesn't break when the namespace was renamed" do
+ subject.rename_namespace(namespace)
+ namespace.update_attributes!(path: 'renamed-afterwards')
+
+ expect { subject.revert_renames }.not_to raise_error
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 802f77ad430..0e240a5ccf1 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -3,9 +3,15 @@ require 'spec_helper'
describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :truncate do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
+ let(:project) do
+ create(:empty_project,
+ path: 'the-path',
+ namespace: create(:namespace, path: 'known-parent' ))
+ end
before do
allow(migration).to receive(:say)
+ TestEnv.clean_test_path
end
describe '#projects_for_paths' do
@@ -47,12 +53,6 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
end
describe '#rename_project' do
- let(:project) do
- create(:empty_project,
- path: 'the-path',
- namespace: create(:namespace, path: 'known-parent' ))
- end
-
it 'renames path & route for the project' do
expect(subject).to receive(:rename_path_for_routable)
.with(project)
@@ -63,27 +63,42 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
expect(project.reload.path).to eq('the-path0')
end
+ it 'tracks the rename' do
+ expect(subject).to receive(:track_rename)
+ .with('project', 'known-parent/the-path', 'known-parent/the-path0')
+
+ subject.rename_project(project)
+ end
+
+ it 'renames the folders for the project' do
+ expect(subject).to receive(:move_project_folders).with(project, 'known-parent/the-path', 'known-parent/the-path0')
+
+ subject.rename_project(project)
+ end
+ end
+
+ describe '#move_project_folders' do
it 'moves the wiki & the repo' do
expect(subject).to receive(:move_repository)
.with(project, 'known-parent/the-path.wiki', 'known-parent/the-path0.wiki')
expect(subject).to receive(:move_repository)
.with(project, 'known-parent/the-path', 'known-parent/the-path0')
- subject.rename_project(project)
+ subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
end
it 'moves uploads' do
expect(subject).to receive(:move_uploads)
.with('known-parent/the-path', 'known-parent/the-path0')
- subject.rename_project(project)
+ subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
end
it 'moves pages' do
expect(subject).to receive(:move_pages)
.with('known-parent/the-path', 'known-parent/the-path0')
- subject.rename_project(project)
+ subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
end
end
@@ -99,4 +114,47 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
expect(File.directory?(expected_path)).to be(true)
end
end
+
+ describe '#revert_renames', redis: true do
+ it 'renames the routes back to the previous values' do
+ subject.rename_project(project)
+
+ expect(subject).to receive(:perform_rename)
+ .with(
+ kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Project),
+ 'known-parent/the-path0',
+ 'known-parent/the-path'
+ ).and_call_original
+
+ subject.revert_renames
+
+ expect(project.reload.path).to eq('the-path')
+ expect(project.route.path).to eq('known-parent/the-path')
+ end
+
+ it 'moves the repositories back to their original place' do
+ project.create_repository
+ subject.rename_project(project)
+
+ expected_path = File.join(TestEnv.repos_path, 'known-parent', 'the-path.git')
+
+ expect(subject).to receive(:move_project_folders)
+ .with(
+ kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Project),
+ 'known-parent/the-path0',
+ 'known-parent/the-path'
+ ).and_call_original
+
+ subject.revert_renames
+
+ expect(File.directory?(expected_path)).to be_truthy
+ end
+
+ it "doesn't break when the project was renamed" do
+ subject.rename_project(project)
+ project.update_attributes!(path: 'renamed-afterwards')
+
+ expect { subject.revert_renames }.not_to raise_error
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
index 1d5e58855c1..7695b95dc57 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
@@ -51,4 +51,26 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1, :truncate do
subject.rename_root_paths('the-path')
end
end
+
+ describe '#revert_renames' do
+ it 'renames namespaces' do
+ rename_namespaces = double
+ expect(described_class::RenameNamespaces)
+ .to receive(:new).with([], subject)
+ .and_return(rename_namespaces)
+ expect(rename_namespaces).to receive(:revert_renames)
+
+ subject.revert_renames
+ end
+
+ it 'renames projects' do
+ rename_projects = double
+ expect(described_class::RenameProjects)
+ .to receive(:new).with([], subject)
+ .and_return(rename_projects)
+ expect(rename_projects).to receive(:revert_renames)
+
+ subject.revert_renames
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/sha_attribute_spec.rb b/spec/lib/gitlab/database/sha_attribute_spec.rb
new file mode 100644
index 00000000000..62c1d37ea1c
--- /dev/null
+++ b/spec/lib/gitlab/database/sha_attribute_spec.rb
@@ -0,0 +1,33 @@
+require 'spec_helper'
+
+describe Gitlab::Database::ShaAttribute do
+ let(:sha) do
+ '9a573a369a5bfbb9a4a36e98852c21af8a44ea8b'
+ end
+
+ let(:binary_sha) do
+ [sha].pack('H*')
+ end
+
+ let(:binary_from_db) do
+ if Gitlab::Database.postgresql?
+ "\\x#{sha}"
+ else
+ binary_sha
+ end
+ end
+
+ let(:attribute) { described_class.new }
+
+ describe '#type_cast_from_database' do
+ it 'converts the binary SHA to a String' do
+ expect(attribute.type_cast_from_database(binary_from_db)).to eq(sha)
+ end
+ end
+
+ describe '#type_cast_for_database' do
+ it 'converts a SHA String to binary data' do
+ expect(attribute.type_cast_for_database(sha).to_s).to eq(binary_sha)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb b/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
index 7e32770f95d..64b233f3e68 100644
--- a/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
@@ -87,5 +87,9 @@ describe Gitlab::DependencyLinker::RequirementsTxtLinker, lib: true do
it 'links URLs' do
expect(subject).to include(link('http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl', 'http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl'))
end
+
+ it 'does not contain a link with a newline as the package name' do
+ expect(subject).not_to include(link("\n", "https://pypi.python.org/pypi/\n"))
+ end
end
end
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index 8b041ac69b1..66c016d14b3 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -20,6 +20,7 @@ describe Gitlab::Git::Blame, seed_helper: true do
expect(data.size).to eq(95)
expect(data.first[:commit]).to be_kind_of(Gitlab::Git::Commit)
expect(data.first[:line]).to eq("# Contribute to GitLab")
+ expect(data.first[:line]).to be_utf8
end
end
@@ -40,6 +41,7 @@ describe Gitlab::Git::Blame, seed_helper: true do
expect(data.size).to eq(1)
expect(data.first[:commit]).to be_kind_of(Gitlab::Git::Commit)
expect(data.first[:line]).to eq("Ä ü")
+ expect(data.first[:line]).to be_utf8
end
end
@@ -61,6 +63,7 @@ describe Gitlab::Git::Blame, seed_helper: true do
expect(data.size).to eq(1)
expect(data.first[:commit]).to be_kind_of(Gitlab::Git::Commit)
expect(data.first[:line]).to eq(" ")
+ expect(data.first[:line]).to be_utf8
end
end
end
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index 9dba4397e79..d1d7ed1d02a 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -48,7 +48,7 @@ describe Gitlab::Git::Branch, seed_helper: true do
expect(Gitlab::Git::Commit).to receive(:decorate)
.with(hash_including(attributes)).and_call_original
- expect(branch.dereferenced_target.message.encoding).to be(Encoding::UTF_8)
+ expect(branch.dereferenced_target.message).to be_utf8
end
end
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index d50ccb0df30..d97e85364c2 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -180,7 +180,7 @@ EOT
let(:raw_patch) { @raw_diff_hash[:diff].encode(Encoding::ASCII_8BIT) }
it 'encodes diff patch to UTF-8' do
- expect(diff.diff.encoding).to eq(Encoding::UTF_8)
+ expect(diff.diff).to be_utf8
end
end
end
diff --git a/spec/lib/gitlab/git/hook_spec.rb b/spec/lib/gitlab/git/hook_spec.rb
index 3f279c21865..73518656bde 100644
--- a/spec/lib/gitlab/git/hook_spec.rb
+++ b/spec/lib/gitlab/git/hook_spec.rb
@@ -4,18 +4,20 @@ require 'fileutils'
describe Gitlab::Git::Hook, lib: true do
describe "#trigger" do
let(:project) { create(:project, :repository) }
+ let(:repo_path) { project.repository.path }
let(:user) { create(:user) }
+ let(:gl_id) { Gitlab::GlId.gl_id(user) }
def create_hook(name)
- FileUtils.mkdir_p(File.join(project.repository.path, 'hooks'))
- File.open(File.join(project.repository.path, 'hooks', name), 'w', 0755) do |f|
+ FileUtils.mkdir_p(File.join(repo_path, 'hooks'))
+ File.open(File.join(repo_path, 'hooks', name), 'w', 0755) do |f|
f.write('exit 0')
end
end
def create_failing_hook(name)
- FileUtils.mkdir_p(File.join(project.repository.path, 'hooks'))
- File.open(File.join(project.repository.path, 'hooks', name), 'w', 0755) do |f|
+ FileUtils.mkdir_p(File.join(repo_path, 'hooks'))
+ File.open(File.join(repo_path, 'hooks', name), 'w', 0755) do |f|
f.write(<<-HOOK)
echo 'regular message from the hook'
echo 'error message from the hook' 1>&2
@@ -27,13 +29,29 @@ describe Gitlab::Git::Hook, lib: true do
['pre-receive', 'post-receive', 'update'].each do |hook_name|
context "when triggering a #{hook_name} hook" do
context "when the hook is successful" do
+ let(:hook_path) { File.join(repo_path, 'hooks', hook_name) }
+ let(:gl_repository) { Gitlab::GlRepository.gl_repository(project, false) }
+ let(:env) do
+ {
+ 'GL_ID' => gl_id,
+ 'PWD' => repo_path,
+ 'GL_PROTOCOL' => 'web',
+ 'GL_REPOSITORY' => gl_repository
+ }
+ end
+
it "returns success with no errors" do
create_hook(hook_name)
- hook = Gitlab::Git::Hook.new(hook_name, project.repository.path)
+ hook = Gitlab::Git::Hook.new(hook_name, project)
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
- status, errors = hook.trigger(Gitlab::GlId.gl_id(user), blank, blank, ref)
+ if hook_name != 'update'
+ expect(Open3).to receive(:popen3)
+ .with(env, hook_path, chdir: repo_path).and_call_original
+ end
+
+ status, errors = hook.trigger(gl_id, blank, blank, ref)
expect(status).to be true
expect(errors).to be_blank
end
@@ -42,11 +60,11 @@ describe Gitlab::Git::Hook, lib: true do
context "when the hook is unsuccessful" do
it "returns failure with errors" do
create_failing_hook(hook_name)
- hook = Gitlab::Git::Hook.new(hook_name, project.repository.path)
+ hook = Gitlab::Git::Hook.new(hook_name, project)
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
- status, errors = hook.trigger(Gitlab::GlId.gl_id(user), blank, blank, ref)
+ status, errors = hook.trigger(gl_id, blank, blank, ref)
expect(status).to be false
expect(errors).to eq("error message from the hook\n")
end
@@ -56,11 +74,11 @@ describe Gitlab::Git::Hook, lib: true do
context "when the hook doesn't exist" do
it "returns success with no errors" do
- hook = Gitlab::Git::Hook.new('unknown_hook', project.repository.path)
+ hook = Gitlab::Git::Hook.new('unknown_hook', project)
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
- status, errors = hook.trigger(Gitlab::GlId.gl_id(user), blank, blank, ref)
+ status, errors = hook.trigger(gl_id, blank, blank, ref)
expect(status).to be true
expect(errors).to be_nil
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 4894b558e03..f1cdd86edb9 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -26,31 +26,25 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- context 'with gitaly enabled' do
- before do
- stub_gitaly
- end
-
- after do
- Gitlab::GitalyClient.clear_stubs!
- end
+ it 'returns UTF-8' do
+ expect(repository.root_ref).to be_utf8
+ end
- it 'gets the branch name from GitalyClient' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
- repository.root_ref
- end
+ it 'gets the branch name from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
+ repository.root_ref
+ end
- it 'wraps GRPC not found' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
- .and_raise(GRPC::NotFound)
- expect { repository.root_ref }.to raise_error(Gitlab::Git::Repository::NoRepository)
- end
+ it 'wraps GRPC not found' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
+ .and_raise(GRPC::NotFound)
+ expect { repository.root_ref }.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
- it 'wraps GRPC exceptions' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
- .and_raise(GRPC::Unknown)
- expect { repository.root_ref }.to raise_error(Gitlab::Git::CommandError)
- end
+ it 'wraps GRPC exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
+ .and_raise(GRPC::Unknown)
+ expect { repository.root_ref }.to raise_error(Gitlab::Git::CommandError)
end
end
@@ -123,34 +117,29 @@ describe Gitlab::Git::Repository, seed_helper: true do
it 'has SeedRepo::Repo::BRANCHES.size elements' do
expect(subject.size).to eq(SeedRepo::Repo::BRANCHES.size)
end
- it { is_expected.to include("master") }
- it { is_expected.not_to include("branch-from-space") }
- context 'with gitaly enabled' do
- before do
- stub_gitaly
- end
+ it 'returns UTF-8' do
+ expect(subject.first).to be_utf8
+ end
- after do
- Gitlab::GitalyClient.clear_stubs!
- end
+ it { is_expected.to include("master") }
+ it { is_expected.not_to include("branch-from-space") }
- it 'gets the branch names from GitalyClient' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
- subject
- end
+ it 'gets the branch names from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
+ subject
+ end
- it 'wraps GRPC not found' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
- .and_raise(GRPC::NotFound)
- expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository)
- end
+ it 'wraps GRPC not found' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
+ .and_raise(GRPC::NotFound)
+ expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
- it 'wraps GRPC other exceptions' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
- .and_raise(GRPC::Unknown)
- expect { subject }.to raise_error(Gitlab::Git::CommandError)
- end
+ it 'wraps GRPC other exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
+ .and_raise(GRPC::Unknown)
+ expect { subject }.to raise_error(Gitlab::Git::CommandError)
end
end
@@ -158,10 +147,15 @@ describe Gitlab::Git::Repository, seed_helper: true do
subject { repository.tag_names }
it { is_expected.to be_kind_of Array }
+
it 'has SeedRepo::Repo::TAGS.size elements' do
expect(subject.size).to eq(SeedRepo::Repo::TAGS.size)
end
+ it 'returns UTF-8' do
+ expect(subject.first).to be_utf8
+ end
+
describe '#last' do
subject { super().last }
it { is_expected.to eq("v1.2.1") }
@@ -169,31 +163,21 @@ describe Gitlab::Git::Repository, seed_helper: true do
it { is_expected.to include("v1.0.0") }
it { is_expected.not_to include("v5.0.0") }
- context 'with gitaly enabled' do
- before do
- stub_gitaly
- end
-
- after do
- Gitlab::GitalyClient.clear_stubs!
- end
-
- it 'gets the tag names from GitalyClient' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
- subject
- end
+ it 'gets the tag names from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
+ subject
+ end
- it 'wraps GRPC not found' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
- .and_raise(GRPC::NotFound)
- expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository)
- end
+ it 'wraps GRPC not found' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
+ .and_raise(GRPC::NotFound)
+ expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
- it 'wraps GRPC exceptions' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
- .and_raise(GRPC::Unknown)
- expect { subject }.to raise_error(Gitlab::Git::CommandError)
- end
+ it 'wraps GRPC exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
+ .and_raise(GRPC::Unknown)
+ expect { subject }.to raise_error(Gitlab::Git::CommandError)
end
end
@@ -348,7 +332,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
context 'where repo has submodules' do
- let(:submodules) { repository.submodules('master') }
+ let(:submodules) { repository.send(:submodules, 'master') }
let(:submodule) { submodules.first }
it { expect(submodules).to be_kind_of Hash }
@@ -383,12 +367,12 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
it 'should not have an entry for an uncommitted submodule dir' do
- submodules = repository.submodules('fix-existing-submodule-dir')
+ submodules = repository.send(:submodules, 'fix-existing-submodule-dir')
expect(submodules).not_to have_key('submodule-existing-dir')
end
it 'should handle tags correctly' do
- submodules = repository.submodules('v1.2.1')
+ submodules = repository.send(:submodules, 'v1.2.1')
expect(submodules.first).to eq([
"six", {
@@ -414,7 +398,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
context 'where repo doesn\'t have submodules' do
- let(:submodules) { repository.submodules('6d39438') }
+ let(:submodules) { repository.send(:submodules, '6d39438') }
it 'should return an empty hash' do
expect(submodules).to be_empty
end
@@ -1267,32 +1251,31 @@ describe Gitlab::Git::Repository, seed_helper: true do
expect(@repo.local_branches.any? { |branch| branch.name == 'local_branch' }).to eq(true)
end
- context 'with gitaly enabled' do
- before do
- stub_gitaly
- end
-
- after do
- Gitlab::GitalyClient.clear_stubs!
+ it 'returns a Branch with UTF-8 fields' do
+ branches = @repo.local_branches.to_a
+ expect(branches.size).to be > 0
+ branches.each do |branch|
+ expect(branch.name).to be_utf8
+ expect(branch.target).to be_utf8 unless branch.target.nil?
end
+ end
- it 'gets the branches from GitalyClient' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches)
- .and_return([])
- @repo.local_branches
- end
+ it 'gets the branches from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches)
+ .and_return([])
+ @repo.local_branches
+ end
- it 'wraps GRPC not found' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches)
- .and_raise(GRPC::NotFound)
- expect { @repo.local_branches }.to raise_error(Gitlab::Git::Repository::NoRepository)
- end
+ it 'wraps GRPC not found' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches)
+ .and_raise(GRPC::NotFound)
+ expect { @repo.local_branches }.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
- it 'wraps GRPC exceptions' do
- expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches)
- .and_raise(GRPC::Unknown)
- expect { @repo.local_branches }.to raise_error(Gitlab::Git::CommandError)
- end
+ it 'wraps GRPC exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches)
+ .and_raise(GRPC::Unknown)
+ expect { @repo.local_branches }.to raise_error(Gitlab::Git::CommandError)
end
end
@@ -1371,11 +1354,4 @@ describe Gitlab::Git::Repository, seed_helper: true do
sha = Rugged::Commit.create(repo, options)
repo.lookup(sha)
end
-
- def stub_gitaly
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true)
-
- stub = double(:stub)
- allow(Gitaly::Ref::Stub).to receive(:new).and_return(stub)
- end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_spec.rb b/spec/lib/gitlab/gitaly_client/ref_spec.rb
index 42dba2ff874..df22fcad902 100644
--- a/spec/lib/gitlab/gitaly_client/ref_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_spec.rb
@@ -6,17 +6,6 @@ describe Gitlab::GitalyClient::Ref do
let(:relative_path) { project.path_with_namespace + '.git' }
let(:client) { described_class.new(project.repository) }
- before do
- allow(Gitlab.config.gitaly).to receive(:enabled).and_return(true)
- end
-
- after do
- # When we say `expect_any_instance_of(Gitaly::Ref::Stub)` a double is created,
- # and because GitalyClient shares stubs these will get passed from example to
- # example, which will cause an error, so we clean the stubs after each example.
- Gitlab::GitalyClient.clear_stubs!
- end
-
describe '#branch_names' do
it 'sends a find_all_branch_names message' do
expect_any_instance_of(Gitaly::Ref::Stub)
@@ -69,8 +58,26 @@ describe Gitlab::GitalyClient::Ref do
client.local_branches(sort_by: 'updated_desc')
end
+ it 'translates known mismatches on sort param values' do
+ expect_any_instance_of(Gitaly::Ref::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
+ .and_return([])
+
+ client.local_branches(sort_by: 'name_asc')
+ end
+
it 'raises an argument error if an invalid sort_by parameter is passed' do
expect { client.local_branches(sort_by: 'invalid_sort') }.to raise_error(ArgumentError)
end
end
+
+ describe '#find_ref_name', seed_helper: true do
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
+ let(:client) { described_class.new(repository) }
+ subject { client.find_ref_name(SeedRepo::Commit::ID, 'refs/heads/master') }
+
+ it { should be_utf8 }
+ it { should eq('refs/heads/master') }
+ end
end
diff --git a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
index 61c10d47434..b333e162909 100644
--- a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
@@ -97,30 +97,40 @@ describe Gitlab::HealthChecks::FsShardsCheck do
}.with_indifferent_access
end
- it { is_expected.to all(have_attributes(labels: { shard: :default })) }
+ # Unsolved intermittent failure in CI https://gitlab.com/gitlab-org/gitlab-ce/issues/31128
+ around(:each) do |example| # rubocop:disable RSpec/AroundBlock
+ times_to_try = ENV['CI'] ? 4 : 1
+ example.run_with_retry retry: times_to_try
+ end
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_accessible, value: 0)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_readable, value: 0)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_writable, value: 0)) }
+ it 'provides metrics' do
+ expect(subject).to all(have_attributes(labels: { shard: :default }))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_accessible, value: 0))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_readable, value: 0))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_writable, value: 0))
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_access_latency, value: be >= 0)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_read_latency, value: be >= 0)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_write_latency, value: be >= 0)) }
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_access_latency, value: be >= 0))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_read_latency, value: be >= 0))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_write_latency, value: be >= 0))
+ end
end
context 'storage points to directory that has both read and write rights' do
before do
FileUtils.chmod_R(0755, tmp_dir)
end
- it { is_expected.to all(have_attributes(labels: { shard: :default })) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_accessible, value: 1)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_readable, value: 1)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_writable, value: 1)) }
+ it 'provides metrics' do
+ expect(subject).to all(have_attributes(labels: { shard: :default }))
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_access_latency, value: be >= 0)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_read_latency, value: be >= 0)) }
- it { is_expected.to include(an_object_having_attributes(name: :filesystem_write_latency, value: be >= 0)) }
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_accessible, value: 1))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_readable, value: 1))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_writable, value: 1))
+
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_access_latency, value: be >= 0))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_read_latency, value: be >= 0))
+ expect(subject).to include(an_object_having_attributes(name: :filesystem_write_latency, value: be >= 0))
+ end
end
end
end
diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
index 168a59e5139..30b6a0d8845 100644
--- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
@@ -34,7 +34,7 @@ describe Gitlab::ImportExport::RepoRestorer, services: true do
it 'has the webhooks' do
restorer.restore
- expect(Gitlab::Git::Hook.new('post-receive', project.repository.path_to_repo)).to exist
+ expect(Gitlab::Git::Hook.new('post-receive', project)).to exist
end
end
end
diff --git a/spec/lib/gitlab/ldap/access_spec.rb b/spec/lib/gitlab/ldap/access_spec.rb
index 9dd997aa7dc..756fcb0fcaf 100644
--- a/spec/lib/gitlab/ldap/access_spec.rb
+++ b/spec/lib/gitlab/ldap/access_spec.rb
@@ -4,6 +4,16 @@ describe Gitlab::LDAP::Access, lib: true do
let(:access) { Gitlab::LDAP::Access.new user }
let(:user) { create(:omniauth_user) }
+ describe '.allowed?' do
+ it "updates the user's `last_credential_check_at`" do
+ expect(access).to receive(:allowed?) { true }
+ expect(described_class).to receive(:open).and_yield(access)
+
+ expect { described_class.allowed?(user) }
+ .to change { user.last_credential_check_at }
+ end
+ end
+
describe '#allowed?' do
subject { access.allowed? }
diff --git a/spec/lib/gitlab/popen_spec.rb b/spec/lib/gitlab/popen_spec.rb
index 4ae216d55b0..af50ecdb2ab 100644
--- a/spec/lib/gitlab/popen_spec.rb
+++ b/spec/lib/gitlab/popen_spec.rb
@@ -32,6 +32,17 @@ describe 'Gitlab::Popen', lib: true, no_db: true do
end
end
+ context 'with custom options' do
+ let(:vars) { { 'foobar' => 123, 'PWD' => path } }
+ let(:options) { { chdir: path } }
+
+ it 'calls popen3 with the provided environment variables' do
+ expect(Open3).to receive(:popen3).with(vars, 'ls', options)
+
+ @output, @status = @klass.new.popen(%w(ls), path, { 'foobar' => 123 })
+ end
+ end
+
context 'without a directory argument' do
before do
@output, @status = @klass.new.popen(%w(ls))
@@ -45,7 +56,7 @@ describe 'Gitlab::Popen', lib: true, no_db: true do
before do
@output, @status = @klass.new.popen(%w[cat]) { |stdin| stdin.write 'hello' }
end
-
+
it { expect(@status).to be_zero }
it { expect(@output).to eq('hello') }
end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index a97a0f8452b..5b1b8f9516a 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -4,6 +4,7 @@ require 'stringio'
describe Gitlab::Shell, lib: true do
let(:project) { double('Project', id: 7, path: 'diaspora') }
let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:popen_vars) { { 'GIT_TERMINAL_PROMPT' => ENV['GIT_TERMINAL_PROMPT'] } }
before do
allow(Project).to receive(:find).and_return(project)
@@ -50,7 +51,7 @@ describe Gitlab::Shell, lib: true do
describe '#add_key' do
it 'removes trailing garbage' do
allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(Gitlab::Utils).to receive(:system_silent).with(
+ expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
[:gitlab_shell_keys_path, 'add-key', 'key-123', 'ssh-rsa foobar']
)
@@ -100,17 +101,91 @@ describe Gitlab::Shell, lib: true do
allow(Gitlab.config.gitlab_shell).to receive(:git_timeout).and_return(800)
end
+ describe '#add_repository' do
+ it 'returns true when the command succeeds' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'add-project', 'current/storage', 'project/path.git'],
+ nil, popen_vars).and_return([nil, 0])
+
+ expect(gitlab_shell.add_repository('current/storage', 'project/path')).to be true
+ end
+
+ it 'returns false when the command fails' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'add-project', 'current/storage', 'project/path.git'],
+ nil, popen_vars).and_return(["error", 1])
+
+ expect(gitlab_shell.add_repository('current/storage', 'project/path')).to be false
+ end
+ end
+
+ describe '#remove_repository' do
+ it 'returns true when the command succeeds' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'rm-project', 'current/storage', 'project/path.git'],
+ nil, popen_vars).and_return([nil, 0])
+
+ expect(gitlab_shell.remove_repository('current/storage', 'project/path')).to be true
+ end
+
+ it 'returns false when the command fails' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'rm-project', 'current/storage', 'project/path.git'],
+ nil, popen_vars).and_return(["error", 1])
+
+ expect(gitlab_shell.remove_repository('current/storage', 'project/path')).to be false
+ end
+ end
+
+ describe '#mv_repository' do
+ it 'returns true when the command succeeds' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'mv-project', 'current/storage', 'project/path.git', 'project/newpath.git'],
+ nil, popen_vars).and_return([nil, 0])
+
+ expect(gitlab_shell.mv_repository('current/storage', 'project/path', 'project/newpath')).to be true
+ end
+
+ it 'returns false when the command fails' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'mv-project', 'current/storage', 'project/path.git', 'project/newpath.git'],
+ nil, popen_vars).and_return(["error", 1])
+
+ expect(gitlab_shell.mv_repository('current/storage', 'project/path', 'project/newpath')).to be false
+ end
+ end
+
+ describe '#fork_repository' do
+ it 'returns true when the command succeeds' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'fork-project', 'current/storage', 'project/path.git', 'new/storage', 'new-namespace'],
+ nil, popen_vars).and_return([nil, 0])
+
+ expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'new-namespace')).to be true
+ end
+
+ it 'returns false when the command fails' do
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([projects_path, 'fork-project', 'current/storage', 'project/path.git', 'new/storage', 'new-namespace'],
+ nil, popen_vars).and_return(["error", 1])
+
+ expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'new-namespace')).to be false
+ end
+ end
+
describe '#fetch_remote' do
it 'returns true when the command succeeds' do
expect(Gitlab::Popen).to receive(:popen)
- .with([projects_path, 'fetch-remote', 'current/storage', 'project/path.git', 'new/storage', '800']).and_return([nil, 0])
+ .with([projects_path, 'fetch-remote', 'current/storage', 'project/path.git', 'new/storage', '800'],
+ nil, popen_vars).and_return([nil, 0])
expect(gitlab_shell.fetch_remote('current/storage', 'project/path', 'new/storage')).to be true
end
it 'raises an exception when the command fails' do
expect(Gitlab::Popen).to receive(:popen)
- .with([projects_path, 'fetch-remote', 'current/storage', 'project/path.git', 'new/storage', '800']).and_return(["error", 1])
+ .with([projects_path, 'fetch-remote', 'current/storage', 'project/path.git', 'new/storage', '800'],
+ nil, popen_vars).and_return(["error", 1])
expect { gitlab_shell.fetch_remote('current/storage', 'project/path', 'new/storage') }.to raise_error(Gitlab::Shell::Error, "error")
end
@@ -119,14 +194,16 @@ describe Gitlab::Shell, lib: true do
describe '#import_repository' do
it 'returns true when the command succeeds' do
expect(Gitlab::Popen).to receive(:popen)
- .with([projects_path, 'import-project', 'current/storage', 'project/path.git', 'https://gitlab.com/gitlab-org/gitlab-ce.git', "800"]).and_return([nil, 0])
+ .with([projects_path, 'import-project', 'current/storage', 'project/path.git', 'https://gitlab.com/gitlab-org/gitlab-ce.git', "800"],
+ nil, popen_vars).and_return([nil, 0])
expect(gitlab_shell.import_repository('current/storage', 'project/path', 'https://gitlab.com/gitlab-org/gitlab-ce.git')).to be true
end
it 'raises an exception when the command fails' do
expect(Gitlab::Popen).to receive(:popen)
- .with([projects_path, 'import-project', 'current/storage', 'project/path.git', 'https://gitlab.com/gitlab-org/gitlab-ce.git', "800"]).and_return(["error", 1])
+ .with([projects_path, 'import-project', 'current/storage', 'project/path.git', 'https://gitlab.com/gitlab-org/gitlab-ce.git', "800"],
+ nil, popen_vars).and_return(["error", 1])
expect { gitlab_shell.import_repository('current/storage', 'project/path', 'https://gitlab.com/gitlab-org/gitlab-ce.git') }.to raise_error(Gitlab::Shell::Error, "error")
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 3c7c7562b46..c6718827028 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -30,7 +30,8 @@ describe Gitlab::UsageData do
expect(count_data.keys).to match_array(%i(
boards
ci_builds
- ci_pipelines
+ ci_internal_pipelines
+ ci_external_pipelines
ci_runners
ci_triggers
ci_pipeline_schedules
diff --git a/spec/migrations/rename_duplicated_variable_key_spec.rb b/spec/migrations/rename_duplicated_variable_key_spec.rb
new file mode 100644
index 00000000000..11096564dfa
--- /dev/null
+++ b/spec/migrations/rename_duplicated_variable_key_spec.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20170622135451_rename_duplicated_variable_key.rb')
+
+describe RenameDuplicatedVariableKey, :migration do
+ let(:variables) { table(:ci_variables) }
+ let(:projects) { table(:projects) }
+
+ before do
+ projects.create!(id: 1)
+ variables.create!(id: 1, key: 'key1', project_id: 1)
+ variables.create!(id: 2, key: 'key2', project_id: 1)
+ variables.create!(id: 3, key: 'keyX', project_id: 1)
+ variables.create!(id: 4, key: 'keyX', project_id: 1)
+ variables.create!(id: 5, key: 'keyY', project_id: 1)
+ variables.create!(id: 6, key: 'keyX', project_id: 1)
+ variables.create!(id: 7, key: 'key7', project_id: 1)
+ variables.create!(id: 8, key: 'keyY', project_id: 1)
+ end
+
+ it 'correctly renames duplicated records with smaller ids' do
+ migrate!
+
+ expect(variables.pluck(:id, :key)).to contain_exactly(
+ [1, 'key1'],
+ [2, 'key2'],
+ [3, 'keyX_3'],
+ [4, 'keyX_4'],
+ [5, 'keyY_5'],
+ [6, 'keyX'],
+ [7, 'key7'],
+ [8, 'keyY']
+ )
+ end
+end
diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb
index 090f9e70c50..dc7a0d80752 100644
--- a/spec/models/ability_spec.rb
+++ b/spec/models/ability_spec.rb
@@ -2,8 +2,8 @@ require 'spec_helper'
describe Ability, lib: true do
context 'using a nil subject' do
- it 'is always empty' do
- expect(Ability.allowed(nil, nil).to_set).to be_empty
+ it 'has no permissions' do
+ expect(Ability.policy_for(nil, nil)).to be_banned
end
end
@@ -255,12 +255,15 @@ describe Ability, lib: true do
describe '.project_disabled_features_rules' do
let(:project) { create(:empty_project, :wiki_disabled) }
- subject { described_class.allowed(project.owner, project) }
+ subject { described_class.policy_for(project.owner, project) }
context 'wiki named abilities' do
it 'disables wiki abilities if the project has no wiki' do
expect(project).to receive(:has_external_wiki?).and_return(false)
- expect(subject).not_to include(:read_wiki, :create_wiki, :update_wiki, :admin_wiki)
+ expect(subject).not_to be_allowed(:read_wiki)
+ expect(subject).not_to be_allowed(:create_wiki)
+ expect(subject).not_to be_allowed(:update_wiki)
+ expect(subject).not_to be_allowed(:admin_wiki)
end
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index dab8e8ca432..55d85a6e228 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -672,6 +672,12 @@ describe Ci::Pipeline, models: true do
end
end
+ describe '.internal_sources' do
+ subject { described_class.internal_sources }
+
+ it { is_expected.to be_an(Array) }
+ end
+
describe '#status' do
let(:build) do
create(:ci_build, :created, pipeline: pipeline, name: 'test')
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index 83494af24ba..50f7c029af8 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -5,12 +5,14 @@ describe Ci::Variable, models: true do
let(:secret_value) { 'secret' }
- it { is_expected.to validate_presence_of(:key) }
- it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id) }
- it { is_expected.to validate_length_of(:key).is_at_most(255) }
- it { is_expected.to allow_value('foo').for(:key) }
- it { is_expected.not_to allow_value('foo bar').for(:key) }
- it { is_expected.not_to allow_value('foo/bar').for(:key) }
+ describe 'validations' do
+ it { is_expected.to include_module(HasVariable) }
+ it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope) }
+ it { is_expected.to validate_length_of(:key).is_at_most(255) }
+ it { is_expected.to allow_value('foo').for(:key) }
+ it { is_expected.not_to allow_value('foo bar').for(:key) }
+ it { is_expected.not_to allow_value('foo/bar').for(:key) }
+ end
describe '.unprotected' do
subject { described_class.unprotected }
@@ -33,36 +35,4 @@ describe Ci::Variable, models: true do
end
end
end
-
- describe '#value' do
- before do
- subject.value = secret_value
- end
-
- it 'stores the encrypted value' do
- expect(subject.encrypted_value).not_to be_nil
- end
-
- it 'stores an iv for value' do
- expect(subject.encrypted_value_iv).not_to be_nil
- end
-
- it 'stores a salt for value' do
- expect(subject.encrypted_value_salt).not_to be_nil
- end
-
- it 'fails to decrypt if iv is incorrect' do
- subject.encrypted_value_iv = SecureRandom.hex
- subject.instance_variable_set(:@value, nil)
- expect { subject.value }
- .to raise_error(OpenSSL::Cipher::CipherError, 'bad decrypt')
- end
- end
-
- describe '#to_runner_variable' do
- it 'returns a hash for the runner' do
- expect(subject.to_runner_variable)
- .to eq(key: subject.key, value: subject.value, public: false)
- end
- end
end
diff --git a/spec/models/concerns/feature_gate_spec.rb b/spec/models/concerns/feature_gate_spec.rb
new file mode 100644
index 00000000000..3f601243245
--- /dev/null
+++ b/spec/models/concerns/feature_gate_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe FeatureGate do
+ describe 'User' do
+ describe '#flipper_id' do
+ context 'when user is not persisted' do
+ let(:user) { build(:user) }
+
+ it { expect(user.flipper_id).to be_nil }
+ end
+
+ context 'when user is persisted' do
+ let(:user) { create(:user) }
+
+ it { expect(user.flipper_id).to eq "User:#{user.id}" }
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/has_variable_spec.rb b/spec/models/concerns/has_variable_spec.rb
new file mode 100644
index 00000000000..f4b24e6d1d9
--- /dev/null
+++ b/spec/models/concerns/has_variable_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe HasVariable do
+ subject { build(:ci_variable) }
+
+ it { is_expected.to validate_presence_of(:key) }
+ it { is_expected.to validate_length_of(:key).is_at_most(255) }
+ it { is_expected.to allow_value('foo').for(:key) }
+ it { is_expected.not_to allow_value('foo bar').for(:key) }
+ it { is_expected.not_to allow_value('foo/bar').for(:key) }
+
+ describe '#value' do
+ before do
+ subject.value = 'secret'
+ end
+
+ it 'stores the encrypted value' do
+ expect(subject.encrypted_value).not_to be_nil
+ end
+
+ it 'stores an iv for value' do
+ expect(subject.encrypted_value_iv).not_to be_nil
+ end
+
+ it 'stores a salt for value' do
+ expect(subject.encrypted_value_salt).not_to be_nil
+ end
+
+ it 'fails to decrypt if iv is incorrect' do
+ subject.encrypted_value_iv = SecureRandom.hex
+ subject.instance_variable_set(:@value, nil)
+ expect { subject.value }
+ .to raise_error(OpenSSL::Cipher::CipherError, 'bad decrypt')
+ end
+ end
+
+ describe '#to_runner_variable' do
+ it 'returns a hash for the runner' do
+ expect(subject.to_runner_variable)
+ .to eq(key: subject.key, value: subject.value, public: false)
+ end
+ end
+end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 65f05121b40..36aedd2f701 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -132,6 +132,19 @@ describe Group, 'Routable' do
end
end
+ describe '#expires_full_path_cache' do
+ context 'with RequestStore active', :request_store do
+ it 'expires the full_path cache' do
+ expect(group.full_path).to eq('foo')
+
+ group.route.update(path: 'bar', name: 'bar')
+ group.expires_full_path_cache
+
+ expect(group.full_path).to eq('bar')
+ end
+ end
+ end
+
describe '#full_name' do
let(:group) { create(:group) }
let(:nested_group) { create(:group, parent: group) }
diff --git a/spec/models/concerns/sha_attribute_spec.rb b/spec/models/concerns/sha_attribute_spec.rb
new file mode 100644
index 00000000000..9e37c2b20c4
--- /dev/null
+++ b/spec/models/concerns/sha_attribute_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe ShaAttribute do
+ let(:model) { Class.new { include ShaAttribute } }
+
+ before do
+ columns = [
+ double(:column, name: 'name', type: :text),
+ double(:column, name: 'sha1', type: :binary)
+ ]
+
+ allow(model).to receive(:columns).and_return(columns)
+ end
+
+ describe '#sha_attribute' do
+ it 'defines a SHA attribute for a binary column' do
+ expect(model).to receive(:attribute)
+ .with(:sha1, an_instance_of(Gitlab::Database::ShaAttribute))
+
+ model.sha_attribute(:sha1)
+ end
+
+ it 'raises ArgumentError when the column type is not :binary' do
+ expect { model.sha_attribute(:name) }.to raise_error(ArgumentError)
+ end
+ end
+end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index b0635c6a90a..0a2cd8c2957 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -120,28 +120,17 @@ describe Environment, models: true do
let(:head_commit) { project.commit }
let(:commit) { project.commit.parent }
- context 'Gitaly find_ref_name feature disabled' do
- it 'returns deployment id for the environment' do
- expect(environment.first_deployment_for(commit)).to eq deployment1
- end
+ it 'returns deployment id for the environment' do
+ expect(environment.first_deployment_for(commit)).to eq deployment1
+ end
- it 'return nil when no deployment is found' do
- expect(environment.first_deployment_for(head_commit)).to eq nil
- end
+ it 'returns nil when no deployment is found' do
+ expect(environment.first_deployment_for(head_commit)).to eq nil
end
- # TODO: Uncomment when feature is reenabled
- # context 'Gitaly find_ref_name feature enabled' do
- # before do
- # allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:find_ref_name).and_return(true)
- # end
- #
- # it 'calls GitalyClient' do
- # expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:find_ref_name)
- #
- # environment.first_deployment_for(commit)
- # end
- # end
+ it 'returns a UTF-8 ref' do
+ expect(environment.first_deployment_for(commit).ref).to be_utf8
+ end
end
describe '#environment_type' do
diff --git a/spec/models/forked_project_link_spec.rb b/spec/models/forked_project_link_spec.rb
index 6e8d43f988c..5c13cf584f9 100644
--- a/spec/models/forked_project_link_spec.rb
+++ b/spec/models/forked_project_link_spec.rb
@@ -2,53 +2,75 @@ require 'spec_helper'
describe ForkedProjectLink, "add link on fork" do
let(:project_from) { create(:project, :repository) }
+ let(:project_to) { fork_project(project_from, user) }
let(:user) { create(:user) }
let(:namespace) { user.namespace }
before do
- create(:project_member, :reporter, user: user, project: project_from)
- @project_to = fork_project(project_from, user)
+ project_from.add_reporter(user)
+ end
+
+ it 'project_from knows its forks' do
+ _ = project_to
+
+ expect(project_from.forks.count).to eq(1)
end
it "project_to knows it is forked" do
- expect(@project_to.forked?).to be_truthy
+ expect(project_to.forked?).to be_truthy
end
it "project knows who it is forked from" do
- expect(@project_to.forked_from_project).to eq(project_from)
+ expect(project_to.forked_from_project).to eq(project_from)
end
-end
-describe '#forked?' do
- let(:forked_project_link) { build(:forked_project_link) }
- let(:project_from) { create(:project, :repository) }
- let(:project_to) { create(:project, forked_project_link: forked_project_link) }
+ context 'project_to is pending_delete' do
+ before do
+ project_to.update!(pending_delete: true)
+ end
- before :each do
- forked_project_link.forked_from_project = project_from
- forked_project_link.forked_to_project = project_to
- forked_project_link.save!
+ it { expect(project_from.forks.count).to eq(0) }
end
- it "project_to knows it is forked" do
- expect(project_to.forked?).to be_truthy
- end
+ context 'project_from is pending_delete' do
+ before do
+ project_from.update!(pending_delete: true)
+ end
- it "project_from is not forked" do
- expect(project_from.forked?).to be_falsey
+ it { expect(project_to.forked_from_project).to be_nil }
end
- it "project_to.destroy destroys fork_link" do
- expect(forked_project_link).to receive(:destroy)
- project_to.destroy
+ describe '#forked?' do
+ let(:project_to) { create(:project, forked_project_link: forked_project_link) }
+ let(:forked_project_link) { build(:forked_project_link) }
+
+ before do
+ forked_project_link.forked_from_project = project_from
+ forked_project_link.forked_to_project = project_to
+ forked_project_link.save!
+ end
+
+ it "project_to knows it is forked" do
+ expect(project_to.forked?).to be_truthy
+ end
+
+ it "project_from is not forked" do
+ expect(project_from.forked?).to be_falsey
+ end
+
+ it "project_to.destroy destroys fork_link" do
+ expect(forked_project_link).to receive(:destroy)
+
+ project_to.destroy
+ end
end
-end
-def fork_project(from_project, user)
- shell = double('gitlab_shell', fork_repository: true)
+ def fork_project(from_project, user)
+ service = Projects::ForkService.new(from_project, user)
+ shell = double('gitlab_shell', fork_repository: true)
- service = Projects::ForkService.new(from_project, user)
- allow(service).to receive(:gitlab_shell).and_return(shell)
+ allow(service).to receive(:gitlab_shell).and_return(shell)
- service.execute
+ service.execute
+ end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index bb5273074a2..587d4b83cb4 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -105,6 +105,22 @@ describe MergeRequest, models: true do
end
end
+ describe '#assignee_ids' do
+ it 'returns an array containing the assignee id' do
+ subject.assignee_id = 123
+
+ expect(subject.assignee_ids).to eq([123])
+ end
+ end
+
+ describe '#assignee_ids=' do
+ it 'sets assignee_id to the last id in the array' do
+ subject.assignee_ids = [123, 456]
+
+ expect(subject.assignee_id).to eq(456)
+ end
+ end
+
describe '#assignee_or_author?' do
let(:user) { create(:user) }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index e7c3acf19eb..62c4ea01ce1 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -323,6 +323,36 @@ describe Namespace, models: true do
end
end
+ describe '#users_with_descendants', :nested_groups do
+ let(:user_a) { create(:user) }
+ let(:user_b) { create(:user) }
+
+ let(:group) { create(:group) }
+ let(:nested_group) { create(:group, parent: group) }
+ let(:deep_nested_group) { create(:group, parent: nested_group) }
+
+ it 'returns member users at every nesting level without duplication' do
+ group.add_developer(user_a)
+ nested_group.add_developer(user_b)
+ deep_nested_group.add_developer(user_a)
+
+ expect(group.users_with_descendants).to contain_exactly(user_a, user_b)
+ expect(nested_group.users_with_descendants).to contain_exactly(user_a, user_b)
+ expect(deep_nested_group.users_with_descendants).to contain_exactly(user_a)
+ end
+ end
+
+ describe '#soft_delete_without_removing_associations' do
+ let(:project1) { create(:project_empty_repo, namespace: namespace) }
+
+ it 'updates the deleted_at timestamp but preserves projects' do
+ namespace.soft_delete_without_removing_associations
+
+ expect(Project.all).to include(project1)
+ expect(namespace.deleted_at).not_to be_nil
+ end
+ end
+
describe '#user_ids_for_project_authorizations' do
it 'returns the user IDs for which to refresh authorizations' do
expect(namespace.user_ids_for_project_authorizations)
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index c86f56c55eb..4a1de76f099 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -64,12 +64,12 @@ describe JiraService, models: true do
end
end
- describe '#reference_pattern' do
+ describe '.reference_pattern' do
it_behaves_like 'allows project key on reference pattern'
it 'does not allow # on the code' do
- expect(subject.reference_pattern.match('#123')).to be_nil
- expect(subject.reference_pattern.match('1#23#12')).to be_nil
+ expect(described_class.reference_pattern.match('#123')).to be_nil
+ expect(described_class.reference_pattern.match('1#23#12')).to be_nil
end
end
diff --git a/spec/models/project_services/redmine_service_spec.rb b/spec/models/project_services/redmine_service_spec.rb
index 6631d9040b1..441b3f896ca 100644
--- a/spec/models/project_services/redmine_service_spec.rb
+++ b/spec/models/project_services/redmine_service_spec.rb
@@ -31,11 +31,11 @@ describe RedmineService, models: true do
end
end
- describe '#reference_pattern' do
+ describe '.reference_pattern' do
it_behaves_like 'allows project key on reference pattern'
it 'does allow # on the reference' do
- expect(subject.reference_pattern.match('#123')[:issue]).to eq('123')
+ expect(described_class.reference_pattern.match('#123')[:issue]).to eq('123')
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index d7fcadb895e..ad98b4b669f 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -284,15 +284,6 @@ describe Project, models: true do
end
end
- describe 'default_scope' do
- it 'excludes projects pending deletion from the results' do
- project = create(:empty_project)
- create(:empty_project, pending_delete: true)
-
- expect(Project.all).to eq [project]
- end
- end
-
describe 'project token' do
it 'sets a random token if none is provided' do
project = FactoryGirl.create :empty_project, runners_token: ''
@@ -1179,6 +1170,16 @@ describe Project, models: true do
expect(relation.search(project.namespace.name)).to eq([project])
end
+
+ describe 'with pending_delete project' do
+ let(:pending_delete_project) { create(:empty_project, pending_delete: true) }
+
+ it 'shows the project pending deletion' do
+ search_result = described_class.search(pending_delete_project.name)
+
+ expect(search_result).to eq([pending_delete_project])
+ end
+ end
end
describe '#rename_repo' do
@@ -1215,6 +1216,8 @@ describe Project, models: true do
expect(project).to receive(:expire_caches_before_rename)
+ expect(project).to receive(:expires_full_path_cache)
+
project.rename_repo
end
@@ -1327,6 +1330,50 @@ describe Project, models: true do
end
end
+ describe '#ensure_repository' do
+ let(:project) { create(:project, :repository) }
+ let(:shell) { Gitlab::Shell.new }
+
+ before do
+ allow(project).to receive(:gitlab_shell).and_return(shell)
+ end
+
+ it 'creates the repository if it does not exist' do
+ allow(project).to receive(:repository_exists?)
+ .and_return(false)
+
+ allow(shell).to receive(:add_repository)
+ .with(project.repository_storage_path, project.path_with_namespace)
+ .and_return(true)
+
+ expect(project).to receive(:create_repository).with(force: true)
+
+ project.ensure_repository
+ end
+
+ it 'does not create the repository if it exists' do
+ allow(project).to receive(:repository_exists?)
+ .and_return(true)
+
+ expect(project).not_to receive(:create_repository)
+
+ project.ensure_repository
+ end
+
+ it 'creates the repository if it is a fork' do
+ expect(project).to receive(:forked?).and_return(true)
+
+ allow(project).to receive(:repository_exists?)
+ .and_return(false)
+
+ expect(shell).to receive(:add_repository)
+ .with(project.repository_storage_path, project.path_with_namespace)
+ .and_return(true)
+
+ project.ensure_repository
+ end
+ end
+
describe '#user_can_push_to_empty_repo?' do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
@@ -1478,6 +1525,40 @@ describe Project, models: true do
end
end
+ describe 'project import state transitions' do
+ context 'state transition: [:started] => [:finished]' do
+ let(:housekeeping_service) { spy }
+
+ before do
+ allow(Projects::HousekeepingService).to receive(:new) { housekeeping_service }
+ end
+
+ it 'performs housekeeping when an import of a fresh project is completed' do
+ project = create(:project_empty_repo, :import_started, import_type: :github)
+
+ project.import_finish
+
+ expect(housekeeping_service).to have_received(:execute)
+ end
+
+ it 'does not perform housekeeping when project repository does not exist' do
+ project = create(:empty_project, :import_started, import_type: :github)
+
+ project.import_finish
+
+ expect(housekeeping_service).not_to have_received(:execute)
+ end
+
+ it 'does not perform housekeeping when project does not have a valid import type' do
+ project = create(:empty_project, :import_started, import_type: nil)
+
+ project.import_finish
+
+ expect(housekeeping_service).not_to have_received(:execute)
+ end
+ end
+ end
+
describe '#latest_successful_builds_for' do
def create_pipeline(status = 'success')
create(:ci_pipeline, project: project,
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index bf74ac5ea25..1f314791479 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -278,6 +278,24 @@ describe ProjectWiki, models: true do
end
end
+ describe '#ensure_repository' do
+ it 'creates the repository if it does not exist' do
+ allow(subject).to receive(:repository_exists?).and_return(false)
+
+ expect(subject).to receive(:create_repo!)
+
+ subject.ensure_repository
+ end
+
+ it 'does not create the repository if it exists' do
+ allow(subject).to receive(:repository_exists?).and_return(true)
+
+ expect(subject).not_to receive(:create_repo!)
+
+ subject.ensure_repository
+ end
+ end
+
describe '#hook_attrs' do
it 'returns a hash with values' do
expect(subject.hook_attrs).to be_a Hash
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 3e984ec7588..c69f0a495db 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -780,7 +780,7 @@ describe Repository, models: true do
context 'when pre hooks were successful' do
it 'runs without errors' do
expect_any_instance_of(GitHooksService).to receive(:execute)
- .with(user, project.repository.path_to_repo, old_rev, blank_sha, 'refs/heads/feature')
+ .with(user, project, old_rev, blank_sha, 'refs/heads/feature')
expect { repository.rm_branch(user, 'feature') }.not_to raise_error
end
@@ -823,12 +823,7 @@ describe Repository, models: true do
service = GitHooksService.new
expect(GitHooksService).to receive(:new).and_return(service)
expect(service).to receive(:execute)
- .with(
- user,
- repository.path_to_repo,
- old_rev,
- new_rev,
- 'refs/heads/feature')
+ .with(user, project, old_rev, new_rev, 'refs/heads/feature')
.and_yield(service).and_return(true)
end
@@ -1474,9 +1469,9 @@ describe Repository, models: true do
it 'passes commit SHA to pre-receive and update hooks,\
and tag SHA to post-receive hook' do
- pre_receive_hook = Gitlab::Git::Hook.new('pre-receive', repository.path_to_repo)
- update_hook = Gitlab::Git::Hook.new('update', repository.path_to_repo)
- post_receive_hook = Gitlab::Git::Hook.new('post-receive', repository.path_to_repo)
+ pre_receive_hook = Gitlab::Git::Hook.new('pre-receive', project)
+ update_hook = Gitlab::Git::Hook.new('update', project)
+ post_receive_hook = Gitlab::Git::Hook.new('post-receive', project)
allow(Gitlab::Git::Hook).to receive(:new)
.and_return(pre_receive_hook, update_hook, post_receive_hook)
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 8e895ec6634..448555d2190 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -754,42 +754,49 @@ describe User, models: true do
end
describe '.search' do
- let(:user) { create(:user) }
+ let!(:user) { create(:user, name: 'user', username: 'usern', email: 'email@gmail.com') }
+ let!(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@gmail.com') }
- it 'returns users with a matching name' do
- expect(described_class.search(user.name)).to eq([user])
- end
+ describe 'name matching' do
+ it 'returns users with a matching name with exact match first' do
+ expect(described_class.search(user.name)).to eq([user, user2])
+ end
- it 'returns users with a partially matching name' do
- expect(described_class.search(user.name[0..2])).to eq([user])
- end
+ it 'returns users with a partially matching name' do
+ expect(described_class.search(user.name[0..2])).to eq([user2, user])
+ end
- it 'returns users with a matching name regardless of the casing' do
- expect(described_class.search(user.name.upcase)).to eq([user])
+ it 'returns users with a matching name regardless of the casing' do
+ expect(described_class.search(user2.name.upcase)).to eq([user2])
+ end
end
- it 'returns users with a matching Email' do
- expect(described_class.search(user.email)).to eq([user])
- end
+ describe 'email matching' do
+ it 'returns users with a matching Email' do
+ expect(described_class.search(user.email)).to eq([user, user2])
+ end
- it 'returns users with a partially matching Email' do
- expect(described_class.search(user.email[0..2])).to eq([user])
- end
+ it 'returns users with a partially matching Email' do
+ expect(described_class.search(user.email[0..2])).to eq([user2, user])
+ end
- it 'returns users with a matching Email regardless of the casing' do
- expect(described_class.search(user.email.upcase)).to eq([user])
+ it 'returns users with a matching Email regardless of the casing' do
+ expect(described_class.search(user2.email.upcase)).to eq([user2])
+ end
end
- it 'returns users with a matching username' do
- expect(described_class.search(user.username)).to eq([user])
- end
+ describe 'username matching' do
+ it 'returns users with a matching username' do
+ expect(described_class.search(user.username)).to eq([user, user2])
+ end
- it 'returns users with a partially matching username' do
- expect(described_class.search(user.username[0..2])).to eq([user])
- end
+ it 'returns users with a partially matching username' do
+ expect(described_class.search(user.username[0..2])).to eq([user2, user])
+ end
- it 'returns users with a matching username regardless of the casing' do
- expect(described_class.search(user.username.upcase)).to eq([user])
+ it 'returns users with a matching username regardless of the casing' do
+ expect(described_class.search(user2.username.upcase)).to eq([user2])
+ end
end
end
diff --git a/spec/policies/base_policy_spec.rb b/spec/policies/base_policy_spec.rb
index 02acdcb36df..e1963091a72 100644
--- a/spec/policies/base_policy_spec.rb
+++ b/spec/policies/base_policy_spec.rb
@@ -3,17 +3,17 @@ require 'spec_helper'
describe BasePolicy, models: true do
describe '.class_for' do
it 'detects policy class based on the subject ancestors' do
- expect(described_class.class_for(GenericCommitStatus.new)).to eq(CommitStatusPolicy)
+ expect(DeclarativePolicy.class_for(GenericCommitStatus.new)).to eq(CommitStatusPolicy)
end
it 'detects policy class for a presented subject' do
presentee = Ci::BuildPresenter.new(Ci::Build.new)
- expect(described_class.class_for(presentee)).to eq(Ci::BuildPolicy)
+ expect(DeclarativePolicy.class_for(presentee)).to eq(Ci::BuildPolicy)
end
it 'uses GlobalPolicy when :global is given' do
- expect(described_class.class_for(:global)).to eq(GlobalPolicy)
+ expect(DeclarativePolicy.class_for(:global)).to eq(GlobalPolicy)
end
end
end
diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb
index 48a139d4b83..ace95ac7067 100644
--- a/spec/policies/ci/build_policy_spec.rb
+++ b/spec/policies/ci/build_policy_spec.rb
@@ -5,8 +5,8 @@ describe Ci::BuildPolicy, :models do
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
- let(:policies) do
- described_class.abilities(user, build).to_set
+ let(:policy) do
+ described_class.new(user, build)
end
shared_context 'public pipelines disabled' do
@@ -21,7 +21,7 @@ describe Ci::BuildPolicy, :models do
context 'when public builds are enabled' do
it 'does not include ability to read build' do
- expect(policies).not_to include :read_build
+ expect(policy).not_to be_allowed :read_build
end
end
@@ -29,7 +29,7 @@ describe Ci::BuildPolicy, :models do
include_context 'public pipelines disabled'
it 'does not include ability to read build' do
- expect(policies).not_to include :read_build
+ expect(policy).not_to be_allowed :read_build
end
end
end
@@ -39,7 +39,7 @@ describe Ci::BuildPolicy, :models do
context 'when public builds are enabled' do
it 'includes ability to read build' do
- expect(policies).to include :read_build
+ expect(policy).to be_allowed :read_build
end
end
@@ -47,7 +47,7 @@ describe Ci::BuildPolicy, :models do
include_context 'public pipelines disabled'
it 'does not include ability to read build' do
- expect(policies).not_to include :read_build
+ expect(policy).not_to be_allowed :read_build
end
end
end
@@ -62,7 +62,7 @@ describe Ci::BuildPolicy, :models do
context 'when public builds are enabled' do
it 'includes ability to read build' do
- expect(policies).to include :read_build
+ expect(policy).to be_allowed :read_build
end
end
@@ -70,7 +70,7 @@ describe Ci::BuildPolicy, :models do
include_context 'public pipelines disabled'
it 'does not include ability to read build' do
- expect(policies).not_to include :read_build
+ expect(policy).not_to be_allowed :read_build
end
end
end
@@ -82,7 +82,7 @@ describe Ci::BuildPolicy, :models do
context 'when public builds are enabled' do
it 'includes ability to read build' do
- expect(policies).to include :read_build
+ expect(policy).to be_allowed :read_build
end
end
@@ -90,7 +90,7 @@ describe Ci::BuildPolicy, :models do
include_context 'public pipelines disabled'
it 'does not include ability to read build' do
- expect(policies).to include :read_build
+ expect(policy).to be_allowed :read_build
end
end
end
@@ -115,7 +115,7 @@ describe Ci::BuildPolicy, :models do
end
it 'does not include ability to update build' do
- expect(policies).not_to include :update_build
+ expect(policy).to be_disallowed :update_build
end
end
@@ -125,7 +125,7 @@ describe Ci::BuildPolicy, :models do
end
it 'includes ability to update build' do
- expect(policies).to include :update_build
+ expect(policy).to be_allowed :update_build
end
end
end
@@ -135,7 +135,7 @@ describe Ci::BuildPolicy, :models do
let(:build) { create(:ci_build, :manual, pipeline: pipeline) }
it 'includes ability to update build' do
- expect(policies).to include :update_build
+ expect(policy).to be_allowed :update_build
end
end
@@ -143,7 +143,7 @@ describe Ci::BuildPolicy, :models do
let(:build) { create(:ci_build, pipeline: pipeline) }
it 'includes ability to update build' do
- expect(policies).to include :update_build
+ expect(policy).to be_allowed :update_build
end
end
end
diff --git a/spec/policies/ci/trigger_policy_spec.rb b/spec/policies/ci/trigger_policy_spec.rb
index 63ad5eb7322..ed4010e723b 100644
--- a/spec/policies/ci/trigger_policy_spec.rb
+++ b/spec/policies/ci/trigger_policy_spec.rb
@@ -6,36 +6,36 @@ describe Ci::TriggerPolicy, :models do
let(:trigger) { create(:ci_trigger, project: project, owner: owner) }
let(:policies) do
- described_class.abilities(user, trigger).to_set
+ described_class.new(user, trigger)
end
shared_examples 'allows to admin and manage trigger' do
it 'does include ability to admin trigger' do
- expect(policies).to include :admin_trigger
+ expect(policies).to be_allowed :admin_trigger
end
it 'does include ability to manage trigger' do
- expect(policies).to include :manage_trigger
+ expect(policies).to be_allowed :manage_trigger
end
end
shared_examples 'allows to manage trigger' do
it 'does not include ability to admin trigger' do
- expect(policies).not_to include :admin_trigger
+ expect(policies).not_to be_allowed :admin_trigger
end
it 'does include ability to manage trigger' do
- expect(policies).to include :manage_trigger
+ expect(policies).to be_allowed :manage_trigger
end
end
shared_examples 'disallows to admin and manage trigger' do
it 'does not include ability to admin trigger' do
- expect(policies).not_to include :admin_trigger
+ expect(policies).not_to be_allowed :admin_trigger
end
it 'does not include ability to manage trigger' do
- expect(policies).not_to include :manage_trigger
+ expect(policies).not_to be_allowed :manage_trigger
end
end
diff --git a/spec/policies/deploy_key_policy_spec.rb b/spec/policies/deploy_key_policy_spec.rb
index 28e10f0bfe2..f15f4a11f02 100644
--- a/spec/policies/deploy_key_policy_spec.rb
+++ b/spec/policies/deploy_key_policy_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe DeployKeyPolicy, models: true do
- subject { described_class.abilities(current_user, deploy_key).to_set }
+ subject { described_class.new(current_user, deploy_key) }
describe 'updating a deploy_key' do
context 'when a regular user' do
@@ -16,7 +16,7 @@ describe DeployKeyPolicy, models: true do
project.deploy_keys << deploy_key
end
- it { is_expected.to include(:update_deploy_key) }
+ it { is_expected.to be_allowed(:update_deploy_key) }
end
context 'tries to update private deploy key attached to other project' do
@@ -27,13 +27,13 @@ describe DeployKeyPolicy, models: true do
other_project.deploy_keys << deploy_key
end
- it { is_expected.not_to include(:update_deploy_key) }
+ it { is_expected.to be_disallowed(:update_deploy_key) }
end
context 'tries to update public deploy key' do
let(:deploy_key) { create(:another_deploy_key, public: true) }
- it { is_expected.not_to include(:update_deploy_key) }
+ it { is_expected.to be_disallowed(:update_deploy_key) }
end
end
@@ -43,13 +43,13 @@ describe DeployKeyPolicy, models: true do
context ' tries to update private deploy key' do
let(:deploy_key) { create(:deploy_key, public: false) }
- it { is_expected.to include(:update_deploy_key) }
+ it { is_expected.to be_allowed(:update_deploy_key) }
end
context 'when an admin user tries to update public deploy key' do
let(:deploy_key) { create(:another_deploy_key, public: true) }
- it { is_expected.to include(:update_deploy_key) }
+ it { is_expected.to be_allowed(:update_deploy_key) }
end
end
end
diff --git a/spec/policies/environment_policy_spec.rb b/spec/policies/environment_policy_spec.rb
index 650432520bb..035e20c7452 100644
--- a/spec/policies/environment_policy_spec.rb
+++ b/spec/policies/environment_policy_spec.rb
@@ -8,8 +8,8 @@ describe EnvironmentPolicy do
create(:environment, :with_review_app, project: project)
end
- let(:policies) do
- described_class.abilities(user, environment).to_set
+ let(:policy) do
+ described_class.new(user, environment)
end
describe '#rules' do
@@ -17,7 +17,7 @@ describe EnvironmentPolicy do
let(:project) { create(:project, :private) }
it 'does not include ability to stop environment' do
- expect(policies).not_to include :stop_environment
+ expect(policy).to be_disallowed :stop_environment
end
end
@@ -25,7 +25,7 @@ describe EnvironmentPolicy do
let(:project) { create(:project, :public) }
it 'does not include ability to stop environment' do
- expect(policies).not_to include :stop_environment
+ expect(policy).to be_disallowed :stop_environment
end
end
@@ -38,7 +38,7 @@ describe EnvironmentPolicy do
context 'when team member has ability to stop environment' do
it 'includes ability to stop environment' do
- expect(policies).to include :stop_environment
+ expect(policy).to be_allowed :stop_environment
end
end
@@ -49,7 +49,7 @@ describe EnvironmentPolicy do
end
it 'does not include ability to stop environment' do
- expect(policies).not_to include :stop_environment
+ expect(policy).to be_disallowed :stop_environment
end
end
end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
new file mode 100644
index 00000000000..bb0fa0c0e9c
--- /dev/null
+++ b/spec/policies/global_policy_spec.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+
+describe GlobalPolicy, models: true do
+ let(:current_user) { create(:user) }
+ let(:user) { create(:user) }
+
+ subject { GlobalPolicy.new(current_user, [user]) }
+
+ describe "reading the list of users" do
+ context "for a logged in user" do
+ it { is_expected.to be_allowed(:read_users_list) }
+ end
+
+ context "for an anonymous user" do
+ let(:current_user) { nil }
+
+ context "when the public level is restricted" do
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+ end
+
+ it { is_expected.not_to be_allowed(:read_users_list) }
+ end
+
+ context "when the public level is not restricted" do
+ before do
+ stub_application_setting(restricted_visibility_levels: [])
+ end
+
+ it { is_expected.to be_allowed(:read_users_list) }
+ end
+ end
+ end
+end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index a8331ceb5ff..06db0ea56e3 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -36,16 +36,24 @@ describe GroupPolicy, models: true do
group.add_owner(owner)
end
- subject { described_class.abilities(current_user, group).to_set }
+ subject { described_class.new(current_user, group) }
+
+ def expect_allowed(*permissions)
+ permissions.each { |p| is_expected.to be_allowed(p) }
+ end
+
+ def expect_disallowed(*permissions)
+ permissions.each { |p| is_expected.not_to be_allowed(p) }
+ end
context 'with no user' do
let(:current_user) { nil }
it do
- is_expected.to include(:read_group)
- is_expected.not_to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_disallowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -53,10 +61,10 @@ describe GroupPolicy, models: true do
let(:current_user) { guest }
it do
- is_expected.to include(:read_group)
- is_expected.not_to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_disallowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -64,10 +72,10 @@ describe GroupPolicy, models: true do
let(:current_user) { reporter }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -75,10 +83,10 @@ describe GroupPolicy, models: true do
let(:current_user) { developer }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -86,10 +94,10 @@ describe GroupPolicy, models: true do
let(:current_user) { master }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -97,10 +105,10 @@ describe GroupPolicy, models: true do
let(:current_user) { owner }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*master_permissions)
- is_expected.to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*master_permissions)
+ expect_allowed(*owner_permissions)
end
end
@@ -108,10 +116,10 @@ describe GroupPolicy, models: true do
let(:current_user) { admin }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*master_permissions)
- is_expected.to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*master_permissions)
+ expect_allowed(*owner_permissions)
end
end
@@ -130,16 +138,16 @@ describe GroupPolicy, models: true do
nested_group.add_owner(owner)
end
- subject { described_class.abilities(current_user, nested_group).to_set }
+ subject { described_class.new(current_user, nested_group) }
context 'with no user' do
let(:current_user) { nil }
it do
- is_expected.not_to include(:read_group)
- is_expected.not_to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_disallowed(:read_group)
+ expect_disallowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -147,10 +155,10 @@ describe GroupPolicy, models: true do
let(:current_user) { guest }
it do
- is_expected.to include(:read_group)
- is_expected.not_to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_disallowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -158,10 +166,10 @@ describe GroupPolicy, models: true do
let(:current_user) { reporter }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -169,10 +177,10 @@ describe GroupPolicy, models: true do
let(:current_user) { developer }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -180,10 +188,10 @@ describe GroupPolicy, models: true do
let(:current_user) { master }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -191,10 +199,10 @@ describe GroupPolicy, models: true do
let(:current_user) { owner }
it do
- is_expected.to include(:read_group)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*master_permissions)
- is_expected.to include(*owner_permissions)
+ expect_allowed(:read_group)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*master_permissions)
+ expect_allowed(*owner_permissions)
end
end
end
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 4a07c864428..c978cbd6185 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -9,7 +9,7 @@ describe IssuePolicy, models: true do
let(:reporter_from_group_link) { create(:user) }
def permissions(user, issue)
- described_class.abilities(user, issue).to_set
+ described_class.new(user, issue)
end
context 'a private project' do
@@ -30,42 +30,42 @@ describe IssuePolicy, models: true do
end
it 'does not allow non-members to read issues' do
- expect(permissions(non_member, issue)).not_to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(non_member, issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows guests to read issues' do
- expect(permissions(guest, issue)).to include(:read_issue)
- expect(permissions(guest, issue)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(guest, issue)).to be_allowed(:read_issue)
+ expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue)
- expect(permissions(guest, issue_no_assignee)).to include(:read_issue)
- expect(permissions(guest, issue_no_assignee)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue)
+ expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
end
it 'allows reporters to read, update, and admin issues' do
- expect(permissions(reporter, issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows reporters from group links to read, update, and admin issues' do
- expect(permissions(reporter_from_group_link, issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter_from_group_link, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows issue authors to read and update their issues' do
- expect(permissions(author, issue)).to include(:read_issue, :update_issue)
- expect(permissions(author, issue)).not_to include(:admin_issue)
+ expect(permissions(author, issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(author, issue)).to be_disallowed(:admin_issue)
- expect(permissions(author, issue_no_assignee)).to include(:read_issue)
- expect(permissions(author, issue_no_assignee)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue)
+ expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
end
it 'allows issue assignees to read and update their issues' do
- expect(permissions(assignee, issue)).to include(:read_issue, :update_issue)
- expect(permissions(assignee, issue)).not_to include(:admin_issue)
+ expect(permissions(assignee, issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(assignee, issue)).to be_disallowed(:admin_issue)
- expect(permissions(assignee, issue_no_assignee)).to include(:read_issue)
- expect(permissions(assignee, issue_no_assignee)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue)
+ expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
end
context 'with confidential issues' do
@@ -73,37 +73,37 @@ describe IssuePolicy, models: true do
let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) }
it 'does not allow non-members to read confidential issues' do
- expect(permissions(non_member, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(non_member, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(non_member, confidential_issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(non_member, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
it 'does not allow guests to read confidential issues' do
- expect(permissions(guest, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(guest, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows reporters to read, update, and admin confidential issues' do
- expect(permissions(reporter, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows reporters from group links to read, update, and admin confidential issues' do
- expect(permissions(reporter_from_group_link, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows issue authors to read and update their confidential issues' do
- expect(permissions(author, confidential_issue)).to include(:read_issue, :update_issue)
- expect(permissions(author, confidential_issue)).not_to include(:admin_issue)
+ expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(author, confidential_issue)).to be_disallowed(:admin_issue)
- expect(permissions(author, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows issue assignees to read and update their confidential issues' do
- expect(permissions(assignee, confidential_issue)).to include(:read_issue, :update_issue)
- expect(permissions(assignee, confidential_issue)).not_to include(:admin_issue)
+ expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(assignee, confidential_issue)).to be_disallowed(:admin_issue)
- expect(permissions(assignee, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
end
end
@@ -123,37 +123,37 @@ describe IssuePolicy, models: true do
end
it 'allows guests to read issues' do
- expect(permissions(guest, issue)).to include(:read_issue)
- expect(permissions(guest, issue)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(guest, issue)).to be_allowed(:read_issue)
+ expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue)
- expect(permissions(guest, issue_no_assignee)).to include(:read_issue)
- expect(permissions(guest, issue_no_assignee)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue)
+ expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
end
it 'allows reporters to read, update, and admin issues' do
- expect(permissions(reporter, issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows reporters from group links to read, update, and admin issues' do
- expect(permissions(reporter_from_group_link, issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter_from_group_link, issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows issue authors to read and update their issues' do
- expect(permissions(author, issue)).to include(:read_issue, :update_issue)
- expect(permissions(author, issue)).not_to include(:admin_issue)
+ expect(permissions(author, issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(author, issue)).to be_disallowed(:admin_issue)
- expect(permissions(author, issue_no_assignee)).to include(:read_issue)
- expect(permissions(author, issue_no_assignee)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue)
+ expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
end
it 'allows issue assignees to read and update their issues' do
- expect(permissions(assignee, issue)).to include(:read_issue, :update_issue)
- expect(permissions(assignee, issue)).not_to include(:admin_issue)
+ expect(permissions(assignee, issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(assignee, issue)).to be_disallowed(:admin_issue)
- expect(permissions(assignee, issue_no_assignee)).to include(:read_issue)
- expect(permissions(assignee, issue_no_assignee)).not_to include(:update_issue, :admin_issue)
+ expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue)
+ expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue)
end
context 'with confidential issues' do
@@ -161,32 +161,32 @@ describe IssuePolicy, models: true do
let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) }
it 'does not allow guests to read confidential issues' do
- expect(permissions(guest, confidential_issue)).not_to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(guest, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows reporters to read, update, and admin confidential issues' do
- expect(permissions(reporter, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows reporters from group links to read, update, and admin confidential issues' do
- expect(permissions(reporter_from_group_link, confidential_issue)).to include(:read_issue, :update_issue, :admin_issue)
- expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows issue authors to read and update their confidential issues' do
- expect(permissions(author, confidential_issue)).to include(:read_issue, :update_issue)
- expect(permissions(author, confidential_issue)).not_to include(:admin_issue)
+ expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(author, confidential_issue)).to be_disallowed(:admin_issue)
- expect(permissions(author, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
it 'allows issue assignees to read and update their confidential issues' do
- expect(permissions(assignee, confidential_issue)).to include(:read_issue, :update_issue)
- expect(permissions(assignee, confidential_issue)).not_to include(:admin_issue)
+ expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :update_issue)
+ expect(permissions(assignee, confidential_issue)).to be_disallowed(:admin_issue)
- expect(permissions(assignee, confidential_issue_no_assignee)).not_to include(:read_issue, :update_issue, :admin_issue)
+ expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :update_issue, :admin_issue)
end
end
end
diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb
index 58aa1145c9e..4d6350fc653 100644
--- a/spec/policies/personal_snippet_policy_spec.rb
+++ b/spec/policies/personal_snippet_policy_spec.rb
@@ -14,7 +14,7 @@ describe PersonalSnippetPolicy, models: true do
end
def permissions(user)
- described_class.abilities(user, snippet).to_set
+ described_class.new(user, snippet)
end
context 'public snippet' do
@@ -24,9 +24,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(nil) }
it do
- is_expected.to include(:read_personal_snippet)
- is_expected.not_to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -34,9 +34,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(regular_user) }
it do
- is_expected.to include(:read_personal_snippet)
- is_expected.to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -44,9 +44,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(snippet.author) }
it do
- is_expected.to include(:read_personal_snippet)
- is_expected.to include(:comment_personal_snippet)
- is_expected.to include(*author_permissions)
+ is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:comment_personal_snippet)
+ is_expected.to be_allowed(*author_permissions)
end
end
end
@@ -58,9 +58,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(nil) }
it do
- is_expected.not_to include(:read_personal_snippet)
- is_expected.not_to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -68,9 +68,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(regular_user) }
it do
- is_expected.to include(:read_personal_snippet)
- is_expected.to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -78,9 +78,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(external_user) }
it do
- is_expected.not_to include(:read_personal_snippet)
- is_expected.not_to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -88,9 +88,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(snippet.author) }
it do
- is_expected.to include(:read_personal_snippet)
- is_expected.to include(:comment_personal_snippet)
- is_expected.to include(*author_permissions)
+ is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:comment_personal_snippet)
+ is_expected.to be_allowed(*author_permissions)
end
end
end
@@ -102,9 +102,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(nil) }
it do
- is_expected.not_to include(:read_personal_snippet)
- is_expected.not_to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -112,9 +112,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(regular_user) }
it do
- is_expected.not_to include(:read_personal_snippet)
- is_expected.not_to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -122,9 +122,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(external_user) }
it do
- is_expected.not_to include(:read_personal_snippet)
- is_expected.not_to include(:comment_personal_snippet)
- is_expected.not_to include(*author_permissions)
+ is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(*author_permissions)
end
end
@@ -132,9 +132,9 @@ describe PersonalSnippetPolicy, models: true do
subject { permissions(snippet.author) }
it do
- is_expected.to include(:read_personal_snippet)
- is_expected.to include(:comment_personal_snippet)
- is_expected.to include(*author_permissions)
+ is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:comment_personal_snippet)
+ is_expected.to be_allowed(*author_permissions)
end
end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index d70e15f006b..ca435dd0218 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -73,37 +73,45 @@ describe ProjectPolicy, models: true do
project.team << [reporter, :reporter]
end
+ def expect_allowed(*permissions)
+ permissions.each { |p| is_expected.to be_allowed(p) }
+ end
+
+ def expect_disallowed(*permissions)
+ permissions.each { |p| is_expected.not_to be_allowed(p) }
+ end
+
it 'does not include the read_issue permission when the issue author is not a member of the private project' do
project = create(:empty_project, :private)
issue = create(:issue, project: project)
user = issue.author
- expect(project.team.member?(issue.author)).to eq(false)
+ expect(project.team.member?(issue.author)).to be false
- expect(BasePolicy.class_for(project).abilities(user, project).can_set)
- .not_to include(:read_issue)
-
- expect(Ability.allowed?(user, :read_issue, project)).to be_falsy
+ expect(Ability).not_to be_allowed(user, :read_issue, project)
end
- it 'does not include the wiki permissions when the feature is disabled' do
- project.project_feature.update_attribute(:wiki_access_level, ProjectFeature::DISABLED)
- wiki_permissions = [:read_wiki, :create_wiki, :update_wiki, :admin_wiki, :download_wiki_code]
+ context 'when the feature is disabled' do
+ subject { described_class.new(owner, project) }
- permissions = described_class.abilities(owner, project).to_set
+ before do
+ project.project_feature.update_attribute(:wiki_access_level, ProjectFeature::DISABLED)
+ end
- expect(permissions).not_to include(*wiki_permissions)
+ it 'does not include the wiki permissions' do
+ expect_disallowed :read_wiki, :create_wiki, :update_wiki, :admin_wiki, :download_wiki_code
+ end
end
context 'abilities for non-public projects' do
let(:project) { create(:empty_project, namespace: owner.namespace) }
- subject { described_class.abilities(current_user, project).to_set }
+ subject { described_class.new(current_user, project) }
context 'with no user' do
let(:current_user) { nil }
- it { is_expected.to be_empty }
+ it { is_expected.to be_banned }
end
context 'guests' do
@@ -114,18 +122,18 @@ describe ProjectPolicy, models: true do
end
it do
- is_expected.to include(*guest_permissions)
- is_expected.not_to include(*reporter_public_build_permissions)
- is_expected.not_to include(*team_member_reporter_permissions)
- is_expected.not_to include(*developer_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(*guest_permissions)
+ expect_disallowed(*reporter_public_build_permissions)
+ expect_disallowed(*team_member_reporter_permissions)
+ expect_disallowed(*developer_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
context 'public builds enabled' do
it do
- is_expected.to include(*guest_permissions)
- is_expected.to include(:read_build, :read_pipeline)
+ expect_allowed(*guest_permissions)
+ expect_allowed(:read_build, :read_pipeline)
end
end
@@ -135,8 +143,8 @@ describe ProjectPolicy, models: true do
end
it do
- is_expected.to include(*guest_permissions)
- is_expected.not_to include(:read_build, :read_pipeline)
+ expect_allowed(*guest_permissions)
+ expect_disallowed(:read_build, :read_pipeline)
end
end
@@ -147,8 +155,8 @@ describe ProjectPolicy, models: true do
end
it do
- is_expected.not_to include(:read_build)
- is_expected.to include(:read_pipeline)
+ expect_disallowed(:read_build)
+ expect_allowed(:read_pipeline)
end
end
end
@@ -157,12 +165,13 @@ describe ProjectPolicy, models: true do
let(:current_user) { reporter }
it do
- is_expected.to include(*guest_permissions)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*team_member_reporter_permissions)
- is_expected.not_to include(*developer_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(*guest_permissions)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*team_member_reporter_permissions)
+ expect_disallowed(*developer_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -170,12 +179,12 @@ describe ProjectPolicy, models: true do
let(:current_user) { dev }
it do
- is_expected.to include(*guest_permissions)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*team_member_reporter_permissions)
- is_expected.to include(*developer_permissions)
- is_expected.not_to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(*guest_permissions)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*team_member_reporter_permissions)
+ expect_allowed(*developer_permissions)
+ expect_disallowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -183,12 +192,12 @@ describe ProjectPolicy, models: true do
let(:current_user) { master }
it do
- is_expected.to include(*guest_permissions)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*team_member_reporter_permissions)
- is_expected.to include(*developer_permissions)
- is_expected.to include(*master_permissions)
- is_expected.not_to include(*owner_permissions)
+ expect_allowed(*guest_permissions)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*team_member_reporter_permissions)
+ expect_allowed(*developer_permissions)
+ expect_allowed(*master_permissions)
+ expect_disallowed(*owner_permissions)
end
end
@@ -196,12 +205,12 @@ describe ProjectPolicy, models: true do
let(:current_user) { owner }
it do
- is_expected.to include(*guest_permissions)
- is_expected.to include(*reporter_permissions)
- is_expected.to include(*team_member_reporter_permissions)
- is_expected.to include(*developer_permissions)
- is_expected.to include(*master_permissions)
- is_expected.to include(*owner_permissions)
+ expect_allowed(*guest_permissions)
+ expect_allowed(*reporter_permissions)
+ expect_allowed(*team_member_reporter_permissions)
+ expect_allowed(*developer_permissions)
+ expect_allowed(*master_permissions)
+ expect_allowed(*owner_permissions)
end
end
@@ -209,12 +218,12 @@ describe ProjectPolicy, models: true do
let(:current_user) { admin }
it do
- is_expected.to include(*guest_permissions)
- is_expected.to include(*reporter_permissions)
- is_expected.not_to include(*team_member_reporter_permissions)
- is_expected.to include(*developer_permissions)
- is_expected.to include(*master_permissions)
- is_expected.to include(*owner_permissions)
+ expect_allowed(*guest_permissions)
+ expect_allowed(*reporter_permissions)
+ expect_disallowed(*team_member_reporter_permissions)
+ expect_allowed(*developer_permissions)
+ expect_allowed(*master_permissions)
+ expect_allowed(*owner_permissions)
end
end
end
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index d2b2528c57a..2799f03fb9b 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -15,7 +15,15 @@ describe ProjectSnippetPolicy, models: true do
def abilities(user, snippet_visibility)
snippet = create(:project_snippet, snippet_visibility, project: project)
- described_class.abilities(user, snippet).to_set
+ described_class.new(user, snippet)
+ end
+
+ def expect_allowed(*permissions)
+ permissions.each { |p| is_expected.to be_allowed(p) }
+ end
+
+ def expect_disallowed(*permissions)
+ permissions.each { |p| is_expected.not_to be_allowed(p) }
end
context 'public snippet' do
@@ -23,8 +31,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(nil, :public) }
it do
- is_expected.to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -32,8 +40,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(regular_user, :public) }
it do
- is_expected.to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -41,8 +49,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(external_user, :public) }
it do
- is_expected.to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
end
@@ -52,8 +60,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(nil, :internal) }
it do
- is_expected.not_to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_disallowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -61,8 +69,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(regular_user, :internal) }
it do
- is_expected.to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -70,8 +78,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(external_user, :internal) }
it do
- is_expected.not_to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_disallowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -83,8 +91,8 @@ describe ProjectSnippetPolicy, models: true do
end
it do
- is_expected.to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
end
@@ -94,8 +102,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(nil, :private) }
it do
- is_expected.not_to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_disallowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -103,19 +111,19 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(regular_user, :private) }
it do
- is_expected.not_to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_disallowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
context 'snippet author' do
let(:snippet) { create(:project_snippet, :private, author: regular_user, project: project) }
- subject { described_class.abilities(regular_user, snippet).to_set }
+ subject { described_class.new(regular_user, snippet) }
it do
- is_expected.to include(:read_project_snippet)
- is_expected.to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_allowed(*author_permissions)
end
end
@@ -127,8 +135,8 @@ describe ProjectSnippetPolicy, models: true do
end
it do
- is_expected.to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -140,8 +148,8 @@ describe ProjectSnippetPolicy, models: true do
end
it do
- is_expected.to include(:read_project_snippet)
- is_expected.not_to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_disallowed(*author_permissions)
end
end
@@ -149,8 +157,8 @@ describe ProjectSnippetPolicy, models: true do
subject { abilities(create(:admin), :private) }
it do
- is_expected.to include(:read_project_snippet)
- is_expected.to include(*author_permissions)
+ expect_allowed(:read_project_snippet)
+ expect_allowed(*author_permissions)
end
end
end
diff --git a/spec/policies/user_policy_spec.rb b/spec/policies/user_policy_spec.rb
index d5761390d39..0251d5dcf1c 100644
--- a/spec/policies/user_policy_spec.rb
+++ b/spec/policies/user_policy_spec.rb
@@ -4,34 +4,34 @@ describe UserPolicy, models: true do
let(:current_user) { create(:user) }
let(:user) { create(:user) }
- subject { described_class.abilities(current_user, user).to_set }
+ subject { UserPolicy.new(current_user, user) }
describe "reading a user's information" do
- it { is_expected.to include(:read_user) }
+ it { is_expected.to be_allowed(:read_user) }
end
describe "destroying a user" do
context "when a regular user tries to destroy another regular user" do
- it { is_expected.not_to include(:destroy_user) }
+ it { is_expected.not_to be_allowed(:destroy_user) }
end
context "when a regular user tries to destroy themselves" do
let(:current_user) { user }
- it { is_expected.to include(:destroy_user) }
+ it { is_expected.to be_allowed(:destroy_user) }
end
context "when an admin user tries to destroy a regular user" do
let(:current_user) { create(:user, :admin) }
- it { is_expected.to include(:destroy_user) }
+ it { is_expected.to be_allowed(:destroy_user) }
end
context "when an admin user tries to destroy a ghost user" do
let(:current_user) { create(:user, :admin) }
let(:user) { create(:user, :ghost) }
- it { is_expected.not_to include(:destroy_user) }
+ it { is_expected.not_to be_allowed(:destroy_user) }
end
end
end
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index f169e6661d1..1d8aaeea8f2 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -4,6 +4,13 @@ describe API::Features do
let(:user) { create(:user) }
let(:admin) { create(:admin) }
+ before do
+ Flipper.unregister_groups
+ Flipper.register(:perf_team) do |actor|
+ actor.respond_to?(:admin) && actor.admin?
+ end
+ end
+
describe 'GET /features' do
let(:expected_features) do
[
@@ -16,6 +23,14 @@ describe API::Features do
'name' => 'feature_2',
'state' => 'off',
'gates' => [{ 'key' => 'boolean', 'value' => false }]
+ },
+ {
+ 'name' => 'feature_3',
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'groups', 'value' => ['perf_team'] }
+ ]
}
]
end
@@ -23,6 +38,7 @@ describe API::Features do
before do
Feature.get('feature_1').enable
Feature.get('feature_2').disable
+ Feature.get('feature_3').enable Feature.group(:perf_team)
end
it 'returns a 401 for anonymous users' do
@@ -47,30 +63,70 @@ describe API::Features do
describe 'POST /feature' do
let(:feature_name) { 'my_feature' }
- it 'returns a 401 for anonymous users' do
- post api("/features/#{feature_name}")
- expect(response).to have_http_status(401)
- end
+ context 'when the feature does not exist' do
+ it 'returns a 401 for anonymous users' do
+ post api("/features/#{feature_name}")
- it 'returns a 403 for users' do
- post api("/features/#{feature_name}", user)
+ expect(response).to have_http_status(401)
+ end
- expect(response).to have_http_status(403)
- end
+ it 'returns a 403 for users' do
+ post api("/features/#{feature_name}", user)
- it 'creates an enabled feature if passed true' do
- post api("/features/#{feature_name}", admin), value: 'true'
+ expect(response).to have_http_status(403)
+ end
- expect(response).to have_http_status(201)
- expect(Feature.get(feature_name)).to be_enabled
- end
+ context 'when passed value=true' do
+ it 'creates an enabled feature' do
+ post api("/features/#{feature_name}", admin), value: 'true'
- it 'creates a feature with the given percentage if passed an integer' do
- post api("/features/#{feature_name}", admin), value: '50'
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'on',
+ 'gates' => [{ 'key' => 'boolean', 'value' => true }])
+ end
+
+ it 'creates an enabled feature for the given Flipper group when passed feature_group=perf_team' do
+ post api("/features/#{feature_name}", admin), value: 'true', feature_group: 'perf_team'
+
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'groups', 'value' => ['perf_team'] }
+ ])
+ end
+
+ it 'creates an enabled feature for the given user when passed user=username' do
+ post api("/features/#{feature_name}", admin), value: 'true', user: user.username
- expect(response).to have_http_status(201)
- expect(Feature.get(feature_name).percentage_of_time_value).to be(50)
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
+ ])
+ end
+ end
+
+ it 'creates a feature with the given percentage if passed an integer' do
+ post api("/features/#{feature_name}", admin), value: '50'
+
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'percentage_of_time', 'value' => 50 }
+ ])
+ end
end
context 'when the feature exists' do
@@ -80,11 +136,83 @@ describe API::Features do
feature.disable # This also persists the feature on the DB
end
- it 'enables the feature if passed true' do
- post api("/features/#{feature_name}", admin), value: 'true'
+ context 'when passed value=true' do
+ it 'enables the feature' do
+ post api("/features/#{feature_name}", admin), value: 'true'
- expect(response).to have_http_status(201)
- expect(feature).to be_enabled
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'on',
+ 'gates' => [{ 'key' => 'boolean', 'value' => true }])
+ end
+
+ it 'enables the feature for the given Flipper group when passed feature_group=perf_team' do
+ post api("/features/#{feature_name}", admin), value: 'true', feature_group: 'perf_team'
+
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'groups', 'value' => ['perf_team'] }
+ ])
+ end
+
+ it 'enables the feature for the given user when passed user=username' do
+ post api("/features/#{feature_name}", admin), value: 'true', user: user.username
+
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
+ ])
+ end
+ end
+
+ context 'when feature is enabled and value=false is passed' do
+ it 'disables the feature' do
+ feature.enable
+ expect(feature).to be_enabled
+
+ post api("/features/#{feature_name}", admin), value: 'false'
+
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'off',
+ 'gates' => [{ 'key' => 'boolean', 'value' => false }])
+ end
+
+ it 'disables the feature for the given Flipper group when passed feature_group=perf_team' do
+ feature.enable(Feature.group(:perf_team))
+ expect(Feature.get(feature_name).enabled?(admin)).to be_truthy
+
+ post api("/features/#{feature_name}", admin), value: 'false', feature_group: 'perf_team'
+
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'off',
+ 'gates' => [{ 'key' => 'boolean', 'value' => false }])
+ end
+
+ it 'disables the feature for the given user when passed user=username' do
+ feature.enable(user)
+ expect(Feature.get(feature_name).enabled?(user)).to be_truthy
+
+ post api("/features/#{feature_name}", admin), value: 'false', user: user.username
+
+ expect(response).to have_http_status(201)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'off',
+ 'gates' => [{ 'key' => 'boolean', 'value' => false }])
+ end
end
context 'with a pre-existing percentage value' do
@@ -96,7 +224,13 @@ describe API::Features do
post api("/features/#{feature_name}", admin), value: '30'
expect(response).to have_http_status(201)
- expect(Feature.get(feature_name).percentage_of_time_value).to be(30)
+ expect(json_response).to eq(
+ 'name' => 'my_feature',
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'percentage_of_time', 'value' => 30 }
+ ])
end
end
end
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index 3bf16a3ae27..26cf653ca8e 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -15,6 +15,20 @@ describe API::Namespaces do
end
context "when authenticated as admin" do
+ it "returns correct attributes" do
+ get api("/namespaces", admin)
+
+ group_kind_json_response = json_response.find { |resource| resource['kind'] == 'group' }
+ user_kind_json_response = json_response.find { |resource| resource['kind'] == 'user' }
+
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(group_kind_json_response.keys).to contain_exactly('id', 'kind', 'name', 'path', 'full_path',
+ 'parent_id', 'members_count_with_descendants')
+
+ expect(user_kind_json_response.keys).to contain_exactly('id', 'kind', 'name', 'path', 'full_path', 'parent_id')
+ end
+
it "admin: returns an array of all namespaces" do
get api("/namespaces", admin)
@@ -37,6 +51,27 @@ describe API::Namespaces do
end
context "when authenticated as a regular user" do
+ it "returns correct attributes when user can admin group" do
+ group1.add_owner(user)
+
+ get api("/namespaces", user)
+
+ owned_group_response = json_response.find { |resource| resource['id'] == group1.id }
+
+ expect(owned_group_response.keys).to contain_exactly('id', 'kind', 'name', 'path', 'full_path',
+ 'parent_id', 'members_count_with_descendants')
+ end
+
+ it "returns correct attributes when user cannot admin group" do
+ group1.add_guest(user)
+
+ get api("/namespaces", user)
+
+ guest_group_response = json_response.find { |resource| resource['id'] == group1.id }
+
+ expect(guest_group_response.keys).to contain_exactly('id', 'kind', 'name', 'path', 'full_path', 'parent_id')
+ end
+
it "user: returns an array of namespaces" do
get api("/namespaces", user)
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index fd7ff0b9cff..14dec3d45b1 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -698,7 +698,8 @@ describe API::Projects do
'name' => user.namespace.name,
'path' => user.namespace.path,
'kind' => user.namespace.kind,
- 'full_path' => user.namespace.full_path
+ 'full_path' => user.namespace.full_path,
+ 'parent_id' => nil
})
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index f0edc06cd0b..b8109ce401c 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -13,9 +13,40 @@ describe API::Users do
describe 'GET /users' do
context "when unauthenticated" do
- it "returns authentication error" do
+ it "returns authorization error when the `username` parameter is not passed" do
get api("/users")
- expect(response).to have_http_status(401)
+
+ expect(response).to have_http_status(403)
+ end
+
+ it "returns the user when a valid `username` parameter is passed" do
+ user = create(:user)
+
+ get api("/users"), username: user.username
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(1)
+ expect(json_response[0]['id']).to eq(user.id)
+ expect(json_response[0]['username']).to eq(user.username)
+ end
+
+ it "returns authorization error when the `username` parameter refers to an inaccessible user" do
+ user = create(:user)
+
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+
+ get api("/users"), username: user.username
+
+ expect(response).to have_http_status(403)
+ end
+
+ it "returns an empty response when an invalid `username` parameter is passed" do
+ get api("/users"), username: 'invalid'
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(0)
end
end
@@ -145,6 +176,7 @@ describe API::Users do
describe "GET /users/:id" do
it "returns a user by id" do
get api("/users/#{user.id}", user)
+
expect(response).to have_http_status(200)
expect(json_response['username']).to eq(user.username)
end
@@ -155,9 +187,22 @@ describe API::Users do
expect(json_response['is_admin']).to be_nil
end
- it "returns a 401 if unauthenticated" do
- get api("/users/9998")
- expect(response).to have_http_status(401)
+ context 'for an anonymous user' do
+ it "returns a user by id" do
+ get api("/users/#{user.id}")
+
+ expect(response).to have_http_status(200)
+ expect(json_response['username']).to eq(user.username)
+ end
+
+ it "returns a 404 if the target user is present but inaccessible" do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(nil, :read_user, user).and_return(false)
+
+ get api("/users/#{user.id}")
+
+ expect(response).to have_http_status(404)
+ end
end
it "returns a 404 error if user id not found" do
diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb
index cb74868324c..af44ffa2331 100644
--- a/spec/requests/api/v3/projects_spec.rb
+++ b/spec/requests/api/v3/projects_spec.rb
@@ -734,7 +734,8 @@ describe API::V3::Projects do
'name' => user.namespace.name,
'path' => user.namespace.path,
'kind' => user.namespace.kind,
- 'full_path' => user.namespace.full_path
+ 'full_path' => user.namespace.full_path,
+ 'parent_id' => nil
})
end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 95d40138fea..2f1c3c95e59 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -246,28 +246,13 @@ describe 'project routing' do
end
end
- # diffs_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/diffs(.:format) projects/merge_requests#diffs
- # commits_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/commits(.:format) projects/merge_requests#commits
- # merge_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/merge(.:format) projects/merge_requests#merge
- # ci_status_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/ci_status(.:format) projects/merge_requests#ci_status
- # toggle_subscription_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/toggle_subscription(.:format) projects/merge_requests#toggle_subscription
- # branch_from_namespace_project_merge_requests GET /:namespace_id/:project_id/merge_requests/branch_from(.:format) projects/merge_requests#branch_from
- # branch_to_namespace_project_merge_requests GET /:namespace_id/:project_id/merge_requests/branch_to(.:format) projects/merge_requests#branch_to
- # update_branches_namespace_project_merge_requests GET /:namespace_id/:project_id/merge_requests/update_branches(.:format) projects/merge_requests#update_branches
- # namespace_project_merge_requests GET /:namespace_id/:project_id/merge_requests(.:format) projects/merge_requests#index
- # POST /:namespace_id/:project_id/merge_requests(.:format) projects/merge_requests#create
- # new_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/new(.:format) projects/merge_requests#new
- # edit_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/edit(.:format) projects/merge_requests#edit
- # namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id(.:format) projects/merge_requests#show
- # PATCH /:namespace_id/:project_id/merge_requests/:id(.:format) projects/merge_requests#update
- # PUT /:namespace_id/:project_id/merge_requests/:id(.:format) projects/merge_requests#update
describe Projects::MergeRequestsController, 'routing' do
- it 'to #diffs' do
- expect(get('/gitlab/gitlabhq/merge_requests/1/diffs')).to route_to('projects/merge_requests#diffs', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ it 'to #commits' do
+ expect(get('/gitlab/gitlabhq/merge_requests/1/commits.json')).to route_to('projects/merge_requests#commits', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
end
- it 'to #commits' do
- expect(get('/gitlab/gitlabhq/merge_requests/1/commits')).to route_to('projects/merge_requests#commits', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ it 'to #pipelines' do
+ expect(get('/gitlab/gitlabhq/merge_requests/1/pipelines.json')).to route_to('projects/merge_requests#pipelines', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
end
it 'to #merge' do
@@ -277,25 +262,59 @@ describe 'project routing' do
)
end
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/merge_requests/1.diff')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'diff')
+ expect(get('/gitlab/gitlabhq/merge_requests/1.patch')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'patch')
+ expect(get('/gitlab/gitlabhq/merge_requests/1/diffs')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'diffs')
+ expect(get('/gitlab/gitlabhq/merge_requests/1/commits')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'commits')
+ expect(get('/gitlab/gitlabhq/merge_requests/1/pipelines')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'pipelines')
+ end
+
+ it_behaves_like 'RESTful project resources' do
+ let(:controller) { 'merge_requests' }
+ let(:actions) { [:index, :edit, :show, :update] }
+ end
+ end
+
+ describe Projects::MergeRequests::CreationsController, 'routing' do
+ it 'to #new' do
+ expect(get('/gitlab/gitlabhq/merge_requests/new')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/merge_requests/new/diffs')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq', tab: 'diffs')
+ expect(get('/gitlab/gitlabhq/merge_requests/new/pipelines')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq', tab: 'pipelines')
+ end
+
+ it 'to #create' do
+ expect(post('/gitlab/gitlabhq/merge_requests')).to route_to('projects/merge_requests/creations#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+
it 'to #branch_from' do
- expect(get('/gitlab/gitlabhq/merge_requests/branch_from')).to route_to('projects/merge_requests#branch_from', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/merge_requests/new/branch_from')).to route_to('projects/merge_requests/creations#branch_from', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #branch_to' do
- expect(get('/gitlab/gitlabhq/merge_requests/branch_to')).to route_to('projects/merge_requests#branch_to', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/merge_requests/new/branch_to')).to route_to('projects/merge_requests/creations#branch_to', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
- it 'to #show' do
- expect(get('/gitlab/gitlabhq/merge_requests/1.diff')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'diff')
- expect(get('/gitlab/gitlabhq/merge_requests/1.patch')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'patch')
+ it 'to #pipelines' do
+ expect(get('/gitlab/gitlabhq/merge_requests/new/pipelines.json')).to route_to('projects/merge_requests/creations#pipelines', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'json')
end
- it_behaves_like 'RESTful project resources' do
- let(:controller) { 'merge_requests' }
- let(:actions) { [:index, :create, :new, :edit, :show, :update] }
+ it 'to #diffs' do
+ expect(get('/gitlab/gitlabhq/merge_requests/new/diffs.json')).to route_to('projects/merge_requests/creations#diffs', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'json')
+ end
+ end
+
+ describe Projects::MergeRequests::DiffsController, 'routing' do
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/merge_requests/1/diffs.json')).to route_to('projects/merge_requests/diffs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
end
end
+ describe Projects::MergeRequests::ConflictsController, 'routing' do
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/merge_requests/1/conflicts')).to route_to('projects/merge_requests/conflicts#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ end
+ end
# raw_project_snippet GET /:project_id/snippets/:id/raw(.:format) snippets#raw
# project_snippets GET /:project_id/snippets(.:format) snippets#index
# POST /:project_id/snippets(.:format) snippets#create
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index a1e220c2322..a66cc2cd6e9 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -67,7 +67,7 @@ describe Boards::Issues::ListService, services: true do
issues = described_class.new(project, user, params).execute
- expect(issues).to eq [closed_issue4, closed_issue2, closed_issue3, closed_issue1]
+ expect(issues).to eq [closed_issue4, closed_issue2, closed_issue5, closed_issue3, closed_issue1]
end
it 'returns opened issues that have label list applied when listing issues from a label list' do
diff --git a/spec/services/delete_merged_branches_service_spec.rb b/spec/services/delete_merged_branches_service_spec.rb
index cae74df9c90..fe21ca0b3cb 100644
--- a/spec/services/delete_merged_branches_service_spec.rb
+++ b/spec/services/delete_merged_branches_service_spec.rb
@@ -24,6 +24,14 @@ describe DeleteMergedBranchesService, services: true do
expect(project.repository.branch_names).to include('master')
end
+ it 'keeps protected branches' do
+ create(:protected_branch, project: project, name: 'improve/awesome')
+
+ service.execute
+
+ expect(project.repository.branch_names).to include('improve/awesome')
+ end
+
context 'user without rights' do
let(:user) { create(:user) }
diff --git a/spec/services/git_hooks_service_spec.rb b/spec/services/git_hooks_service_spec.rb
index ac7ccfbaab0..213678c27f5 100644
--- a/spec/services/git_hooks_service_spec.rb
+++ b/spec/services/git_hooks_service_spec.rb
@@ -12,7 +12,6 @@ describe GitHooksService, services: true do
@oldrev = sample_commit.parent_id
@newrev = sample_commit.id
@ref = 'refs/heads/feature'
- @repo_path = project.repository.path_to_repo
end
describe '#execute' do
@@ -21,7 +20,7 @@ describe GitHooksService, services: true do
hook = double(trigger: [true, nil])
expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
- service.execute(user, @repo_path, @blankrev, @newrev, @ref) { }
+ service.execute(user, project, @blankrev, @newrev, @ref) { }
end
end
@@ -31,7 +30,7 @@ describe GitHooksService, services: true do
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
- service.execute(user, @repo_path, @blankrev, @newrev, @ref)
+ service.execute(user, project, @blankrev, @newrev, @ref)
end.to raise_error(GitHooksService::PreReceiveError)
end
end
@@ -43,7 +42,7 @@ describe GitHooksService, services: true do
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
- service.execute(user, @repo_path, @blankrev, @newrev, @ref)
+ service.execute(user, project, @blankrev, @newrev, @ref)
end.to raise_error(GitHooksService::PreReceiveError)
end
end
diff --git a/spec/services/git_push_service_spec.rb b/spec/services/git_push_service_spec.rb
index ca827fc0f39..8e8816870e1 100644
--- a/spec/services/git_push_service_spec.rb
+++ b/spec/services/git_push_service_spec.rb
@@ -401,18 +401,6 @@ describe GitPushService, services: true do
expect(SystemNoteService).not_to receive(:cross_reference)
execute_service(project, commit_author, @oldrev, @newrev, @ref )
end
-
- it "doesn't close issues when external issue tracker is in use" do
- allow_any_instance_of(Project).to receive(:default_issues_tracker?)
- .and_return(false)
- external_issue_tracker = double(title: 'My Tracker', issue_path: issue.iid, reference_pattern: project.issue_reference_pattern)
- allow_any_instance_of(Project).to receive(:external_issue_tracker).and_return(external_issue_tracker)
-
- # The push still shouldn't create cross-reference notes.
- expect do
- execute_service(project, commit_author, @oldrev, @newrev, 'refs/heads/hurf' )
- end.not_to change { Note.where(project_id: project.id, system: true).count }
- end
end
context "to non-default branches" do
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index a37257d1bf4..d59b37bee36 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -15,6 +15,14 @@ describe Groups::DestroyService, services: true do
group.add_user(user, Gitlab::Access::OWNER)
end
+ def destroy_group(group, user, async)
+ if async
+ Groups::DestroyService.new(group, user).async_execute
+ else
+ Groups::DestroyService.new(group, user).execute
+ end
+ end
+
shared_examples 'group destruction' do |async|
context 'database records' do
before do
@@ -30,30 +38,14 @@ describe Groups::DestroyService, services: true do
context 'file system' do
context 'Sidekiq inline' do
before do
- # Run sidekiq immediatly to check that renamed dir will be removed
+ # Run sidekiq immediately to check that renamed dir will be removed
Sidekiq::Testing.inline! { destroy_group(group, user, async) }
end
- it { expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey }
- it { expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey }
- end
-
- context 'Sidekiq fake' do
- before do
- # Don't run sidekiq to check if renamed repository exists
- Sidekiq::Testing.fake! { destroy_group(group, user, async) }
+ it 'verifies that paths have been deleted' do
+ expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey
+ expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey
end
-
- it { expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey }
- it { expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_truthy }
- end
- end
-
- def destroy_group(group, user, async)
- if async
- Groups::DestroyService.new(group, user).async_execute
- else
- Groups::DestroyService.new(group, user).execute
end
end
end
@@ -61,6 +53,26 @@ describe Groups::DestroyService, services: true do
describe 'asynchronous delete' do
it_behaves_like 'group destruction', true
+ context 'Sidekiq fake' do
+ before do
+ # Don't run Sidekiq to verify that group and projects are not actually destroyed
+ Sidekiq::Testing.fake! { destroy_group(group, user, true) }
+ end
+
+ after do
+ # Clean up stale directories
+ gitlab_shell.rm_namespace(project.repository_storage_path, group.path)
+ gitlab_shell.rm_namespace(project.repository_storage_path, remove_path)
+ end
+
+ it 'verifies original paths and projects still exist' do
+ expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_truthy
+ expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey
+ expect(Project.unscoped.count).to eq(1)
+ expect(Group.unscoped.count).to eq(2)
+ end
+ end
+
context 'potential race conditions' do
context "when the `GroupDestroyWorker` task runs immediately" do
it "deletes the group" do
diff --git a/spec/services/notification_recipient_service_spec.rb b/spec/services/notification_recipient_service_spec.rb
new file mode 100644
index 00000000000..dfe1ee7c41e
--- /dev/null
+++ b/spec/services/notification_recipient_service_spec.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+
+describe NotificationRecipientService, services: true do
+ set(:user) { create(:user) }
+ set(:project) { create(:empty_project, :public) }
+ set(:issue) { create(:issue, project: project) }
+
+ set(:watcher) do
+ watcher = create(:user)
+ setting = watcher.notification_settings_for(project)
+ setting.level = :watch
+ setting.save
+
+ watcher
+ end
+
+ subject { described_class.new(project) }
+
+ describe '#build_recipients' do
+ it 'does not modify the participants of the target' do
+ expect { subject.build_recipients(issue, user, action: :new_issue) }
+ .not_to change { issue.participants(user) }
+ end
+ end
+
+ describe '#build_new_note_recipients' do
+ set(:note) { create(:note_on_issue, noteable: issue, project: project) }
+
+ it 'does not modify the participants of the target' do
+ expect { subject.build_new_note_recipients(note) }
+ .not_to change { note.noteable.participants(note.author) }
+ end
+ end
+end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 76c52d55ae5..441a5276c56 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -30,6 +30,12 @@ describe Projects::TransferService, services: true do
transfer_project(project, user, group)
end
+ it 'expires full_path cache' do
+ expect(project).to receive(:expires_full_path_cache)
+
+ transfer_project(project, user, group)
+ end
+
it 'executes system hooks' do
expect_any_instance_of(Projects::TransferService).to receive(:execute_system_hooks)
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index c9e63efbc14..35373675894 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -359,18 +359,18 @@ describe QuickActions::InterpretService, services: true do
let(:content) { "/assign @#{developer.username}" }
context 'Issue' do
- it 'fetches assignee and populates assignee_id if content contains /assign' do
+ it 'fetches assignee and populates assignee_ids if content contains /assign' do
_, updates = service.execute(content, issue)
- expect(updates).to eq(assignee_ids: [developer.id])
+ expect(updates[:assignee_ids]).to match_array([developer.id])
end
end
context 'Merge Request' do
- it 'fetches assignee and populates assignee_id if content contains /assign' do
+ it 'fetches assignee and populates assignee_ids if content contains /assign' do
_, updates = service.execute(content, merge_request)
- expect(updates).to eq(assignee_id: developer.id)
+ expect(updates).to eq(assignee_ids: [developer.id])
end
end
end
@@ -383,7 +383,7 @@ describe QuickActions::InterpretService, services: true do
end
context 'Issue' do
- it 'fetches assignee and populates assignee_id if content contains /assign' do
+ it 'fetches assignee and populates assignee_ids if content contains /assign' do
_, updates = service.execute(content, issue)
expect(updates[:assignee_ids]).to match_array([developer.id])
@@ -391,10 +391,10 @@ describe QuickActions::InterpretService, services: true do
end
context 'Merge Request' do
- it 'fetches assignee and populates assignee_id if content contains /assign' do
+ it 'fetches assignee and populates assignee_ids if content contains /assign' do
_, updates = service.execute(content, merge_request)
- expect(updates).to eq(assignee_id: developer.id)
+ expect(updates).to eq(assignee_ids: [developer.id])
end
end
end
@@ -422,11 +422,27 @@ describe QuickActions::InterpretService, services: true do
end
context 'Merge Request' do
- it 'populates assignee_id: nil if content contains /unassign' do
- merge_request.update(assignee_id: developer.id)
+ it 'populates assignee_ids: [] if content contains /unassign' do
+ merge_request.update(assignee_ids: [developer.id])
_, updates = service.execute(content, merge_request)
- expect(updates).to eq(assignee_id: nil)
+ expect(updates).to eq(assignee_ids: [])
+ end
+ end
+ end
+
+ context 'reassign command' do
+ let(:content) { '/reassign' }
+
+ context 'Issue' do
+ it 'reassigns user if content contains /reassign @user' do
+ user = create(:user)
+
+ issue.update(assignee_ids: [developer.id])
+
+ _, updates = service.execute("/reassign @#{user.username}", issue)
+
+ expect(updates).to eq(assignee_ids: [user.id])
end
end
end
diff --git a/spec/support/fake_migration_classes.rb b/spec/support/fake_migration_classes.rb
index 3de0460c3ca..b0fc8422857 100644
--- a/spec/support/fake_migration_classes.rb
+++ b/spec/support/fake_migration_classes.rb
@@ -1,3 +1,11 @@
class FakeRenameReservedPathMigrationV1 < ActiveRecord::Migration
include Gitlab::Database::RenameReservedPathsMigration::V1
+
+ def version
+ '20170316163845'
+ end
+
+ def name
+ "FakeRenameReservedPathMigrationV1"
+ end
end
diff --git a/spec/support/features/issuable_slash_commands_shared_examples.rb b/spec/support/features/issuable_slash_commands_shared_examples.rb
index 50869099bb7..98b014df6cd 100644
--- a/spec/support/features/issuable_slash_commands_shared_examples.rb
+++ b/spec/support/features/issuable_slash_commands_shared_examples.rb
@@ -28,7 +28,12 @@ shared_examples 'issuable record that supports quick actions in its description
describe "new #{issuable_type}", js: true do
context 'with commands in the description' do
it "creates the #{issuable_type} and interpret commands accordingly" do
- visit public_send("new_namespace_project_#{issuable_type}_path", project.namespace, project, new_url_opts)
+ case issuable_type
+ when :merge_request
+ visit public_send("namespace_project_new_merge_request_path", project.namespace, project, new_url_opts)
+ when :issue
+ visit public_send("new_namespace_project_issue_path", project.namespace, project, new_url_opts)
+ end
fill_in "#{issuable_type}_title", with: 'bug 345'
fill_in "#{issuable_type}_description", with: "bug description\n/label ~bug\n/milestone %\"ASAP\""
click_button "Submit #{issuable_type}".humanize
diff --git a/spec/support/generate-seed-repo-rb b/spec/support/generate-seed-repo-rb
index 7335f74c0e9..c89389b90ca 100755
--- a/spec/support/generate-seed-repo-rb
+++ b/spec/support/generate-seed-repo-rb
@@ -15,7 +15,7 @@
require 'erb'
require 'tempfile'
-SOURCE = 'https://gitlab.com/gitlab-org/gitlab-git-test.git'.freeze
+SOURCE = File.expand_path('../gitlab-git-test.git', __FILE__).freeze
SCRIPT_NAME = 'generate-seed-repo-rb'.freeze
REPO_NAME = 'gitlab-git-test.git'.freeze
diff --git a/spec/support/gitlab-git-test.git/HEAD b/spec/support/gitlab-git-test.git/HEAD
new file mode 100644
index 00000000000..cb089cd89a7
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/master
diff --git a/spec/support/gitlab-git-test.git/README.md b/spec/support/gitlab-git-test.git/README.md
new file mode 100644
index 00000000000..f072cd421be
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/README.md
@@ -0,0 +1,16 @@
+# Gitlab::Git test repository
+
+This repository is used by (some of) the tests in spec/lib/gitlab/git.
+
+Do not add new large files to this repository. Otherwise we needlessly
+inflate the size of the gitlab-ce repository.
+
+## How to make changes to this repository
+
+- (if needed) clone `https://gitlab.com/gitlab-org/gitlab-ce.git` to your local machine
+- clone `gitlab-ce/spec/support/gitlab-git-test.git` locally (i.e. clone from your hard drive, not from the internet)
+- make changes in your local clone of gitlab-git-test
+- run `git push` which will push to your local source `gitlab-ce/spec/support/gitlab-git-test.git`
+- in gitlab-ce: run `spec/support/prepare-gitlab-git-test-for-commit`
+- in gitlab-ce: `git add spec/support/seed_repo.rb spec/support/gitlab-git-test.git`
+- commit your changes in gitlab-ce
diff --git a/spec/support/gitlab-git-test.git/config b/spec/support/gitlab-git-test.git/config
new file mode 100644
index 00000000000..03e2d1b1e0f
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/config
@@ -0,0 +1,7 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = true
+ precomposeunicode = true
+[remote "origin"]
+ url = https://gitlab.com/gitlab-org/gitlab-git-test.git
diff --git a/spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.idx b/spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.idx
new file mode 100644
index 00000000000..2253da798c4
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.idx
Binary files differ
diff --git a/spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.pack b/spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.pack
new file mode 100644
index 00000000000..3a61107c5b1
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/objects/pack/pack-691247af2a6acb0b63b73ac0cb90540e93614043.pack
Binary files differ
diff --git a/spec/support/gitlab-git-test.git/packed-refs b/spec/support/gitlab-git-test.git/packed-refs
new file mode 100644
index 00000000000..ce5ab1f705b
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/packed-refs
@@ -0,0 +1,18 @@
+# pack-refs with: peeled fully-peeled
+0b4bc9a49b562e85de7cc9e834518ea6828729b9 refs/heads/feature
+12d65c8dd2b2676fa3ac47d955accc085a37a9c1 refs/heads/fix
+6473c90867124755509e100d0d35ebdc85a0b6ae refs/heads/fix-blob-path
+58fa1a3af4de73ea83fe25a1ef1db8e0c56f67e5 refs/heads/fix-existing-submodule-dir
+40f4a7a617393735a95a0bb67b08385bc1e7c66d refs/heads/fix-mode
+9abd6a8c113a2dd76df3fdb3d58a8cec6db75f8d refs/heads/gitattributes
+46e1395e609395de004cacd4b142865ab0e52a29 refs/heads/gitattributes-updated
+4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6 refs/heads/master
+5937ac0a7beb003549fc5fd26fc247adbce4a52e refs/heads/merge-test
+f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8 refs/tags/v1.0.0
+^6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
+8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b refs/tags/v1.1.0
+^5937ac0a7beb003549fc5fd26fc247adbce4a52e
+10d64eed7760f2811ee2d64b44f1f7d3b364f17b refs/tags/v1.2.0
+^eb49186cfa5c4338011f5f590fac11bd66c5c631
+2ac1f24e253e08135507d0830508febaaccf02ee refs/tags/v1.2.1
+^fa1b1e6c004a68b7d8763b86455da9e6b23e36d6
diff --git a/spec/support/gitlab-git-test.git/refs/heads/.gitkeep b/spec/support/gitlab-git-test.git/refs/heads/.gitkeep
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/refs/heads/.gitkeep
diff --git a/spec/support/gitlab-git-test.git/refs/tags/.gitkeep b/spec/support/gitlab-git-test.git/refs/tags/.gitkeep
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/spec/support/gitlab-git-test.git/refs/tags/.gitkeep
diff --git a/spec/support/issue_tracker_service_shared_example.rb b/spec/support/issue_tracker_service_shared_example.rb
index e70b3963d9d..a6ab03cb808 100644
--- a/spec/support/issue_tracker_service_shared_example.rb
+++ b/spec/support/issue_tracker_service_shared_example.rb
@@ -8,15 +8,15 @@ end
RSpec.shared_examples 'allows project key on reference pattern' do |url_attr|
it 'allows underscores in the project name' do
- expect(subject.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234'
+ expect(described_class.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234'
end
it 'allows numbers in the project name' do
- expect(subject.reference_pattern.match('EXT3_EXT-1234')[0]).to eq 'EXT3_EXT-1234'
+ expect(described_class.reference_pattern.match('EXT3_EXT-1234')[0]).to eq 'EXT3_EXT-1234'
end
it 'requires the project name to begin with A-Z' do
- expect(subject.reference_pattern.match('3EXT_EXT-1234')).to eq nil
- expect(subject.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234'
+ expect(described_class.reference_pattern.match('3EXT_EXT-1234')).to eq nil
+ expect(described_class.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234'
end
end
diff --git a/spec/support/matchers/access_matchers_for_controller.rb b/spec/support/matchers/access_matchers_for_controller.rb
new file mode 100644
index 00000000000..fb43f51c70c
--- /dev/null
+++ b/spec/support/matchers/access_matchers_for_controller.rb
@@ -0,0 +1,84 @@
+# AccessMatchersForController
+#
+# For testing authorize_xxx in controller.
+module AccessMatchersForController
+ extend RSpec::Matchers::DSL
+ include Warden::Test::Helpers
+
+ EXPECTED_STATUS_CODE_ALLOWED = [200, 201, 302].freeze
+ EXPECTED_STATUS_CODE_DENIED = [401, 404].freeze
+
+ def emulate_user(role, membership = nil)
+ case role
+ when :admin
+ user = create(:admin)
+ sign_in(user)
+ when :user
+ user = create(:user)
+ sign_in(user)
+ when :external
+ user = create(:user, external: true)
+ sign_in(user)
+ when :visitor
+ user = nil
+ when User
+ user = role
+ sign_in(user)
+ when *Gitlab::Access.sym_options_with_owner.keys # owner, master, developer, reporter, guest
+ raise ArgumentError, "cannot emulate #{role} without membership parent" unless membership
+
+ user = create_user_by_membership(role, membership)
+ sign_in(user)
+ else
+ raise ArgumentError, "cannot emulate user #{role}"
+ end
+
+ user
+ end
+
+ def create_user_by_membership(role, membership)
+ if role == :owner && membership.owner
+ user = membership.owner
+ else
+ user = create(:user)
+ membership.public_send(:"add_#{role}", user)
+ end
+ user
+ end
+
+ def description_for(role, type, expected, result)
+ "be #{type} for #{role}. Expected: #{expected.join(',')} Got: #{result}"
+ end
+
+ matcher :be_allowed_for do |role|
+ match do |action|
+ emulate_user(role, @membership)
+ action.call
+
+ EXPECTED_STATUS_CODE_ALLOWED.include?(response.status)
+ end
+
+ chain :of do |membership|
+ @membership = membership
+ end
+
+ description { description_for(role, 'allowed', EXPECTED_STATUS_CODE_ALLOWED, response.status) }
+ supports_block_expectations
+ end
+
+ matcher :be_denied_for do |role|
+ match do |action|
+ emulate_user(role, @membership)
+ action.call
+
+ EXPECTED_STATUS_CODE_DENIED.include?(response.status)
+ end
+
+ chain :of do |membership|
+ @membership = membership
+ end
+
+ description { description_for(role, 'denied', EXPECTED_STATUS_CODE_DENIED, response.status) }
+ supports_block_expectations
+ end
+end
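As an illustration only (not part of this change): a minimal sketch of how a controller spec might use these block matchers once the support file is loaded. The controller, the factories and the `go` helper below are assumptions made for the example, not code from this diff.

describe Projects::MergeRequestsController, type: :controller do
  let(:project)       { create(:project) }
  let(:merge_request) { create(:merge_request, source_project: project) }

  # Wrap the request in a method so it can be re-run inside the block matchers.
  def go
    get :show, namespace_id: project.namespace, project_id: project, id: merge_request.iid
  end

  it 'allows project developers' do
    # Creates a fresh user, adds it to `project` as developer, signs it in,
    # and expects one of the "allowed" status codes (200/201/302).
    expect { go }.to be_allowed_for(:developer).of(project)
  end

  it 'denies unrelated signed-in users' do
    # A signed-in non-member should get one of the "denied" codes (401/404).
    expect { go }.to be_denied_for(:user)
  end
end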
diff --git a/spec/support/matchers/be_utf8.rb b/spec/support/matchers/be_utf8.rb
new file mode 100644
index 00000000000..ea806352422
--- /dev/null
+++ b/spec/support/matchers/be_utf8.rb
@@ -0,0 +1,9 @@
+RSpec::Matchers.define :be_utf8 do |_|
+ match do |actual|
+ actual.is_a?(String) && actual.encoding == Encoding.find('UTF-8')
+ end
+
+ description do
+ "be a String with encoding UTF-8"
+ end
+end
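Again purely illustrative (not part of the diff): the matcher checks both the class and the encoding, so a spec could assert, for example:

it 'only matches UTF-8 encoded strings' do
  expect('GitLab'.encode('UTF-8')).to be_utf8
  expect('GitLab'.encode('ISO-8859-1')).not_to be_utf8
  expect(:gitlab).not_to be_utf8 # not a String, so it never matches
end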
diff --git a/spec/support/prepare-gitlab-git-test-for-commit b/spec/support/prepare-gitlab-git-test-for-commit
new file mode 100755
index 00000000000..3047786a599
--- /dev/null
+++ b/spec/support/prepare-gitlab-git-test-for-commit
@@ -0,0 +1,17 @@
+#!/usr/bin/env ruby
+
+abort unless [
+ system('spec/support/generate-seed-repo-rb', out: 'spec/support/seed_repo.rb'),
+ system('spec/support/unpack-gitlab-git-test')
+].all?
+
+exit if ARGV.first != '--check-for-changes'
+
+git_status = IO.popen(%w[git status --porcelain], &:read)
+abort unless $?.success?
+
+puts git_status
+
+if git_status.lines.grep(%r{^.. spec/support/gitlab-git-test.git}).any?
+ abort "error: detected changes in gitlab-git-test.git"
+end
diff --git a/spec/support/seed_helper.rb b/spec/support/seed_helper.rb
index 47b5f556e66..8731847592b 100644
--- a/spec/support/seed_helper.rb
+++ b/spec/support/seed_helper.rb
@@ -9,7 +9,7 @@ TEST_MUTABLE_REPO_PATH = 'mutable-repo.git'.freeze
TEST_BROKEN_REPO_PATH = 'broken-repo.git'.freeze
module SeedHelper
- GITLAB_GIT_TEST_REPO_URL = ENV.fetch('GITLAB_GIT_TEST_REPO_URL', 'https://gitlab.com/gitlab-org/gitlab-git-test.git').freeze
+ GITLAB_GIT_TEST_REPO_URL = File.expand_path('../gitlab-git-test.git', __FILE__).freeze
def ensure_seeds
if File.exist?(SEED_STORAGE_PATH)
diff --git a/spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb b/spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb
new file mode 100644
index 00000000000..96c821b26f7
--- /dev/null
+++ b/spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb
@@ -0,0 +1,9 @@
+shared_examples 'issue sidebar stays collapsed on mobile' do
+ before do
+ resize_screen_xs
+ end
+
+ it 'keeps the sidebar collapsed' do
+ expect(page).not_to have_css('.right-sidebar.right-sidebar-collapsed')
+ end
+end
diff --git a/spec/support/protected_branches/access_control_ce_shared_examples.rb b/spec/support/shared_examples/features/protected_branches_access_control_ce.rb
index 287d6bb13c3..b6341127a76 100644
--- a/spec/support/protected_branches/access_control_ce_shared_examples.rb
+++ b/spec/support/shared_examples/features/protected_branches_access_control_ce.rb
@@ -1,4 +1,4 @@
-RSpec.shared_examples "protected branches > access control > CE" do
+shared_examples "protected branches > access control > CE" do
ProtectedBranch::PushAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
it "allows creating protected branches that #{access_type_name} can push to" do
visit namespace_project_protected_branches_path(project.namespace, project)
diff --git a/spec/support/stub_env.rb b/spec/support/stub_env.rb
index 2999bcd9fb1..b8928867174 100644
--- a/spec/support/stub_env.rb
+++ b/spec/support/stub_env.rb
@@ -1,15 +1,33 @@
+# Inspired by https://github.com/ljkbennett/stub_env/blob/master/lib/stub_env/helpers.rb
module StubENV
- def stub_env(key, value)
- allow(ENV).to receive(:[]).and_call_original unless @env_already_stubbed
- @env_already_stubbed ||= true
+ def stub_env(key_or_hash, value = nil)
+ init_stub unless env_stubbed?
+ if key_or_hash.is_a? Hash
+ key_or_hash.each { |k, v| add_stubbed_value(k, v) }
+ else
+ add_stubbed_value key_or_hash, value
+ end
+ end
+
+ private
+
+ STUBBED_KEY = '__STUBBED__'.freeze
+
+ def add_stubbed_value(key, value)
allow(ENV).to receive(:[]).with(key).and_return(value)
+ allow(ENV).to receive(:fetch).with(key).and_return(value)
+ allow(ENV).to receive(:fetch).with(key, anything()) do |_, default_val|
+ value || default_val
+ end
+ end
+
+ def env_stubbed?
+ ENV[STUBBED_KEY]
end
-end
-# It's possible that the state of the class variables are not reset across
-# test runs.
-RSpec.configure do |config|
- config.after(:each) do
- @env_already_stubbed = nil
+ def init_stub
+ allow(ENV).to receive(:[]).and_call_original
+ allow(ENV).to receive(:fetch).and_call_original
+ add_stubbed_value(STUBBED_KEY, true)
end
end
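For illustration only (not part of the change): with the reworked helper a spec can stub several variables at once, and a stubbed ENV.fetch falls back to its default when the stubbed value is nil. This sketch assumes StubENV is included via spec_helper; the variable names are made up for the example.

it 'stubs ENV#[] and ENV#fetch for the given keys' do
  stub_env('FOO' => 'bar', 'BAZ' => nil)
  stub_env('QUX', 'quux')

  expect(ENV['FOO']).to eq('bar')
  expect(ENV['BAZ']).to be_nil
  expect(ENV.fetch('QUX')).to eq('quux')
  expect(ENV.fetch('BAZ', 'default')).to eq('default') # nil stub falls back to the default
end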
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 1c5267c290b..32546abcad4 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -120,18 +120,21 @@ module TestEnv
end
def setup_gitlab_shell
- unless File.directory?(Gitlab.config.gitlab_shell.path)
- unless system('rake', 'gitlab:shell:install')
- raise 'Can`t clone gitlab-shell'
- end
+ shell_needs_update = component_needs_update?(Gitlab.config.gitlab_shell.path,
+ Gitlab::Shell.version_required)
+
+ unless !shell_needs_update || system('rake', 'gitlab:shell:install')
+ raise 'Can`t clone gitlab-shell'
end
end
def setup_gitaly
socket_path = Gitlab::GitalyClient.address('default').sub(/\Aunix:/, '')
gitaly_dir = File.dirname(socket_path)
+ gitaly_needs_update = component_needs_update?(gitaly_dir,
+ Gitlab::GitalyClient.expected_server_version)
- unless !gitaly_needs_update?(gitaly_dir) || system('rake', "gitlab:gitaly:install[#{gitaly_dir}]")
+ unless !gitaly_needs_update || system('rake', "gitlab:gitaly:install[#{gitaly_dir}]")
raise "Can't clone gitaly"
end
@@ -261,13 +264,13 @@ module TestEnv
end
end
- def gitaly_needs_update?(gitaly_dir)
- gitaly_version = File.read(File.join(gitaly_dir, 'VERSION')).strip
+ def component_needs_update?(component_folder, expected_version)
+ version = File.read(File.join(component_folder, 'VERSION')).strip
# Notice that this will always yield true when using branch versions
# (`=branch_name`), but that actually makes sure the server is always based
# on the latest branch revision.
- gitaly_version != Gitlab::GitalyClient.expected_server_version
+ version != expected_version
rescue Errno::ENOENT
true
end
diff --git a/spec/support/unpack-gitlab-git-test b/spec/support/unpack-gitlab-git-test
new file mode 100755
index 00000000000..d5b4912457d
--- /dev/null
+++ b/spec/support/unpack-gitlab-git-test
@@ -0,0 +1,38 @@
+#!/usr/bin/env ruby
+require 'fileutils'
+
+REPO = 'spec/support/gitlab-git-test.git'.freeze
+PACK_DIR = REPO + '/objects/pack'
+GIT = %W[git --git-dir=#{REPO}].freeze
+BASE_PACK = 'pack-691247af2a6acb0b63b73ac0cb90540e93614043'.freeze
+
+def main
+ unpack
+ # We want to store the refs in a packed-refs file because if we don't
+ # they can get mangled by filesystems.
+ abort unless system(*GIT, *%w[pack-refs --all])
+ abort unless system(*GIT, 'fsck')
+end
+
+# We don't want contributors to commit new pack files because those
+# create unnecessary churn.
+def unpack
+ pack_files = Dir[File.join(PACK_DIR, '*')].reject do |pack|
+ pack.start_with?(File.join(PACK_DIR, BASE_PACK))
+ end
+ return if pack_files.empty?
+
+ pack_files.each do |pack|
+ unless pack.end_with?('.pack')
+ FileUtils.rm(pack)
+ next
+ end
+
+ File.open(pack, 'rb') do |open_pack|
+ File.unlink(pack)
+ abort unless system(*GIT, 'unpack-objects', in: open_pack)
+ end
+ end
+end
+
+main
diff --git a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
index 4052dbf8df3..3e17fe2104b 100644
--- a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'projects/merge_requests/show/_commits.html.haml' do
+describe 'projects/merge_requests/_commits.html.haml' do
include Devise::Test::ControllerHelpers
let(:user) { create(:user) }
diff --git a/spec/views/projects/merge_requests/_new_submit.html.haml_spec.rb b/spec/views/projects/merge_requests/creations/_new_submit.html.haml_spec.rb
index 4f698a34ab5..1e9bdf9108f 100644
--- a/spec/views/projects/merge_requests/_new_submit.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/creations/_new_submit.html.haml_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'projects/merge_requests/_new_submit.html.haml', :view do
+describe 'projects/merge_requests/creations/_new_submit.html.haml', :view do
let(:merge_request) { create(:merge_request) }
let!(:pipeline) { create(:ci_empty_pipeline) }
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index cc9bc29c6cc..a8f4bb72acf 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -4,7 +4,7 @@ describe PostReceive do
let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) }
- let(:project_identifier) { "project-#{project.id}" }
+ let(:gl_repository) { "project-#{project.id}" }
let(:key) { create(:key, user: project.owner) }
let(:key_id) { key.shell_id }
@@ -19,22 +19,14 @@ describe PostReceive do
end
context 'with a non-existing project' do
- let(:project_identifier) { "project-123456789" }
+ let(:gl_repository) { "project-123456789" }
let(:error_message) do
- "Triggered hook for non-existing project with identifier \"#{project_identifier}\""
+ "Triggered hook for non-existing project with gl_repository \"#{gl_repository}\""
end
it "returns false and logs an error" do
expect(Gitlab::GitLogger).to receive(:error).with("POST-RECEIVE: #{error_message}")
- expect(described_class.new.perform(project_identifier, key_id, base64_changes)).to be(false)
- end
- end
-
- context "with an absolute path as the project identifier" do
- it "searches the project by full path" do
- expect(Project).to receive(:find_by_full_path).with(project.full_path, follow_redirects: true).and_call_original
-
- described_class.new.perform(pwd(project), key_id, base64_changes)
+ expect(described_class.new.perform(gl_repository, key_id, base64_changes)).to be(false)
end
end
@@ -49,7 +41,7 @@ describe PostReceive do
it "calls GitTagPushService" do
expect_any_instance_of(GitPushService).to receive(:execute).and_return(true)
expect_any_instance_of(GitTagPushService).not_to receive(:execute)
- described_class.new.perform(project_identifier, key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
@@ -59,7 +51,7 @@ describe PostReceive do
it "calls GitTagPushService" do
expect_any_instance_of(GitPushService).not_to receive(:execute)
expect_any_instance_of(GitTagPushService).to receive(:execute).and_return(true)
- described_class.new.perform(project_identifier, key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
@@ -69,12 +61,12 @@ describe PostReceive do
it "does not call any of the services" do
expect_any_instance_of(GitPushService).not_to receive(:execute)
expect_any_instance_of(GitTagPushService).not_to receive(:execute)
- described_class.new.perform(project_identifier, key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
context "gitlab-ci.yml" do
- subject { described_class.new.perform(project_identifier, key_id, base64_changes) }
+ subject { described_class.new.perform(gl_repository, key_id, base64_changes) }
context "creates a Ci::Pipeline for every change" do
before do
@@ -111,7 +103,7 @@ describe PostReceive do
it 'calls SystemHooksService' do
expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with(fake_hook_data, :repository_update_hooks).and_return(true)
- described_class.new.perform(project_identifier, key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
end
@@ -119,7 +111,7 @@ describe PostReceive do
context "webhook" do
it "fetches the correct project" do
expect(Project).to receive(:find_by).with(id: project.id.to_s)
- described_class.new.perform(project_identifier, key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
it "does not run if the author is not in the project" do
@@ -129,7 +121,7 @@ describe PostReceive do
expect(project).not_to receive(:execute_hooks)
- expect(described_class.new.perform(project_identifier, key_id, base64_changes)).to be_falsey
+ expect(described_class.new.perform(gl_repository, key_id, base64_changes)).to be_falsey
end
it "asks the project to trigger all hooks" do
@@ -137,18 +129,14 @@ describe PostReceive do
expect(project).to receive(:execute_hooks).twice
expect(project).to receive(:execute_services).twice
- described_class.new.perform(project_identifier, key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
it "enqueues a UpdateMergeRequestsWorker job" do
allow(Project).to receive(:find_by).and_return(project)
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(project.id, project.owner.id, any_args)
- described_class.new.perform(project_identifier, key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
-
- def pwd(project)
- File.join(Gitlab.config.repositories.storages.default['path'], project.path_with_namespace)
- end
end