author    | Phil Hughes <me@iamphill.com> | 2018-05-25 13:49:50 +0100
committer | Phil Hughes <me@iamphill.com> | 2018-05-25 13:49:50 +0100
commit    | 0c0a779f4c97622ad2359a94d1c870b75a8b0e6d (patch)
tree      | b87bc046f6f59d29a47a671c31c325a4fdc0c0e1
parent    | 9c464e5eae6e4eb268bbdae6ad542d1de698b623 (diff)
parent    | 4e257213343673a71a81332e5f5703bfb3043a4f (diff)
download  | gitlab-ce-0c0a779f4c97622ad2359a94d1c870b75a8b0e6d.tar.gz
Merge branch 'master' into ide-jobs-list-components
64 files changed, 831 insertions, 346 deletions
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js index 06fdc8fe65b..dbdc4de7986 100644 --- a/app/assets/javascripts/api.js +++ b/app/assets/javascripts/api.js @@ -21,6 +21,7 @@ const Api = { issuableTemplatePath: '/:namespace_path/:project_path/templates/:type/:key', usersPath: '/api/:version/users.json', commitPath: '/api/:version/projects/:id/repository/commits', + commitPipelinesPath: '/:project_id/commit/:sha/pipelines', branchSinglePath: '/api/:version/projects/:id/repository/branches/:branch', createBranchPath: '/api/:version/projects/:id/repository/branches', pipelinesPath: '/api/:version/projects/:id/pipelines', @@ -166,6 +167,19 @@ const Api = { }); }, + commitPipelines(projectId, sha) { + const encodedProjectId = projectId + .split('/') + .map(fragment => encodeURIComponent(fragment)) + .join('/'); + + const url = Api.buildUrl(Api.commitPipelinesPath) + .replace(':project_id', encodedProjectId) + .replace(':sha', encodeURIComponent(sha)); + + return axios.get(url); + }, + branchSingle(id, branch) { const url = Api.buildUrl(Api.branchSinglePath) .replace(':id', encodeURIComponent(id)) diff --git a/app/assets/javascripts/ide/components/ide.vue b/app/assets/javascripts/ide/components/ide.vue index 318e7aa5716..c8bbdcced1a 100644 --- a/app/assets/javascripts/ide/components/ide.vue +++ b/app/assets/javascripts/ide/components/ide.vue @@ -129,8 +129,6 @@ export default { v-if="currentProjectId" /> </div> - <ide-status-bar - :file="activeFile" - /> + <ide-status-bar :file="activeFile"/> </article> </template> diff --git a/app/assets/javascripts/ide/components/ide_status_bar.vue b/app/assets/javascripts/ide/components/ide_status_bar.vue index 70c6d53c3ab..6f60cfbf184 100644 --- a/app/assets/javascripts/ide/components/ide_status_bar.vue +++ b/app/assets/javascripts/ide/components/ide_status_bar.vue @@ -1,14 +1,16 @@ <script> -import { mapGetters } from 'vuex'; +import { mapActions, mapState, mapGetters } from 'vuex'; import icon from '~/vue_shared/components/icon.vue'; import tooltip from '~/vue_shared/directives/tooltip'; import timeAgoMixin from '~/vue_shared/mixins/timeago'; +import CiIcon from '../../vue_shared/components/ci_icon.vue'; import userAvatarImage from '../../vue_shared/components/user_avatar/user_avatar_image.vue'; export default { components: { icon, userAvatarImage, + CiIcon, }, directives: { tooltip, @@ -27,8 +29,16 @@ export default { }; }, computed: { + ...mapState(['currentBranchId', 'currentProjectId']), ...mapGetters(['currentProject', 'lastCommit']), }, + watch: { + lastCommit() { + if (!this.isPollingInitialized) { + this.initPipelinePolling(); + } + }, + }, mounted() { this.startTimer(); }, @@ -36,13 +46,21 @@ export default { if (this.intervalId) { clearInterval(this.intervalId); } + if (this.isPollingInitialized) { + this.stopPipelinePolling(); + } }, methods: { + ...mapActions(['pipelinePoll', 'stopPipelinePolling']), startTimer() { this.intervalId = setInterval(() => { this.commitAgeUpdate(); }, 1000); }, + initPipelinePolling() { + this.pipelinePoll(); + this.isPollingInitialized = true; + }, commitAgeUpdate() { if (this.lastCommit) { this.lastCommitFormatedAge = this.timeFormated(this.lastCommit.committed_date); @@ -61,6 +79,23 @@ export default { class="ide-status-branch" v-if="lastCommit && lastCommitFormatedAge" > + <span + class="ide-status-pipeline" + v-if="lastCommit.pipeline && lastCommit.pipeline.details" + > + <ci-icon + :status="lastCommit.pipeline.details.status" + v-tooltip + 
:title="lastCommit.pipeline.details.status.text" + /> + Pipeline + <a + class="monospace" + :href="lastCommit.pipeline.details.status.details_path">#{{ lastCommit.pipeline.id }}</a> + {{ lastCommit.pipeline.details.status.text }} + for + </span> + <icon name="commit" /> diff --git a/app/assets/javascripts/ide/services/index.js b/app/assets/javascripts/ide/services/index.js index a12e637616a..e8b51f2b516 100644 --- a/app/assets/javascripts/ide/services/index.js +++ b/app/assets/javascripts/ide/services/index.js @@ -75,4 +75,8 @@ export default { }, }); }, + lastCommitPipelines({ getters }) { + const commitSha = getters.lastCommit.id; + return Api.commitPipelines(getters.currentProject.path_with_namespace, commitSha); + }, }; diff --git a/app/assets/javascripts/ide/stores/actions/project.js b/app/assets/javascripts/ide/stores/actions/project.js index eff9bc03651..cece9154c82 100644 --- a/app/assets/javascripts/ide/stores/actions/project.js +++ b/app/assets/javascripts/ide/stores/actions/project.js @@ -1,6 +1,11 @@ +import Visibility from 'visibilityjs'; import flash from '~/flash'; +import { __ } from '~/locale'; import service from '../../services'; import * as types from '../mutation_types'; +import Poll from '../../../lib/utils/poll'; + +let eTagPoll; export const getProjectData = ( { commit, state, dispatch }, @@ -21,7 +26,7 @@ export const getProjectData = ( }) .catch(() => { flash( - 'Error loading project data. Please try again.', + __('Error loading project data. Please try again.'), 'alert', document, null, @@ -59,7 +64,7 @@ export const getBranchData = ( }) .catch(() => { flash( - 'Error loading branch data. Please try again.', + __('Error loading branch data. Please try again.'), 'alert', document, null, @@ -73,25 +78,74 @@ export const getBranchData = ( } }); -export const refreshLastCommitData = ( - { commit, state, dispatch }, - { projectId, branchId } = {}, -) => service - .getBranchData(projectId, branchId) - .then(({ data }) => { - commit(types.SET_BRANCH_COMMIT, { - projectId, - branchId, - commit: data.commit, +export const refreshLastCommitData = ({ commit, state, dispatch }, { projectId, branchId } = {}) => + service + .getBranchData(projectId, branchId) + .then(({ data }) => { + commit(types.SET_BRANCH_COMMIT, { + projectId, + branchId, + commit: data.commit, + }); + }) + .catch(() => { + flash(__('Error loading last commit.'), 'alert', document, null, false, true); }); - }) - .catch(() => { - flash( - 'Error loading last commit.', - 'alert', - document, - null, - false, - true, + +export const pollSuccessCallBack = ({ commit, state, dispatch }, { data }) => { + if (data.pipelines && data.pipelines.length) { + const lastCommitHash = + state.projects[state.currentProjectId].branches[state.currentBranchId].commit.id; + const lastCommitPipeline = data.pipelines.find( + pipeline => pipeline.commit.id === lastCommitHash, ); + commit(types.SET_LAST_COMMIT_PIPELINE, { + projectId: state.currentProjectId, + branchId: state.currentBranchId, + pipeline: lastCommitPipeline || {}, + }); + } + + return data; +}; + +export const pipelinePoll = ({ getters, dispatch }) => { + eTagPoll = new Poll({ + resource: service, + method: 'lastCommitPipelines', + data: { + getters, + }, + successCallback: ({ data }) => dispatch('pollSuccessCallBack', { data }), + errorCallback: () => { + flash( + __('Something went wrong while fetching the latest pipeline status.'), + 'alert', + document, + null, + false, + true, + ); + }, }); + + if (!Visibility.hidden()) { + eTagPoll.makeRequest(); + } + + 
Visibility.change(() => { + if (!Visibility.hidden()) { + eTagPoll.restart(); + } else { + eTagPoll.stop(); + } + }); +}; + +export const stopPipelinePolling = () => { + eTagPoll.stop(); +}; + +export const restartPipelinePolling = () => { + eTagPoll.restart(); +}; diff --git a/app/assets/javascripts/ide/stores/mutation_types.js b/app/assets/javascripts/ide/stores/mutation_types.js index 27e91573510..4e8340b3e9e 100644 --- a/app/assets/javascripts/ide/stores/mutation_types.js +++ b/app/assets/javascripts/ide/stores/mutation_types.js @@ -23,6 +23,7 @@ export const SET_BRANCH = 'SET_BRANCH'; export const SET_BRANCH_COMMIT = 'SET_BRANCH_COMMIT'; export const SET_BRANCH_WORKING_REFERENCE = 'SET_BRANCH_WORKING_REFERENCE'; export const TOGGLE_BRANCH_OPEN = 'TOGGLE_BRANCH_OPEN'; +export const SET_LAST_COMMIT_PIPELINE = 'SET_LAST_COMMIT_PIPELINE'; // Tree mutation types export const SET_DIRECTORY_DATA = 'SET_DIRECTORY_DATA'; diff --git a/app/assets/javascripts/ide/stores/mutations/branch.js b/app/assets/javascripts/ide/stores/mutations/branch.js index e09f88878f4..f17ec4da308 100644 --- a/app/assets/javascripts/ide/stores/mutations/branch.js +++ b/app/assets/javascripts/ide/stores/mutations/branch.js @@ -14,6 +14,10 @@ export default { treeId: `${projectPath}/${branchName}`, active: true, workingReference: '', + commit: { + ...branch.commit, + pipeline: {}, + }, }, }, }); @@ -28,4 +32,9 @@ export default { commit, }); }, + [types.SET_LAST_COMMIT_PIPELINE](state, { projectId, branchId, pipeline }) { + Object.assign(state.projects[projectId].branches[branchId].commit, { + pipeline, + }); + }, }; diff --git a/app/assets/javascripts/pipelines/components/empty_state.vue b/app/assets/javascripts/pipelines/components/empty_state.vue index 10ac8c08bed..6083421073e 100644 --- a/app/assets/javascripts/pipelines/components/empty_state.vue +++ b/app/assets/javascripts/pipelines/components/empty_state.vue @@ -34,7 +34,7 @@ </h4> <p> - {{ s__(`Pipelines|Continous Integration can help + {{ s__(`Pipelines|Continuous Integration can help catch bugs by running your tests automatically, while Continuous Deployment can help you deliver code to your product environment.`) }} diff --git a/app/assets/javascripts/pipelines/components/pipelines_table.vue b/app/assets/javascripts/pipelines/components/pipelines_table.vue index 41986b827cd..4318abe97e0 100644 --- a/app/assets/javascripts/pipelines/components/pipelines_table.vue +++ b/app/assets/javascripts/pipelines/components/pipelines_table.vue @@ -37,6 +37,7 @@ return { pipelineId: '', endpoint: '', + cancelingPipeline: null, }; }, computed: { @@ -64,6 +65,7 @@ }, onSubmit() { eventHub.$emit('postAction', this.endpoint); + this.cancelingPipeline = this.pipelineId; }, }, }; @@ -106,6 +108,7 @@ :update-graph-dropdown="updateGraphDropdown" :auto-devops-help-path="autoDevopsHelpPath" :view-type="viewType" + :canceling-pipeline="cancelingPipeline" /> <modal diff --git a/app/assets/javascripts/pipelines/components/pipelines_table_row.vue b/app/assets/javascripts/pipelines/components/pipelines_table_row.vue index 0f671ceea21..b99246a92d0 100644 --- a/app/assets/javascripts/pipelines/components/pipelines_table_row.vue +++ b/app/assets/javascripts/pipelines/components/pipelines_table_row.vue @@ -46,12 +46,16 @@ type: String, required: true, }, + cancelingPipeline: { + type: String, + required: false, + default: null, + }, }, pipelinesTable: PIPELINES_TABLE, data() { return { isRetrying: false, - isCancelling: false, }; }, computed: { @@ -227,12 +231,14 @@ isChildView() { return 
this.viewType === 'child'; }, + + isCancelling() { + return this.cancelingPipeline === this.pipeline.id; + }, }, methods: { handleCancelClick() { - this.isCancelling = true; - eventHub.$emit('openConfirmationModal', { pipelineId: this.pipeline.id, endpoint: this.pipeline.cancel_path, diff --git a/app/assets/stylesheets/framework/variables.scss b/app/assets/stylesheets/framework/variables.scss index b8ea8ee14c5..86f82ec14ce 100644 --- a/app/assets/stylesheets/framework/variables.scss +++ b/app/assets/stylesheets/framework/variables.scss @@ -230,7 +230,7 @@ $row-hover: $blue-50; $row-hover-border: $blue-200; $progress-color: #c0392b; $header-height: 40px; -$ide-statusbar-height: 27px; +$ide-statusbar-height: 25px; $fixed-layout-width: 1280px; $limited-layout-width: 990px; $limited-layout-width-sm: 790px; diff --git a/app/assets/stylesheets/pages/repo.scss b/app/assets/stylesheets/pages/repo.scss index a6bba7eb9e5..4e8b5d3b1b7 100644 --- a/app/assets/stylesheets/pages/repo.scss +++ b/app/assets/stylesheets/pages/repo.scss @@ -22,7 +22,6 @@ height: calc(100vh - #{$header-height}); margin-top: 0; border-top: 1px solid $white-dark; - border-bottom: 1px solid $white-dark; padding-bottom: $ide-statusbar-height; &.is-collapsed { @@ -380,7 +379,7 @@ .ide-status-bar { border-top: 1px solid $white-dark; - padding: $gl-bar-padding $gl-padding; + padding: 2px $gl-padding-8 0; background: $white-light; display: flex; justify-content: space-between; @@ -391,12 +390,19 @@ left: 0; width: 100%; + font-size: 12px; + line-height: 22px; + + * { + font-size: inherit; + } + > div + div { padding-left: $gl-padding; } svg { - vertical-align: middle; + vertical-align: sub; } } diff --git a/app/controllers/omniauth_callbacks_controller.rb b/app/controllers/omniauth_callbacks_controller.rb index ed89bed029b..27fd5f7ba37 100644 --- a/app/controllers/omniauth_callbacks_controller.rb +++ b/app/controllers/omniauth_callbacks_controller.rb @@ -26,11 +26,11 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController # Extend the standard message generation to accept our custom exception def failure_message - exception = env["omniauth.error"] + exception = request.env["omniauth.error"] error = exception.error_reason if exception.respond_to?(:error_reason) error ||= exception.error if exception.respond_to?(:error) error ||= exception.message if exception.respond_to?(:message) - error ||= env["omniauth.error.type"].to_s + error ||= request.env["omniauth.error.type"].to_s error.to_s.humanize if error end diff --git a/app/controllers/projects/clusters/applications_controller.rb b/app/controllers/projects/clusters/applications_controller.rb index 90c7fa62216..35885543622 100644 --- a/app/controllers/projects/clusters/applications_controller.rb +++ b/app/controllers/projects/clusters/applications_controller.rb @@ -5,9 +5,10 @@ class Projects::Clusters::ApplicationsController < Projects::ApplicationControll before_action :authorize_create_cluster!, only: [:create] def create - Clusters::Applications::ScheduleInstallationService.new(project, current_user, - application_class: @application_class, - cluster: @cluster).execute + application = @application_class.find_or_create_by!(cluster: @cluster) + + Clusters::Applications::ScheduleInstallationService.new(project, current_user).execute(application) + head :no_content rescue StandardError head :bad_request diff --git a/app/helpers/commits_helper.rb b/app/helpers/commits_helper.rb index e594a1d0ba3..f5a7871a6e0 100644 --- a/app/helpers/commits_helper.rb +++ 
b/app/helpers/commits_helper.rb @@ -63,7 +63,7 @@ module CommitsHelper # Returns a link formatted as a commit branch link def commit_branch_link(url, text) link_to(url, class: 'label label-gray ref-name branch-link') do - sprite_icon('fork', size: 12, css_class: 'fork-svg') + "#{text}" + sprite_icon('branch', size: 12, css_class: 'fork-svg') + "#{text}" end end @@ -77,7 +77,7 @@ module CommitsHelper # Returns a link formatted as a commit tag link def commit_tag_link(url, text) link_to(url, class: 'label label-gray ref-name') do - icon('tag', class: 'append-right-5') + "#{text}" + sprite_icon('tag', size: 12, css_class: 'append-right-5 vertical-align-middle') + "#{text}" end end diff --git a/app/models/project.rb b/app/models/project.rb index 0e727664d39..0fe9f8880b4 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -1431,8 +1431,8 @@ class Project < ActiveRecord::Base self.runners_token && ActiveSupport::SecurityUtils.variable_size_secure_compare(token, self.runners_token) end - def open_issues_count - Projects::OpenIssuesCountService.new(self).count + def open_issues_count(current_user = nil) + Projects::OpenIssuesCountService.new(self, current_user).count end def open_merge_requests_count diff --git a/app/services/clusters/applications/schedule_installation_service.rb b/app/services/clusters/applications/schedule_installation_service.rb index eb8caa68ef7..9c5461e85e1 100644 --- a/app/services/clusters/applications/schedule_installation_service.rb +++ b/app/services/clusters/applications/schedule_installation_service.rb @@ -1,21 +1,10 @@ module Clusters module Applications class ScheduleInstallationService < ::BaseService - def execute - application_class.find_or_create_by!(cluster: cluster).try do |application| - application.make_scheduled! - ClusterInstallAppWorker.perform_async(application.name, application.id) - end - end - - private - - def application_class - params[:application_class] - end + def execute(application) + application.make_scheduled! - def cluster - params[:cluster] + ClusterInstallAppWorker.perform_async(application.name, application.id) end end end diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb index 1fb1796b56c..0127d781686 100644 --- a/app/services/merge_requests/refresh_service.rb +++ b/app/services/merge_requests/refresh_service.rb @@ -3,6 +3,12 @@ module MergeRequests def execute(oldrev, newrev, ref) return true unless Gitlab::Git.branch_ref?(ref) + do_execute(oldrev, newrev, ref) + end + + private + + def do_execute(oldrev, newrev, ref) @oldrev, @newrev = oldrev, newrev @branch_name = Gitlab::Git.ref_name(ref) @@ -28,8 +34,6 @@ module MergeRequests true end - private - def close_upon_missing_source_branch_ref # MergeRequest#reload_diff ignores not opened MRs. This means it won't # create an `empty` diff for `closed` MRs without a source branch, keeping diff --git a/app/services/projects/open_issues_count_service.rb b/app/services/projects/open_issues_count_service.rb index a975a06a05c..0a004677417 100644 --- a/app/services/projects/open_issues_count_service.rb +++ b/app/services/projects/open_issues_count_service.rb @@ -2,14 +2,42 @@ module Projects # Service class for counting and caching the number of open issues of a # project. class OpenIssuesCountService < Projects::CountService + include Gitlab::Utils::StrongMemoize + + def initialize(project, user = nil) + @user = user + + super(project) + end + def cache_key_name - 'open_issues_count' + public_only? ? 
'public_open_issues_count' : 'total_open_issues_count' + end + + def public_only? + !user_is_at_least_reporter? + end + + def relation_for_count + self.class.query(@project, public_only: public_only?) + end + + def user_is_at_least_reporter? + strong_memoize(:user_is_at_least_reporter) do + @user && @project.team.member?(@user, Gitlab::Access::REPORTER) + end end - def self.query(project_ids) - # We don't include confidential issues in this number since this would - # expose the number of confidential issues to non project members. - Issue.opened.public_only.where(project: project_ids) + # We only show total issues count for reporters + # which are allowed to view confidential issues + # This will still show a discrepancy on issues number but should be less than before. + # Check https://gitlab.com/gitlab-org/gitlab-ce/issues/38418 description. + def self.query(projects, public_only: true) + if public_only + Issue.opened.public_only.where(project: projects) + else + Issue.opened.where(project: projects) + end end end end diff --git a/app/views/layouts/nav/sidebar/_project.html.haml b/app/views/layouts/nav/sidebar/_project.html.haml index f3af15d771d..99e4113bd5a 100644 --- a/app/views/layouts/nav/sidebar/_project.html.haml +++ b/app/views/layouts/nav/sidebar/_project.html.haml @@ -89,7 +89,7 @@ = _('Issues') - if @project.issues_enabled? %span.badge.count.issue_counter - = number_with_delimiter(@project.open_issues_count) + = number_with_delimiter(@project.open_issues_count(current_user)) %ul.sidebar-sub-level-items = nav_link(controller: :issues, html_options: { class: "fly-out-top-item" } ) do @@ -98,7 +98,7 @@ = _('Issues') - if @project.issues_enabled? %span.badge.count.issue_counter.fly-out-badge - = number_with_delimiter(@project.open_issues_count) + = number_with_delimiter(@project.open_issues_count(current_user)) %li.divider.fly-out-top-item = nav_link(controller: :issues, action: :index) do = link_to project_issues_path(@project), title: 'Issues' do diff --git a/app/views/projects/commit/branches.html.haml b/app/views/projects/commit/branches.html.haml index 8611129b356..a91e31afc2b 100644 --- a/app/views/projects/commit/branches.html.haml +++ b/app/views/projects/commit/branches.html.haml @@ -6,7 +6,8 @@ - if @branches.any? || @tags.any? 
|| @tags_limit_exceeded %span - = link_to "…", "#", class: "js-details-expand label label-gray" + = link_to "#", class: "js-details-expand label label-gray ref-name" do + = sprite_icon('ellipsis_h', size: 12, css_class: 'vertical-align-middle') %span.js-details-content.hide = commit_branches_links(@project, @branches) - if @tags_limit_exceeded diff --git a/changelogs/unreleased/44579-ide-add-pipeline-to-status-bar.yml b/changelogs/unreleased/44579-ide-add-pipeline-to-status-bar.yml new file mode 100644 index 00000000000..21e7c795815 --- /dev/null +++ b/changelogs/unreleased/44579-ide-add-pipeline-to-status-bar.yml @@ -0,0 +1,5 @@ +--- +title: Add pipeline status to the status bar of the Web IDE +merge_request: +author: +type: added diff --git a/changelogs/unreleased/commit-branch-tag-icon-update.yml b/changelogs/unreleased/commit-branch-tag-icon-update.yml new file mode 100644 index 00000000000..136b7cbf0f4 --- /dev/null +++ b/changelogs/unreleased/commit-branch-tag-icon-update.yml @@ -0,0 +1,5 @@ +--- +title: Updated icons for branch and tag names in commit details +merge_request: 18953 +author: Constance Okoghenun +type: changed diff --git a/changelogs/unreleased/issue_38418.yml b/changelogs/unreleased/issue_38418.yml new file mode 100644 index 00000000000..79452b27e4b --- /dev/null +++ b/changelogs/unreleased/issue_38418.yml @@ -0,0 +1,5 @@ +--- +title: Fix issue count on sidebar +merge_request: +author: +type: other diff --git a/changelogs/unreleased/migrate-restore-repo-to-gitaly.yml b/changelogs/unreleased/migrate-restore-repo-to-gitaly.yml new file mode 100644 index 00000000000..59f375de20e --- /dev/null +++ b/changelogs/unreleased/migrate-restore-repo-to-gitaly.yml @@ -0,0 +1,5 @@ +--- +title: Support restoring repositories into gitaly +merge_request: +author: +type: changed diff --git a/changelogs/unreleased/sh-fix-backup-specific-rake-task.yml b/changelogs/unreleased/sh-fix-backup-specific-rake-task.yml new file mode 100644 index 00000000000..71b121710ee --- /dev/null +++ b/changelogs/unreleased/sh-fix-backup-specific-rake-task.yml @@ -0,0 +1,5 @@ +--- +title: Fix backup creation and restore for specific Rake tasks +merge_request: +author: +type: fixed diff --git a/config/initializers/7_prometheus_metrics.rb b/config/initializers/7_prometheus_metrics.rb index eb7959e4da6..146c4b1e024 100644 --- a/config/initializers/7_prometheus_metrics.rb +++ b/config/initializers/7_prometheus_metrics.rb @@ -25,7 +25,7 @@ Sidekiq.configure_server do |config| end end -if Gitlab::Metrics.prometheus_metrics_enabled? +if !Rails.env.test? && Gitlab::Metrics.prometheus_metrics_enabled? unless Sidekiq.server? 
Gitlab::Metrics::Samplers::UnicornSampler.initialize_instance(Settings.monitoring.unicorn_sampler_interval).start end diff --git a/config/webpack.config.js b/config/webpack.config.js index 27050e7069d..d6ab32972fb 100644 --- a/config/webpack.config.js +++ b/config/webpack.config.js @@ -306,7 +306,10 @@ module.exports = { host: DEV_SERVER_HOST, port: DEV_SERVER_PORT, disableHostCheck: true, - headers: { 'Access-Control-Allow-Origin': '*' }, + headers: { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': '*', + }, stats: 'errors-only', hot: DEV_SERVER_LIVERELOAD, inline: DEV_SERVER_LIVERELOAD, diff --git a/doc/api/issues.md b/doc/api/issues.md index d0063e0b8a2..5613cb6d915 100644 --- a/doc/api/issues.md +++ b/doc/api/issues.md @@ -467,7 +467,7 @@ POST /projects/:id/issues | `description` | string | no | The description of an issue | | `confidential` | boolean | no | Set an issue to be confidential. Default is `false`. | | `assignee_ids` | Array[integer] | no | The ID of the users to assign issue | -| `milestone_id` | integer | no | The ID of a milestone to assign issue | +| `milestone_id` | integer | no | The global ID of a milestone to assign issue | | `labels` | string | no | Comma-separated label names for an issue | | `created_at` | string | no | Date time string, ISO 8601 formatted, e.g. `2016-03-11T03:45:40Z` (requires admin or project owner rights) | | `due_date` | string | no | Date time string in the format YEAR-MONTH-DAY, e.g. `2016-03-11` | @@ -548,7 +548,7 @@ PUT /projects/:id/issues/:issue_iid | `description` | string | no | The description of an issue | | `confidential` | boolean | no | Updates an issue to be confidential | | `assignee_ids` | Array[integer] | no | The ID of the user(s) to assign the issue to. Set to `0` or provide an empty value to unassign all assignees. | -| `milestone_id` | integer | no | The ID of a milestone to assign the issue to. Set to `0` or provide an empty value to unassign a milestone.| +| `milestone_id` | integer | no | The global ID of a milestone to assign the issue to. Set to `0` or provide an empty value to unassign a milestone.| | `labels` | string | no | Comma-separated label names for an issue. Set to an empty string to unassign all labels. | | `state_event` | string | no | The state event of an issue. Set `close` to close the issue and `reopen` to reopen it | | `updated_at` | string | no | Date time string, ISO 8601 formatted, e.g. 
`2016-03-11T03:45:40Z` (requires admin or project owner rights) | diff --git a/doc/api/merge_requests.md b/doc/api/merge_requests.md index cbd51c9870c..4e34831422a 100644 --- a/doc/api/merge_requests.md +++ b/doc/api/merge_requests.md @@ -539,7 +539,7 @@ POST /projects/:id/merge_requests | `description` | string | no | Description of MR | | `target_project_id` | integer | no | The target project (numeric id) | | `labels` | string | no | Labels for MR as a comma-separated list | -| `milestone_id` | integer | no | The ID of a milestone | +| `milestone_id` | integer | no | The global ID of a milestone | | `remove_source_branch` | boolean | no | Flag indicating if a merge request should remove the source branch when merging | | `allow_maintainer_to_push` | boolean | no | Whether or not a maintainer of the target project can push to the source branch | @@ -622,7 +622,7 @@ PUT /projects/:id/merge_requests/:merge_request_iid | `target_branch` | string | no | The target branch | | `title` | string | no | Title of MR | | `assignee_id` | integer | no | The ID of the user to assign the merge request to. Set to `0` or provide an empty value to unassign all assignees. | -| `milestone_id` | integer | no | The ID of a milestone to assign the merge request to. Set to `0` or provide an empty value to unassign a milestone.| +| `milestone_id` | integer | no | The global ID of a milestone to assign the merge request to. Set to `0` or provide an empty value to unassign a milestone.| | `labels` | string | no | Comma-separated label names for a merge request. Set to an empty string to unassign all labels. | | `description` | string | no | Description of MR | | `state_event` | string | no | New state (close/reopen) | diff --git a/doc/ci/examples/code_climate.md b/doc/ci/examples/code_climate.md index d1aa783cc9c..cc19e090964 100644 --- a/doc/ci/examples/code_climate.md +++ b/doc/ci/examples/code_climate.md @@ -5,10 +5,10 @@ GitLab CI and Docker. First, you need GitLab Runner with [docker-in-docker executor][dind]. -Once you set up the Runner, add a new job to `.gitlab-ci.yml`, called `codequality`: +Once you set up the Runner, add a new job to `.gitlab-ci.yml`, called `code_quality`: ```yaml -codequality: +code_quality: image: docker:stable variables: DOCKER_DRIVER: overlay2 @@ -23,20 +23,27 @@ codequality: --volume /var/run/docker.sock:/var/run/docker.sock "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code artifacts: - paths: [codeclimate.json] + paths: [gl-code-quality-report.json] ``` -The above example will create a `codequality` job in your CI/CD pipeline which +The above example will create a `code_quality` job in your CI/CD pipeline which will scan your source code for code quality issues. The report will be saved as an artifact that you can later download and analyze. TIP: **Tip:** Starting with [GitLab Starter][ee] 9.3, this information will be automatically extracted and shown right in the merge request widget. To do -so, the CI/CD job must be named `codequality` and the artifact path must be -`codeclimate.json`. +so, the CI/CD job must be named `code_quality` and the artifact path must be +`gl-code-quality-report.json`. [Learn more on code quality diffs in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/code_quality_diff.html). +CAUTION: **Caution:** +Code Quality was previously using `codeclimate` and `codequality` for job name and +`codeclimate.json` for the artifact name. 
While these old names +are still maintained they have been deprecated with GitLab 11.0 and may be removed +in next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml` +configuration to reflect that change. + [cli]: https://github.com/codeclimate/codeclimate [dind]: ../docker/using_docker_build.md#use-docker-in-docker-executor [ee]: https://about.gitlab.com/products/ diff --git a/doc/ci/examples/container_scanning.md b/doc/ci/examples/container_scanning.md index a9501f6c577..92ff90507ee 100644 --- a/doc/ci/examples/container_scanning.md +++ b/doc/ci/examples/container_scanning.md @@ -7,10 +7,10 @@ for Vulnerability Static Analysis for containers. All you need is a GitLab Runner with the Docker executor (the shared Runners on GitLab.com will work fine). You can then add a new job to `.gitlab-ci.yml`, -called `sast:container`: +called `container_scanning`: ```yaml -sast:container: +container_scanning: image: docker:stable variables: DOCKER_DRIVER: overlay2 @@ -34,12 +34,12 @@ sast:container: - retries=0 - echo "Waiting for clair daemon to start" - while( ! wget -T 10 -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; echo -n "." ; if [ $retries -eq 10 ] ; then echo " Timeout, aborting." ; exit 1 ; fi ; retries=$(($retries+1)) ; done - - ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-sast-container-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true + - ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-container-scanning-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true artifacts: - paths: [gl-sast-container-report.json] + paths: [gl-container-scanning-report.json] ``` -The above example will create a `sast:container` job in your CI/CD pipeline, pull +The above example will create a `container_scanning` job in your CI/CD pipeline, pull the image from the [Container Registry](../../user/project/container_registry.md) (whose name is defined from the two `CI_APPLICATION_` variables) and scan it for possible vulnerabilities. The report will be saved as an artifact that you @@ -52,8 +52,15 @@ in our case its named `clair-whitelist.yml`. TIP: **Tip:** Starting with [GitLab Ultimate][ee] 10.4, this information will be automatically extracted and shown right in the merge request widget. To do -so, the CI/CD job must be named `sast:container` and the artifact path must be -`gl-sast-container-report.json`. +so, the CI/CD job must be named `container_scanning` and the artifact path must be +`gl-container-scanning-report.json`. [Learn more on container scanning results shown in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html). +CAUTION: **Caution:** +Container Scanning was previously using `sast:container` for job name and +`gl-sast-container-report.json` for the artifact name. While these old names +are still maintained they have been deprecated with GitLab 11.0 and may be removed +in next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml` +configuration to reflect that change. 
+ [ee]: https://about.gitlab.com/products/ diff --git a/doc/install/requirements.md b/doc/install/requirements.md index 1f2b4d9d3d9..1f399a8a3f7 100644 --- a/doc/install/requirements.md +++ b/doc/install/requirements.md @@ -27,8 +27,8 @@ Please see the [installation from source guide](installation.md) and the [instal ### Non-Unix operating systems such as Windows -GitLab is developed for Unix operating systems. -GitLab does **not** run on Windows and we have no plans of supporting it in the near future. +GitLab is developed for Unix operating systems. +It does **not** run on Windows, and we have no plans to support it in the near future. For the latest development status view this [issue](https://gitlab.com/gitlab-org/gitlab-ce/issues/46567). Please consider using a virtual machine to run GitLab. ## Ruby versions diff --git a/doc/raketasks/backup_restore.md b/doc/raketasks/backup_restore.md index 77139c50d07..5faae7ca2d6 100644 --- a/doc/raketasks/backup_restore.md +++ b/doc/raketasks/backup_restore.md @@ -31,8 +31,8 @@ sudo apt-get install -y rsync >**Note:** In GitLab 9.2 the timestamp format was changed from `EPOCH_YYYY_MM_DD` to -`EPOCH_YYYY_MM_DD_GitLab version`, for example `1493107454_2017_04_25` -would become `1493107454_2017_04_25_9.1.0`. +`EPOCH_YYYY_MM_DD_GitLab_version`, for example `1493107454_2018_04_25` +would become `1493107454_2018_04_25_10.6.4-ce`. The backup archive will be saved in `backup_path`, which is specified in the `config/gitlab.yml` file. @@ -41,8 +41,8 @@ identifies the time at which each backup was created, plus the GitLab version. The timestamp is needed if you need to restore GitLab and multiple backups are available. -For example, if the backup name is `1493107454_2017_04_25_9.1.0_gitlab_backup.tar`, -then the timestamp is `1493107454_2017_04_25_9.1.0`. +For example, if the backup name is `1493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar`, +then the timestamp is `1493107454_2018_04_25_10.6.4-ce`. ### Creating a backup of the GitLab system @@ -574,7 +574,7 @@ First make sure your backup tar file is in the backup directory described in the `/var/opt/gitlab/backups`. ```shell -sudo cp 1493107454_2017_04_25_9.1.0_gitlab_backup.tar /var/opt/gitlab/backups/ +sudo cp 11493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar /var/opt/gitlab/backups/ ``` Stop the processes that are connected to the database. Leave the rest of GitLab @@ -592,7 +592,7 @@ restore: ```shell # This command will overwrite the contents of your GitLab database! -sudo gitlab-rake gitlab:backup:restore BACKUP=1493107454_2017_04_25_9.1.0 +sudo gitlab-rake gitlab:backup:restore BACKUP=1493107454_2018_04_25_10.6.4-ce ``` Next, restore `/etc/gitlab/gitlab-secrets.json` if necessary as mentioned above. diff --git a/doc/topics/autodevops/index.md b/doc/topics/autodevops/index.md index f80c82acfa3..efec365042a 100644 --- a/doc/topics/autodevops/index.md +++ b/doc/topics/autodevops/index.md @@ -220,8 +220,8 @@ tests, it's up to you to add them. ### Auto Code Quality -Auto Code Quality uses the open source -[`codeclimate` image](https://hub.docker.com/r/codeclimate/codeclimate/) to run +Auto Code Quality uses the +[Code Quality image](https://gitlab.com/gitlab-org/security-products/codequality) to run static analysis and other code checks on the current code. The report is created, and is uploaded as an artifact which you can later download and check out. 
diff --git a/doc/topics/autodevops/quick_start_guide.md b/doc/topics/autodevops/quick_start_guide.md index 8989a4c8956..61c04f3d9bb 100644 --- a/doc/topics/autodevops/quick_start_guide.md +++ b/doc/topics/autodevops/quick_start_guide.md @@ -126,10 +126,10 @@ Next, a pipeline needs to be triggered. Since the test project doesn't have a manually visit `https://gitlab.com/<username>/minimal-ruby-app/pipelines/new`, where `<username>` is your username. -This will create a new pipeline with several jobs: `build`, `test`, `codequality`, +This will create a new pipeline with several jobs: `build`, `test`, `code_quality`, and `production`. The `build` job will create a Docker image with your new change and push it to the Container Registry. The `test` job will test your -changes, whereas the `codequality` job will run static analysis on your changes. +changes, whereas the `code_quality` job will run static analysis on your changes. Finally, the `production` job will deploy your changes to a production application. Once the deploy job succeeds you should be able to see your application by diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb index 6a5a223a614..45a935ab352 100644 --- a/lib/backup/artifacts.rb +++ b/lib/backup/artifacts.rb @@ -2,7 +2,11 @@ require 'backup/files' module Backup class Artifacts < Files - def initialize + attr_reader :progress + + def initialize(progress) + @progress = progress + super('artifacts', JobArtifactUploader.root) end end diff --git a/lib/backup/builds.rb b/lib/backup/builds.rb index f869916e199..adf85ca4719 100644 --- a/lib/backup/builds.rb +++ b/lib/backup/builds.rb @@ -2,7 +2,11 @@ require 'backup/files' module Backup class Builds < Files - def initialize + attr_reader :progress + + def initialize(progress) + @progress = progress + super('builds', Settings.gitlab_ci.builds_path) end end diff --git a/lib/backup/database.rb b/lib/backup/database.rb index 5e6828de597..1608f7ad02d 100644 --- a/lib/backup/database.rb +++ b/lib/backup/database.rb @@ -2,9 +2,11 @@ require 'yaml' module Backup class Database + attr_reader :progress attr_reader :config, :db_file_name - def initialize + def initialize(progress) + @progress = progress @config = YAML.load_file(File.join(Rails.root, 'config', 'database.yml'))[Rails.env] @db_file_name = File.join(Gitlab.config.backup.path, 'db', 'database.sql.gz') end @@ -19,12 +21,12 @@ module Backup dump_pid = case config["adapter"] when /^mysql/ then - $progress.print "Dumping MySQL database #{config['database']} ... " + progress.print "Dumping MySQL database #{config['database']} ... " # Workaround warnings from MySQL 5.6 about passwords on cmd line ENV['MYSQL_PWD'] = config["password"].to_s if config["password"] spawn('mysqldump', *mysql_args, config['database'], out: compress_wr) when "postgresql" then - $progress.print "Dumping PostgreSQL database #{config['database']} ... " + progress.print "Dumping PostgreSQL database #{config['database']} ... " pg_env pgsql_args = ["--clean"] # Pass '--clean' to include 'DROP TABLE' statements in the DB dump. if Gitlab.config.backup.pg_schema @@ -53,12 +55,12 @@ module Backup restore_pid = case config["adapter"] when /^mysql/ then - $progress.print "Restoring MySQL database #{config['database']} ... " + progress.print "Restoring MySQL database #{config['database']} ... 
" # Workaround warnings from MySQL 5.6 about passwords on cmd line ENV['MYSQL_PWD'] = config["password"].to_s if config["password"] spawn('mysql', *mysql_args, config['database'], in: decompress_rd) when "postgresql" then - $progress.print "Restoring PostgreSQL database #{config['database']} ... " + progress.print "Restoring PostgreSQL database #{config['database']} ... " pg_env spawn('psql', config['database'], in: decompress_rd) end @@ -111,9 +113,9 @@ module Backup def report_success(success) if success - $progress.puts '[DONE]'.color(:green) + progress.puts '[DONE]'.color(:green) else - $progress.puts '[FAILED]'.color(:red) + progress.puts '[FAILED]'.color(:red) end end end diff --git a/lib/backup/lfs.rb b/lib/backup/lfs.rb index 4e234e50a7a..185ff8ae6bd 100644 --- a/lib/backup/lfs.rb +++ b/lib/backup/lfs.rb @@ -2,7 +2,11 @@ require 'backup/files' module Backup class Lfs < Files - def initialize + attr_reader :progress + + def initialize(progress) + @progress = progress + super('lfs', Settings.lfs.storage_path) end end diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb index f27ce4d2b2b..a8da0c7edef 100644 --- a/lib/backup/manager.rb +++ b/lib/backup/manager.rb @@ -4,6 +4,12 @@ module Backup FOLDERS_TO_BACKUP = %w[repositories db].freeze FILE_NAME_SUFFIX = '_gitlab_backup.tar'.freeze + attr_reader :progress + + def initialize(progress) + @progress = progress + end + def pack # Make sure there is a connection ActiveRecord::Base.connection.reconnect! @@ -14,11 +20,11 @@ module Backup end # create archive - $progress.print "Creating backup archive: #{tar_file} ... " + progress.print "Creating backup archive: #{tar_file} ... " # Set file permissions on open to prevent chmod races. tar_system_options = { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } if Kernel.system('tar', '-cf', '-', *backup_contents, tar_system_options) - $progress.puts "done".color(:green) + progress.puts "done".color(:green) else puts "creating archive #{tar_file} failed".color(:red) abort 'Backup failed' @@ -29,11 +35,11 @@ module Backup end def upload - $progress.print "Uploading backup archive to remote storage #{remote_directory} ... " + progress.print "Uploading backup archive to remote storage #{remote_directory} ... " connection_settings = Gitlab.config.backup.upload.connection if connection_settings.blank? - $progress.puts "skipped".color(:yellow) + progress.puts "skipped".color(:yellow) return end @@ -43,7 +49,7 @@ module Backup multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size, encryption: Gitlab.config.backup.upload.encryption, storage_class: Gitlab.config.backup.upload.storage_class) - $progress.puts "done".color(:green) + progress.puts "done".color(:green) else puts "uploading backup to #{remote_directory} failed".color(:red) abort 'Backup failed' @@ -51,13 +57,13 @@ module Backup end def cleanup - $progress.print "Deleting tmp directories ... " + progress.print "Deleting tmp directories ... " backup_contents.each do |dir| next unless File.exist?(File.join(backup_path, dir)) if FileUtils.rm_rf(File.join(backup_path, dir)) - $progress.puts "done".color(:green) + progress.puts "done".color(:green) else puts "deleting tmp directory '#{dir}' failed".color(:red) abort 'Backup failed' @@ -67,7 +73,7 @@ module Backup def remove_old # delete backups - $progress.print "Deleting old backups ... " + progress.print "Deleting old backups ... 
" keep_time = Gitlab.config.backup.keep_time.to_i if keep_time > 0 @@ -88,31 +94,32 @@ module Backup FileUtils.rm(file) removed += 1 rescue => e - $progress.puts "Deleting #{file} failed: #{e.message}".color(:red) + progress.puts "Deleting #{file} failed: #{e.message}".color(:red) end end end end - $progress.puts "done. (#{removed} removed)".color(:green) + progress.puts "done. (#{removed} removed)".color(:green) else - $progress.puts "skipping".color(:yellow) + progress.puts "skipping".color(:yellow) end end + # rubocop: disable Metrics/AbcSize def unpack Dir.chdir(backup_path) do # check for existing backups in the backup dir if backup_file_list.empty? - $progress.puts "No backups found in #{backup_path}" - $progress.puts "Please make sure that file name ends with #{FILE_NAME_SUFFIX}" + progress.puts "No backups found in #{backup_path}" + progress.puts "Please make sure that file name ends with #{FILE_NAME_SUFFIX}" exit 1 elsif backup_file_list.many? && ENV["BACKUP"].nil? - $progress.puts 'Found more than one backup:' + progress.puts 'Found more than one backup:' # print list of available backups - $progress.puts " " + available_timestamps.join("\n ") - $progress.puts 'Please specify which one you want to restore:' - $progress.puts 'rake gitlab:backup:restore BACKUP=timestamp_of_backup' + progress.puts " " + available_timestamps.join("\n ") + progress.puts 'Please specify which one you want to restore:' + progress.puts 'rake gitlab:backup:restore BACKUP=timestamp_of_backup' exit 1 end @@ -123,31 +130,31 @@ module Backup end unless File.exist?(tar_file) - $progress.puts "The backup file #{tar_file} does not exist!" + progress.puts "The backup file #{tar_file} does not exist!" exit 1 end - $progress.print 'Unpacking backup ... ' + progress.print 'Unpacking backup ... ' unless Kernel.system(*%W(tar -xf #{tar_file})) - $progress.puts 'unpacking backup failed'.color(:red) + progress.puts 'unpacking backup failed'.color(:red) exit 1 else - $progress.puts 'done'.color(:green) + progress.puts 'done'.color(:green) end ENV["VERSION"] = "#{settings[:db_version]}" if settings[:db_version].to_i > 0 # restoring mismatching backups can lead to unexpected problems if settings[:gitlab_version] != Gitlab::VERSION - $progress.puts(<<~HEREDOC.color(:red)) + progress.puts(<<~HEREDOC.color(:red)) GitLab version mismatch: Your current GitLab version (#{Gitlab::VERSION}) differs from the GitLab version in the backup! 
Please switch to the following version and try again: version: #{settings[:gitlab_version]} HEREDOC - $progress.puts - $progress.puts "Hint: git checkout v#{settings[:gitlab_version]}" + progress.puts + progress.puts "Hint: git checkout v#{settings[:gitlab_version]}" exit 1 end end diff --git a/lib/backup/pages.rb b/lib/backup/pages.rb index 5830b209d6e..542e35a7c7c 100644 --- a/lib/backup/pages.rb +++ b/lib/backup/pages.rb @@ -2,7 +2,11 @@ require 'backup/files' module Backup class Pages < Files - def initialize + attr_reader :progress + + def initialize(progress) + @progress = progress + super('pages', Gitlab.config.pages.path) end end diff --git a/lib/backup/registry.rb b/lib/backup/registry.rb index 91698669402..35821805797 100644 --- a/lib/backup/registry.rb +++ b/lib/backup/registry.rb @@ -2,7 +2,11 @@ require 'backup/files' module Backup class Registry < Files - def initialize + attr_reader :progress + + def initialize(progress) + @progress = progress + super('registry', Settings.registry.path) end end diff --git a/lib/backup/repository.rb b/lib/backup/repository.rb index 65e06fd78c0..c3360c391af 100644 --- a/lib/backup/repository.rb +++ b/lib/backup/repository.rb @@ -6,6 +6,12 @@ module Backup include Backup::Helper # rubocop:disable Metrics/AbcSize + attr_reader :progress + + def initialize(progress) + @progress = progress + end + def dump prepare @@ -67,6 +73,9 @@ module Backup end def prepare_directories + # TODO: Need to find a way to do this for gitaly + # Gitaly discussion issue: https://gitlab.com/gitlab-org/gitaly/issues/1194 + Gitlab.config.repositories.storages.each do |name, repository_storage| path = repository_storage.legacy_disk_path next unless File.exist?(path) @@ -87,70 +96,65 @@ module Backup end end + def restore_custom_hooks(project) + # TODO: Need to find a way to do this for gitaly + # Gitaly migration issue: https://gitlab.com/gitlab-org/gitaly/issues/1195 + in_path(path_to_tars(project)) do |dir| + path_to_project_repo = path_to_repo(project) + cmd = %W(tar -xf #{path_to_tars(project, dir)} -C #{path_to_project_repo} #{dir}) + + output, status = Gitlab::Popen.popen(cmd) + unless status.zero? + progress_warn(project, cmd.join(' '), output) + end + end + end + def restore prepare_directories + gitlab_shell = Gitlab::Shell.new Project.find_each(batch_size: 1000) do |project| - progress.print " * #{display_repo_path(project)} ... " - path_to_project_repo = path_to_repo(project) + progress.print " * #{project.full_path} ... " path_to_project_bundle = path_to_bundle(project) - project.ensure_storage_path_exists - cmd = if File.exist?(path_to_project_bundle) - %W(#{Gitlab.config.git.bin_path} clone --bare --mirror #{path_to_project_bundle} #{path_to_project_repo}) - else - %W(#{Gitlab.config.git.bin_path} init --bare #{path_to_project_repo}) - end + restore_repo_success = nil + if File.exist?(path_to_project_bundle) + begin + gitlab_shell.remove_repository(project.repository_storage, project.disk_path) if project.repository_exists? + project.repository.create_from_bundle path_to_project_bundle + restore_repo_success = true + rescue => e + restore_repo_success = false + progress.puts "Error: #{e}".color(:red) + end + else + restore_repo_success = gitlab_shell.create_repository(project.repository_storage, project.disk_path) + end - output, status = Gitlab::Popen.popen(cmd) - if status.zero? 
+ if restore_repo_success progress.puts "[DONE]".color(:green) else - progress_warn(project, cmd.join(' '), output) + progress.puts "[Failed] restoring #{project.full_path} repository".color(:red) end - in_path(path_to_tars(project)) do |dir| - cmd = %W(tar -xf #{path_to_tars(project, dir)} -C #{path_to_project_repo} #{dir}) - - output, status = Gitlab::Popen.popen(cmd) - unless status.zero? - progress_warn(project, cmd.join(' '), output) - end - end + restore_custom_hooks(project) wiki = ProjectWiki.new(project) - path_to_wiki_repo = path_to_repo(wiki) path_to_wiki_bundle = path_to_bundle(wiki) if File.exist?(path_to_wiki_bundle) - progress.print " * #{display_repo_path(wiki)} ... " - - # If a wiki bundle exists, first remove the empty repo - # that was initialized with ProjectWiki.new() and then - # try to restore with 'git clone --bare'. - FileUtils.rm_rf(path_to_wiki_repo) - cmd = %W(#{Gitlab.config.git.bin_path} clone --bare #{path_to_wiki_bundle} #{path_to_wiki_repo}) - - output, status = Gitlab::Popen.popen(cmd) - if status.zero? - progress.puts " [DONE]".color(:green) - else - progress_warn(project, cmd.join(' '), output) + progress.print " * #{wiki.full_path} ... " + begin + gitlab_shell.remove_repository(wiki.repository_storage, wiki.disk_path) if wiki.repository_exists? + wiki.repository.create_from_bundle(path_to_wiki_bundle) + progress.puts "[DONE]".color(:green) + rescue => e + progress.puts "[Failed] restoring #{wiki.full_path} wiki".color(:red) + progress.puts "Error #{e}".color(:red) end end end - - progress.print 'Put GitLab hooks in repositories dirs'.color(:yellow) - cmd = %W(#{Gitlab.config.gitlab_shell.path}/bin/create-hooks) + repository_storage_paths_args - - output, status = Gitlab::Popen.popen(cmd) - if status.zero? - progress.puts " [DONE]".color(:green) - else - puts " [FAILED]".color(:red) - puts "failed: #{cmd}" - puts output - end end # rubocop:enable Metrics/AbcSize @@ -217,10 +221,6 @@ module Backup Gitlab.config.repositories.storages.values.map { |rs| rs.legacy_disk_path } end - def progress - $progress - end - def display_repo_path(project) project.hashed_storage?(:repository) ? 
"#{project.full_path} (#{project.disk_path})" : project.full_path end diff --git a/lib/backup/uploads.rb b/lib/backup/uploads.rb index d46e2cd869d..49b117a7ee3 100644 --- a/lib/backup/uploads.rb +++ b/lib/backup/uploads.rb @@ -2,7 +2,11 @@ require 'backup/files' module Backup class Uploads < Files - def initialize + attr_reader :progress + + def initialize(progress) + @progress = progress + super('uploads', Rails.root.join('public/uploads')) end end diff --git a/lib/tasks/gitlab/backup.rake b/lib/tasks/gitlab/backup.rake index 24e37f6c6cc..e96fbb64372 100644 --- a/lib/tasks/gitlab/backup.rake +++ b/lib/tasks/gitlab/backup.rake @@ -6,7 +6,6 @@ namespace :gitlab do desc "GitLab | Create a backup of the GitLab system" task create: :gitlab_environment do warn_user_is_not_gitlab - configure_cron_mode Rake::Task["gitlab:backup:db:create"].invoke Rake::Task["gitlab:backup:repo:create"].invoke @@ -17,7 +16,7 @@ namespace :gitlab do Rake::Task["gitlab:backup:lfs:create"].invoke Rake::Task["gitlab:backup:registry:create"].invoke - backup = Backup::Manager.new + backup = Backup::Manager.new(progress) backup.pack backup.cleanup backup.remove_old @@ -27,9 +26,8 @@ namespace :gitlab do desc 'GitLab | Restore a previously created backup' task restore: :gitlab_environment do warn_user_is_not_gitlab - configure_cron_mode - backup = Backup::Manager.new + backup = Backup::Manager.new(progress) backup.unpack unless backup.skipped?('db') @@ -49,9 +47,9 @@ namespace :gitlab do # Drop all tables Load the schema to ensure we don't have any newer tables # hanging out from a failed upgrade - $progress.puts 'Cleaning the database ... '.color(:blue) + progress.puts 'Cleaning the database ... '.color(:blue) Rake::Task['gitlab:db:drop_tables'].invoke - $progress.puts 'done'.color(:green) + progress.puts 'done'.color(:green) Rake::Task['gitlab:backup:db:restore'].invoke rescue Gitlab::TaskAbortedByUserError puts "Quitting...".color(:red) @@ -74,173 +72,173 @@ namespace :gitlab do namespace :repo do task create: :gitlab_environment do - $progress.puts "Dumping repositories ...".color(:blue) + progress.puts "Dumping repositories ...".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("repositories") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Repository.new.dump - $progress.puts "done".color(:green) + Backup::Repository.new(progress).dump + progress.puts "done".color(:green) end end task restore: :gitlab_environment do - $progress.puts "Restoring repositories ...".color(:blue) - Backup::Repository.new.restore - $progress.puts "done".color(:green) + progress.puts "Restoring repositories ...".color(:blue) + Backup::Repository.new(progress).restore + progress.puts "done".color(:green) end end namespace :db do task create: :gitlab_environment do - $progress.puts "Dumping database ... ".color(:blue) + progress.puts "Dumping database ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("db") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Database.new.dump - $progress.puts "done".color(:green) + Backup::Database.new(progress).dump + progress.puts "done".color(:green) end end task restore: :gitlab_environment do - $progress.puts "Restoring database ... ".color(:blue) - Backup::Database.new.restore - $progress.puts "done".color(:green) + progress.puts "Restoring database ... 
".color(:blue) + Backup::Database.new(progress).restore + progress.puts "done".color(:green) end end namespace :builds do task create: :gitlab_environment do - $progress.puts "Dumping builds ... ".color(:blue) + progress.puts "Dumping builds ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("builds") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Builds.new.dump - $progress.puts "done".color(:green) + Backup::Builds.new(progress).dump + progress.puts "done".color(:green) end end task restore: :gitlab_environment do - $progress.puts "Restoring builds ... ".color(:blue) - Backup::Builds.new.restore - $progress.puts "done".color(:green) + progress.puts "Restoring builds ... ".color(:blue) + Backup::Builds.new(progress).restore + progress.puts "done".color(:green) end end namespace :uploads do task create: :gitlab_environment do - $progress.puts "Dumping uploads ... ".color(:blue) + progress.puts "Dumping uploads ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("uploads") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Uploads.new.dump - $progress.puts "done".color(:green) + Backup::Uploads.new(progress).dump + progress.puts "done".color(:green) end end task restore: :gitlab_environment do - $progress.puts "Restoring uploads ... ".color(:blue) - Backup::Uploads.new.restore - $progress.puts "done".color(:green) + progress.puts "Restoring uploads ... ".color(:blue) + Backup::Uploads.new(progress).restore + progress.puts "done".color(:green) end end namespace :artifacts do task create: :gitlab_environment do - $progress.puts "Dumping artifacts ... ".color(:blue) + progress.puts "Dumping artifacts ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("artifacts") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Artifacts.new.dump - $progress.puts "done".color(:green) + Backup::Artifacts.new(progress).dump + progress.puts "done".color(:green) end end task restore: :gitlab_environment do - $progress.puts "Restoring artifacts ... ".color(:blue) - Backup::Artifacts.new.restore - $progress.puts "done".color(:green) + progress.puts "Restoring artifacts ... ".color(:blue) + Backup::Artifacts.new(progress).restore + progress.puts "done".color(:green) end end namespace :pages do task create: :gitlab_environment do - $progress.puts "Dumping pages ... ".color(:blue) + progress.puts "Dumping pages ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("pages") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Pages.new.dump - $progress.puts "done".color(:green) + Backup::Pages.new(progress).dump + progress.puts "done".color(:green) end end task restore: :gitlab_environment do - $progress.puts "Restoring pages ... ".color(:blue) - Backup::Pages.new.restore - $progress.puts "done".color(:green) + progress.puts "Restoring pages ... ".color(:blue) + Backup::Pages.new(progress).restore + progress.puts "done".color(:green) end end namespace :lfs do task create: :gitlab_environment do - $progress.puts "Dumping lfs objects ... ".color(:blue) + progress.puts "Dumping lfs objects ... 
".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("lfs") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Lfs.new.dump - $progress.puts "done".color(:green) + Backup::Lfs.new(progress).dump + progress.puts "done".color(:green) end end task restore: :gitlab_environment do - $progress.puts "Restoring lfs objects ... ".color(:blue) - Backup::Lfs.new.restore - $progress.puts "done".color(:green) + progress.puts "Restoring lfs objects ... ".color(:blue) + Backup::Lfs.new(progress).restore + progress.puts "done".color(:green) end end namespace :registry do task create: :gitlab_environment do - $progress.puts "Dumping container registry images ... ".color(:blue) + progress.puts "Dumping container registry images ... ".color(:blue) if Gitlab.config.registry.enabled if ENV["SKIP"] && ENV["SKIP"].include?("registry") - $progress.puts "[SKIPPED]".color(:cyan) + progress.puts "[SKIPPED]".color(:cyan) else - Backup::Registry.new.dump - $progress.puts "done".color(:green) + Backup::Registry.new(progress).dump + progress.puts "done".color(:green) end else - $progress.puts "[DISABLED]".color(:cyan) + progress.puts "[DISABLED]".color(:cyan) end end task restore: :gitlab_environment do - $progress.puts "Restoring container registry images ... ".color(:blue) + progress.puts "Restoring container registry images ... ".color(:blue) if Gitlab.config.registry.enabled - Backup::Registry.new.restore - $progress.puts "done".color(:green) + Backup::Registry.new(progress).restore + progress.puts "done".color(:green) else - $progress.puts "[DISABLED]".color(:cyan) + progress.puts "[DISABLED]".color(:cyan) end end end - def configure_cron_mode + def progress if ENV['CRON'] # We need an object we can say 'puts' and 'print' to; let's use a # StringIO. 
require 'stringio' - $progress = StringIO.new + StringIO.new else - $progress = $stdout + $stdout end end end # namespace end: backup diff --git a/package.json b/package.json index 1382afd41d6..68e5e0a9b6a 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "webpack-prod": "NODE_ENV=production webpack --config config/webpack.config.js" }, "dependencies": { - "@gitlab-org/gitlab-svgs": "^1.22.0", + "@gitlab-org/gitlab-svgs": "^1.23.0", "autosize": "^4.0.0", "axios": "^0.17.1", "babel-core": "^6.26.3", diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js index 3d7ccf432be..e8435116221 100644 --- a/spec/javascripts/api_spec.js +++ b/spec/javascripts/api_spec.js @@ -341,4 +341,25 @@ describe('Api', () => { .catch(done.fail); }); }); + + describe('commitPipelines', () => { + it('fetches pipelines for a given commit', done => { + const projectId = 'example/foobar'; + const commitSha = 'abc123def'; + const expectedUrl = `${dummyUrlRoot}/${projectId}/commit/${commitSha}/pipelines`; + mock.onGet(expectedUrl).reply(200, [ + { + name: 'test', + }, + ]); + + Api.commitPipelines(projectId, commitSha) + .then(({ data }) => { + expect(data.length).toBe(1); + expect(data[0].name).toBe('test'); + }) + .then(done) + .catch(done.fail); + }); + }); }); diff --git a/spec/javascripts/ide/mock_data.js b/spec/javascripts/ide/mock_data.js index a0cb8bae91c..23c75b964d8 100644 --- a/spec/javascripts/ide/mock_data.js +++ b/spec/javascripts/ide/mock_data.js @@ -80,3 +80,37 @@ export const jobs = [ duration: 1, }, ]; + +export const fullPipelinesResponse = { + data: { + count: { + all: 2, + }, + pipelines: [ + { + id: '51', + commit: { + id: 'xxxxxxxxxxxxxxxxxxxx', + }, + details: { + status: { + icon: 'status_failed', + text: 'failed', + }, + }, + }, + { + id: '50', + commit: { + id: 'abc123def456ghi789jkl', + }, + details: { + status: { + icon: 'status_passed', + text: 'passed', + }, + }, + }, + ], + }, +}; diff --git a/spec/javascripts/ide/stores/actions/project_spec.js b/spec/javascripts/ide/stores/actions/project_spec.js index ebd08d95810..8e078ae7138 100644 --- a/spec/javascripts/ide/stores/actions/project_spec.js +++ b/spec/javascripts/ide/stores/actions/project_spec.js @@ -1,14 +1,33 @@ -import { - refreshLastCommitData, -} from '~/ide/stores/actions'; +import Visibility from 'visibilityjs'; +import MockAdapter from 'axios-mock-adapter'; +import { refreshLastCommitData, pollSuccessCallBack } from '~/ide/stores/actions'; import store from '~/ide/stores'; import service from '~/ide/services'; +import axios from '~/lib/utils/axios_utils'; +import { fullPipelinesResponse } from '../../mock_data'; import { resetStore } from '../../helpers'; import testAction from '../../../helpers/vuex_action_helper'; describe('IDE store project actions', () => { + const setProjectState = () => { + store.state.currentProjectId = 'abc/def'; + store.state.currentBranchId = 'master'; + store.state.projects['abc/def'] = { + id: 4, + path_with_namespace: 'abc/def', + branches: { + master: { + commit: { + id: 'abc123def456ghi789jkl', + title: 'example', + }, + }, + }, + }; + }; + beforeEach(() => { - store.state.projects.abcproject = {}; + store.state.projects['abc/def'] = {}; }); afterEach(() => { @@ -17,18 +36,16 @@ describe('IDE store project actions', () => { describe('refreshLastCommitData', () => { beforeEach(() => { - store.state.currentProjectId = 'abcproject'; + store.state.currentProjectId = 'abc/def'; store.state.currentBranchId = 'master'; - store.state.projects.abcproject = { + 
store.state.projects['abc/def'] = { + id: 4, branches: { master: { commit: null, }, }, }; - }); - - it('calls the service', done => { spyOn(service, 'getBranchData').and.returnValue( Promise.resolve({ data: { @@ -36,14 +53,16 @@ describe('IDE store project actions', () => { }, }), ); + }); + it('calls the service', done => { store .dispatch('refreshLastCommitData', { projectId: store.state.currentProjectId, branchId: store.state.currentBranchId, }) .then(() => { - expect(service.getBranchData).toHaveBeenCalledWith('abcproject', 'master'); + expect(service.getBranchData).toHaveBeenCalledWith('abc/def', 'master'); done(); }) @@ -53,16 +72,118 @@ describe('IDE store project actions', () => { it('commits getBranchData', done => { testAction( refreshLastCommitData, - {}, - {}, - [{ - type: 'SET_BRANCH_COMMIT', - payload: { - projectId: 'abcproject', - branchId: 'master', - commit: { id: '123' }, + { + projectId: store.state.currentProjectId, + branchId: store.state.currentBranchId, + }, + store.state, + [ + { + type: 'SET_BRANCH_COMMIT', + payload: { + projectId: 'abc/def', + branchId: 'master', + commit: { id: '123' }, + }, + }, + ], // mutations + [ + { + type: 'getLastCommitPipeline', + payload: { + projectId: 'abc/def', + projectIdNumber: store.state.projects['abc/def'].id, + branchId: 'master', + }, + }, + ], // action + done, + ); + }); + }); + + describe('pipelinePoll', () => { + let mock; + + beforeEach(() => { + setProjectState(); + jasmine.clock().install(); + mock = new MockAdapter(axios); + mock + .onGet('/abc/def/commit/abc123def456ghi789jkl/pipelines') + .reply(200, { data: { foo: 'bar' } }, { 'poll-interval': '10000' }); + }); + + afterEach(() => { + jasmine.clock().uninstall(); + mock.restore(); + store.dispatch('stopPipelinePolling'); + }); + + it('calls service periodically', done => { + spyOn(axios, 'get').and.callThrough(); + spyOn(Visibility, 'hidden').and.returnValue(false); + + store + .dispatch('pipelinePoll') + .then(() => { + jasmine.clock().tick(1000); + + expect(axios.get).toHaveBeenCalled(); + expect(axios.get.calls.count()).toBe(1); + }) + .then(() => new Promise(resolve => requestAnimationFrame(resolve))) + .then(() => { + jasmine.clock().tick(10000); + expect(axios.get.calls.count()).toBe(2); + }) + .then(() => new Promise(resolve => requestAnimationFrame(resolve))) + .then(() => { + jasmine.clock().tick(10000); + expect(axios.get.calls.count()).toBe(3); + }) + .then(() => new Promise(resolve => requestAnimationFrame(resolve))) + .then(() => { + jasmine.clock().tick(10000); + expect(axios.get.calls.count()).toBe(4); + }) + + .then(done) + .catch(done.fail); + }); + }); + + describe('pollSuccessCallBack', () => { + beforeEach(() => { + setProjectState(); + }); + + it('commits correct pipeline', done => { + testAction( + pollSuccessCallBack, + fullPipelinesResponse, + store.state, + [ + { + type: 'SET_LAST_COMMIT_PIPELINE', + payload: { + projectId: 'abc/def', + branchId: 'master', + pipeline: { + id: '50', + commit: { + id: 'abc123def456ghi789jkl', + }, + details: { + status: { + icon: 'status_passed', + text: 'passed', + }, + }, + }, + }, }, - }], // mutations + ], // mutations [], // action done, ); diff --git a/spec/javascripts/ide/stores/mutations/branch_spec.js b/spec/javascripts/ide/stores/mutations/branch_spec.js index 29eb859ddaf..f2f1f2a9a2e 100644 --- a/spec/javascripts/ide/stores/mutations/branch_spec.js +++ b/spec/javascripts/ide/stores/mutations/branch_spec.js @@ -37,4 +37,40 @@ describe('Multi-file store branch mutations', () => { 
expect(localState.projects.Example.branches.master.commit.title).toBe('Example commit'); }); }); + + describe('SET_LAST_COMMIT_PIPELINE', () => { + it('sets the pipeline for the last commit on current project', () => { + localState.projects = { + Example: { + branches: { + master: { + commit: {}, + }, + }, + }, + }; + + mutations.SET_LAST_COMMIT_PIPELINE(localState, { + projectId: 'Example', + branchId: 'master', + pipeline: { + id: '50', + details: { + status: { + icon: 'status_passed', + text: 'passed', + }, + }, + }, + }); + + expect(localState.projects.Example.branches.master.commit.pipeline.id).toBe('50'); + expect(localState.projects.Example.branches.master.commit.pipeline.details.status.text).toBe( + 'passed', + ); + expect(localState.projects.Example.branches.master.commit.pipeline.details.status.icon).toBe( + 'status_passed', + ); + }); + }); }); diff --git a/spec/javascripts/pipelines/empty_state_spec.js b/spec/javascripts/pipelines/empty_state_spec.js index 71f77e5f42e..1e41a7bfe7c 100644 --- a/spec/javascripts/pipelines/empty_state_spec.js +++ b/spec/javascripts/pipelines/empty_state_spec.js @@ -29,7 +29,7 @@ describe('Pipelines Empty State', () => { expect( component.$el.querySelector('p').innerHTML.trim().replace(/\n+\s+/m, ' ').replace(/\s\s+/g, ' '), - ).toContain('Continous Integration can help catch bugs by running your tests automatically,'); + ).toContain('Continuous Integration can help catch bugs by running your tests automatically,'); expect( component.$el.querySelector('p').innerHTML.trim().replace(/\n+\s+/m, ' ').replace(/\s\s+/g, ' '), diff --git a/spec/javascripts/pipelines/mock_data.js b/spec/javascripts/pipelines/mock_data.js index a5a200973d7..03ead6cd8ba 100644 --- a/spec/javascripts/pipelines/mock_data.js +++ b/spec/javascripts/pipelines/mock_data.js @@ -217,7 +217,7 @@ export const pipelineWithStages = { browse_path: '/gitlab-org/gitlab-ee/-/jobs/62411442/artifacts/browse', }, { - name: 'codequality', + name: 'code_quality', expired: false, expire_at: '2018-04-18T14:16:24.484Z', path: '/gitlab-org/gitlab-ee/-/jobs/62411441/artifacts/download', diff --git a/spec/javascripts/pipelines/pipelines_table_row_spec.js b/spec/javascripts/pipelines/pipelines_table_row_spec.js index 05ca4cb9044..68043b91bd0 100644 --- a/spec/javascripts/pipelines/pipelines_table_row_spec.js +++ b/spec/javascripts/pipelines/pipelines_table_row_spec.js @@ -182,7 +182,16 @@ describe('Pipelines Table Row', () => { }); component.$el.querySelector('.js-pipelines-cancel-button').click(); - expect(component.isCancelling).toEqual(true); + }); + + it('renders a loading icon when `cancelingPipeline` matches pipeline id', done => { + component.cancelingPipeline = pipeline.id; + component.$nextTick() + .then(() => { + expect(component.isCancelling).toEqual(true); + }) + .then(done) + .catch(done.fail); }); }); }); diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb index 84688845fa5..23c04a1a101 100644 --- a/spec/lib/backup/manager_spec.rb +++ b/spec/lib/backup/manager_spec.rb @@ -5,6 +5,8 @@ describe Backup::Manager do let(:progress) { StringIO.new } + subject { described_class.new(progress) } + before do allow(progress).to receive(:puts) allow(progress).to receive(:print) diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb index b1ea9c0b622..a44243ac82d 100644 --- a/spec/lib/backup/repository_spec.rb +++ b/spec/lib/backup/repository_spec.rb @@ -3,6 +3,7 @@ require 'spec_helper' describe Backup::Repository do let(:progress) { 
StringIO.new } let!(:project) { create(:project, :wiki_repo) } + subject { described_class.new(progress) } before do allow(progress).to receive(:puts) @@ -24,14 +25,12 @@ describe Backup::Repository do end it 'does not raise error' do - expect { described_class.new.dump }.not_to raise_error + expect { subject.dump }.not_to raise_error end end end describe '#restore' do - subject { described_class.new } - let(:timestamp) { Time.utc(2017, 3, 22) } let(:temp_dirs) do Gitlab.config.repositories.storages.map do |name, storage| @@ -49,14 +48,14 @@ describe Backup::Repository do describe 'command failure' do before do - allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1]) + allow_any_instance_of(Gitlab::Shell).to receive(:create_repository).and_return(false) end context 'hashed storage' do it 'shows the appropriate error' do subject.restore - expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} (#{project.disk_path}) - error") + expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository") end end @@ -66,33 +65,10 @@ describe Backup::Repository do it 'shows the appropriate error' do subject.restore - expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error") + expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository") end end end - - describe 'folders without permissions' do - before do - allow(FileUtils).to receive(:mv).and_raise(Errno::EACCES) - end - - it 'shows error message' do - expect(subject).to receive(:access_denied_error) - subject.restore - end - end - - describe 'folder that is a mountpoint' do - before do - allow(FileUtils).to receive(:mv).and_raise(Errno::EBUSY) - end - - it 'shows error message' do - expect(subject).to receive(:resource_busy_error).and_call_original - - expect { subject.restore }.to raise_error(/is a mountpoint/) - end - end end describe '#empty_repo?' 
do @@ -102,20 +78,20 @@ describe Backup::Repository do it 'invalidates the emptiness cache' do expect(wiki.repository).to receive(:expire_emptiness_caches).once - described_class.new.send(:empty_repo?, wiki) + subject.send(:empty_repo?, wiki) end context 'wiki repo has content' do let!(:wiki_page) { create(:wiki_page, wiki: wiki) } it 'returns true, regardless of bad cache value' do - expect(described_class.new.send(:empty_repo?, wiki)).to be(false) + expect(subject.send(:empty_repo?, wiki)).to be(false) end end context 'wiki repo does not have content' do it 'returns true, regardless of bad cache value' do - expect(described_class.new.send(:empty_repo?, wiki)).to be_truthy + expect(subject.send(:empty_repo?, wiki)).to be_truthy end end end diff --git a/spec/services/clusters/applications/schedule_installation_service_spec.rb b/spec/services/clusters/applications/schedule_installation_service_spec.rb index 473dbcbb692..bca1e71bef2 100644 --- a/spec/services/clusters/applications/schedule_installation_service_spec.rb +++ b/spec/services/clusters/applications/schedule_installation_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' describe Clusters::Applications::ScheduleInstallationService do def count_scheduled - application_class&.with_status(:scheduled)&.count || 0 + application&.class&.with_status(:scheduled)&.count || 0 end shared_examples 'a failing service' do @@ -10,45 +10,42 @@ describe Clusters::Applications::ScheduleInstallationService do expect(ClusterInstallAppWorker).not_to receive(:perform_async) count_before = count_scheduled - expect { service.execute }.to raise_error(StandardError) + expect { service.execute(application) }.to raise_error(StandardError) expect(count_scheduled).to eq(count_before) end end describe '#execute' do - let(:application_class) { Clusters::Applications::Helm } - let(:cluster) { create(:cluster, :project, :provided_by_gcp) } - let(:project) { cluster.project } - let(:service) { described_class.new(project, nil, cluster: cluster, application_class: application_class) } + let(:project) { double(:project) } + let(:service) { described_class.new(project, nil) } - it 'creates a new application' do - allow(ClusterInstallAppWorker).to receive(:perform_async) + context 'when application is installable' do + let(:application) { create(:clusters_applications_helm, :installable) } - expect { service.execute }.to change { application_class.count }.by(1) - end - - it 'make the application scheduled' do - expect(ClusterInstallAppWorker).to receive(:perform_async).with(application_class.application_name, kind_of(Numeric)).once + it 'make the application scheduled' do + expect(ClusterInstallAppWorker).to receive(:perform_async).with(application.name, kind_of(Numeric)).once - expect { service.execute }.to change { application_class.with_status(:scheduled).count }.by(1) + expect { service.execute(application) }.to change { application.class.with_status(:scheduled).count }.by(1) + end end context 'when installation is already in progress' do let(:application) { create(:clusters_applications_helm, :installing) } - let(:cluster) { application.cluster } it_behaves_like 'a failing service' end - context 'when application_class is nil' do - let(:application_class) { nil } + context 'when application is nil' do + let(:application) { nil } it_behaves_like 'a failing service' end context 'when application cannot be persisted' do + let(:application) { create(:clusters_applications_helm) } + before do - expect_any_instance_of(application_class).to 
receive(:make_scheduled!).once.and_raise(ActiveRecord::RecordInvalid) + expect(application).to receive(:make_scheduled!).once.and_raise(ActiveRecord::RecordInvalid) end it_behaves_like 'a failing service' diff --git a/spec/services/projects/open_issues_count_service_spec.rb b/spec/services/projects/open_issues_count_service_spec.rb index f964f9972cd..06b470849b3 100644 --- a/spec/services/projects/open_issues_count_service_spec.rb +++ b/spec/services/projects/open_issues_count_service_spec.rb @@ -2,20 +2,53 @@ require 'spec_helper' describe Projects::OpenIssuesCountService do describe '#count' do - it 'returns the number of open issues' do - project = create(:project) - create(:issue, :opened, project: project) + let(:project) { create(:project) } - expect(described_class.new(project).count).to eq(1) + context 'when user is nil' do + it 'does not include confidential issues in the issue count' do + create(:issue, :opened, project: project) + create(:issue, :opened, confidential: true, project: project) + + expect(described_class.new(project).count).to eq(1) + end end - it 'does not include confidential issues in the issue count' do - project = create(:project) + context 'when user is provided' do + let(:user) { create(:user) } + + context 'when user can read confidential issues' do + before do + project.add_reporter(user) + end + + it 'returns the right count with confidential issues' do + create(:issue, :opened, project: project) + create(:issue, :opened, confidential: true, project: project) + + expect(described_class.new(project, user).count).to eq(2) + end + + it 'uses total_open_issues_count cache key' do + expect(described_class.new(project, user).cache_key_name).to eq('total_open_issues_count') + end + end + + context 'when user cannot read confidential issues' do + before do + project.add_guest(user) + end + + it 'does not include confidential issues' do + create(:issue, :opened, project: project) + create(:issue, :opened, confidential: true, project: project) - create(:issue, :opened, project: project) - create(:issue, :opened, confidential: true, project: project) + expect(described_class.new(project, user).count).to eq(1) + end - expect(described_class.new(project).count).to eq(1) + it 'uses public_open_issues_count cache key' do + expect(described_class.new(project, user).cache_key_name).to eq('public_open_issues_count') + end + end end end end diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb index 72e5c2d66dd..f7b71bf42e3 100644 --- a/spec/support/helpers/login_helpers.rb +++ b/spec/support/helpers/login_helpers.rb @@ -132,6 +132,14 @@ module LoginHelpers env['omniauth.auth'] = OmniAuth.config.mock_auth[provider.to_sym] end + def stub_omniauth_failure(strategy, message_key, exception = nil) + env = @request.env + + env['omniauth.error'] = exception + env['omniauth.error.type'] = message_key.to_sym + env['omniauth.error.strategy'] = strategy + end + def stub_omniauth_saml_config(messages) set_devise_mapping(context: Rails.application) Rails.application.routes.disable_clear_and_finalize = true diff --git a/spec/support/helpers/routes_helpers.rb b/spec/support/helpers/routes_helpers.rb new file mode 100644 index 00000000000..c4129606418 --- /dev/null +++ b/spec/support/helpers/routes_helpers.rb @@ -0,0 +1,7 @@ +module RoutesHelpers + def fake_routes(&block) + @routes = @routes.dup + @routes.formatter.clear + ActionDispatch::Routing::Mapper.new(@routes).instance_exec(&block) + end +end diff --git a/spec/tasks/gitlab/backup_rake_spec.rb 
b/spec/tasks/gitlab/backup_rake_spec.rb index a2e5642a72c..c9252bebb2e 100644 --- a/spec/tasks/gitlab/backup_rake_spec.rb +++ b/spec/tasks/gitlab/backup_rake_spec.rb @@ -125,6 +125,16 @@ describe 'gitlab:app namespace rake task' do expect(Dir.entries(File.join(project.repository.path, 'custom_hooks'))).to include("dummy.txt") end end + + context 'specific backup tasks' do + let(:task_list) { %w(db repo uploads builds artifacts pages lfs registry) } + + it 'prints a progress message to stdout' do + task_list.each do |task| + expect { run_rake_task("gitlab:backup:#{task}:create") }.to output(/Dumping /).to_stdout + end + end + end end context 'tar creation' do diff --git a/spec/workers/update_merge_requests_worker_spec.rb b/spec/workers/update_merge_requests_worker_spec.rb index 0fa19ac84bb..80137815d2b 100644 --- a/spec/workers/update_merge_requests_worker_spec.rb +++ b/spec/workers/update_merge_requests_worker_spec.rb @@ -18,8 +18,13 @@ describe UpdateMergeRequestsWorker do end it 'executes MergeRequests::RefreshService with expected values' do - expect(MergeRequests::RefreshService).to receive(:new).with(project, user).and_call_original - expect_any_instance_of(MergeRequests::RefreshService).to receive(:execute).with(oldrev, newrev, ref) + expect(MergeRequests::RefreshService).to receive(:new) + .with(project, user).and_wrap_original do |method, *args| + method.call(*args).tap do |refresh_service| + expect(refresh_service) + .to receive(:execute).with(oldrev, newrev, ref) + end + end perform end diff --git a/vendor/gitlab-ci-yml/Auto-DevOps.gitlab-ci.yml b/vendor/gitlab-ci-yml/Auto-DevOps.gitlab-ci.yml index a00c6e89a1d..589ebcf1414 100644 --- a/vendor/gitlab-ci-yml/Auto-DevOps.gitlab-ci.yml +++ b/vendor/gitlab-ci-yml/Auto-DevOps.gitlab-ci.yml @@ -77,7 +77,7 @@ test: only: - branches -codequality: +code_quality: image: docker:stable variables: DOCKER_DRIVER: overlay2 @@ -86,9 +86,9 @@ codequality: - docker:stable-dind script: - setup_docker - - codeclimate + - code_quality artifacts: - paths: [codeclimate.json] + paths: [gl-code-quality-report.json] performance: stage: performance @@ -136,7 +136,7 @@ dependency_scanning: artifacts: paths: [gl-dependency-scanning-report.json] -sast:container: +container_scanning: image: docker:stable variables: DOCKER_DRIVER: overlay2 @@ -145,9 +145,9 @@ sast:container: - docker:stable-dind script: - setup_docker - - sast_container + - container_scanning artifacts: - paths: [gl-sast-container-report.json] + paths: [gl-container-scanning-report.json] dast: stage: dast @@ -388,7 +388,7 @@ rollout 100%: # Extract "MAJOR.MINOR" from CI_SERVER_VERSION and generate "MAJOR-MINOR-stable" for Security Products export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/') - function sast_container() { + function container_scanning() { if [[ -n "$CI_REGISTRY_USER" ]]; then echo "Logging to GitLab Container Registry with CI credentials..." docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" "$CI_REGISTRY" @@ -406,10 +406,10 @@ rollout 100%: retries=0 echo "Waiting for clair daemon to start" while( ! wget -T 10 -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; echo -n "." ; if [ $retries -eq 10 ] ; then echo " Timeout, aborting." 
; exit 1 ; fi ; retries=$(($retries+1)) ; done
- ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-sast-container-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true
+ ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-container-scanning-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true
}

- function codeclimate() {
+ function code_quality() {
docker run --env SOURCE_CODE="$PWD" \
--volume "$PWD":/code \
--volume /var/run/docker.sock:/var/run/docker.sock \
diff --git a/yarn.lock b/yarn.lock
index e0d46609f7d..bbc4672f84b 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -54,9 +54,9 @@ lodash "^4.2.0"
to-fast-properties "^2.0.0"
-"@gitlab-org/gitlab-svgs@^1.22.0":
- version "1.22.0"
- resolved "https://registry.yarnpkg.com/@gitlab-org/gitlab-svgs/-/gitlab-svgs-1.22.0.tgz#9f2daefebcda911cba8341313c8c464c8087fe44"
+"@gitlab-org/gitlab-svgs@^1.23.0":
+ version "1.23.0"
+ resolved "https://registry.yarnpkg.com/@gitlab-org/gitlab-svgs/-/gitlab-svgs-1.23.0.tgz#42047aeedcc06bc12d417ed1efadad1749af9670"
"@mrmlnc/readdir-enhanced@^2.2.1":
version "2.2.1"
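
The backup rake changes in this commit drop the $progress global in favour of passing a progress IO object into each Backup::* class, with cron runs writing to a StringIO instead of $stdout. Below is a minimal sketch of that injection pattern; the BackupSketch module and its Uploads class are simplified stand-ins for illustration only, not the actual GitLab classes.

    require 'stringio'

    module BackupSketch
      # Hypothetical component mirroring Backup::Uploads: it receives the
      # progress IO at construction time instead of reading a global.
      class Uploads
        attr_reader :progress

        def initialize(progress)
          @progress = progress
        end

        def dump
          progress.puts 'Dumping uploads ... '
          # ... copy the files here ...
          progress.puts 'done'
        end
      end

      # Mirrors the rake-level `progress` helper: quiet output under CRON.
      def self.progress
        ENV['CRON'] ? StringIO.new : $stdout
      end
    end

    # Example usage (hypothetical):
    BackupSketch::Uploads.new(BackupSketch.progress).dump

Injecting the IO keeps rake output silent when CRON=1 while letting specs hand in a StringIO and assert on what was printed, which is the approach the updated manager and repository specs take.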