52 files changed, 618 insertions, 1985 deletions
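The central change in this series gives `Ci::Stage` a persisted status: every `CommitStatus` transition now enqueues `StageUpdateWorker`, which calls `Ci::Stage#update_status` to recompute the stage's status from its latest jobs under optimistic locking. A minimal sketch of that derivation in plain Ruby follows; `derive_stage_status` and `latest_status` are illustrative names, not part of the codebase, and the real model dispatches to state machine events (`run`, `succeed`, `drop`, ...) rather than returning a string.

```ruby
# Minimal sketch, assuming plain Ruby with no Rails, Sidekiq, or state machine gem
# loaded. `latest_status` stands in for `statuses.latest.status`; STATUSES_ENUM copies
# the HasStatus::STATUSES_ENUM constant added in this changeset.
STATUSES_ENUM = { created: 0, pending: 1, running: 2, success: 3,
                  failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze

# Ci::Stage#update_status maps the composite status of the stage's latest jobs onto
# a state machine event; anything unrecognised falls through to `skip`, matching the
# `else skip` branch in the model.
def derive_stage_status(latest_status)
  known = %w[pending running success failed canceled manual skipped]
  known.include?(latest_status) ? latest_status : 'skipped'
end

puts derive_stage_status('running')                 # => running
puts STATUSES_ENUM[derive_stage_status(nil).to_sym] # => 6 (skipped)
```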
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION index 9eb2aa3f109..be386c9ede3 100644 --- a/GITALY_SERVER_VERSION +++ b/GITALY_SERVER_VERSION @@ -1 +1 @@ -0.32.0 +0.33.0 diff --git a/app/assets/stylesheets/framework/dropdowns.scss b/app/assets/stylesheets/framework/dropdowns.scss index 5e768df972f..5f270e288ae 100644 --- a/app/assets/stylesheets/framework/dropdowns.scss +++ b/app/assets/stylesheets/framework/dropdowns.scss @@ -761,7 +761,7 @@ &:hover, &:active, &:focus { - background-color: $gray-darker; + background-color: $dropdown-item-hover-bg; color: $gl-text-color; } diff --git a/app/assets/stylesheets/framework/selects.scss b/app/assets/stylesheets/framework/selects.scss index 40e654f4838..f7a0b355bf1 100644 --- a/app/assets/stylesheets/framework/selects.scss +++ b/app/assets/stylesheets/framework/selects.scss @@ -264,3 +264,41 @@ .ajax-users-dropdown { min-width: 250px !important; } + +// TODO: change global style +.ajax-project-dropdown { + &.select2-drop { + color: $gl-text-color; + } + + .select2-results { + .select2-no-results, + .select2-searching, + .select2-ajax-error, + .select2-selection-limit { + background: transparent; + } + + .select2-result { + padding: 0 1px; + + .select2-match { + font-weight: bold; + text-decoration: none; + } + + .select2-result-label { + padding: #{$gl-padding / 2} $gl-padding; + } + + &.select2-highlighted { + background-color: transparent !important; + color: $gl-text-color; + + .select2-result-label { + background-color: $dropdown-item-hover-bg; + } + } + } + } +} diff --git a/app/assets/stylesheets/framework/variables.scss b/app/assets/stylesheets/framework/variables.scss index 3c109a5a929..225d116e9c7 100644 --- a/app/assets/stylesheets/framework/variables.scss +++ b/app/assets/stylesheets/framework/variables.scss @@ -294,7 +294,7 @@ $dropdown-input-focus-shadow: rgba($dropdown-input-focus-border, .4); $dropdown-loading-bg: rgba(#fff, .6); $dropdown-chevron-size: 10px; $dropdown-toggle-active-border-color: darken($border-color, 14%); - +$dropdown-item-hover-bg: $gray-darker; /* * Filtered Search diff --git a/app/models/ci/pipeline_schedule.rb b/app/models/ci/pipeline_schedule.rb index 085eeeae157..e7e02587759 100644 --- a/app/models/ci/pipeline_schedule.rb +++ b/app/models/ci/pipeline_schedule.rb @@ -9,7 +9,7 @@ module Ci belongs_to :owner, class_name: 'User' has_one :last_pipeline, -> { order(id: :desc) }, class_name: 'Ci::Pipeline' has_many :pipelines - has_many :variables, class_name: 'Ci::PipelineScheduleVariable' + has_many :variables, class_name: 'Ci::PipelineScheduleVariable', validate: false validates :cron, unless: :importing?, cron: true, presence: { unless: :importing? } validates :cron_timezone, cron_timezone: true, presence: { unless: :importing? 
} diff --git a/app/models/ci/pipeline_schedule_variable.rb b/app/models/ci/pipeline_schedule_variable.rb index 1ff177616e8..ee5b8733fac 100644 --- a/app/models/ci/pipeline_schedule_variable.rb +++ b/app/models/ci/pipeline_schedule_variable.rb @@ -4,5 +4,7 @@ module Ci include HasVariable belongs_to :pipeline_schedule + + validates :key, uniqueness: { scope: :pipeline_schedule_id } end end diff --git a/app/models/ci/stage.rb b/app/models/ci/stage.rb index 59570924c8d..4ee972fa68d 100644 --- a/app/models/ci/stage.rb +++ b/app/models/ci/stage.rb @@ -1,11 +1,66 @@ module Ci class Stage < ActiveRecord::Base extend Ci::Model + include Importable + include HasStatus + include Gitlab::OptimisticLocking + + enum status: HasStatus::STATUSES_ENUM belongs_to :project belongs_to :pipeline - has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id - has_many :builds, foreign_key: :commit_id + has_many :statuses, class_name: 'CommitStatus', foreign_key: :stage_id + has_many :builds, foreign_key: :stage_id + + validates :project, presence: true, unless: :importing? + validates :pipeline, presence: true, unless: :importing? + validates :name, presence: true, unless: :importing? + + state_machine :status, initial: :created do + event :enqueue do + transition created: :pending + transition [:success, :failed, :canceled, :skipped] => :running + end + + event :run do + transition any - [:running] => :running + end + + event :skip do + transition any - [:skipped] => :skipped + end + + event :drop do + transition any - [:failed] => :failed + end + + event :succeed do + transition any - [:success] => :success + end + + event :cancel do + transition any - [:canceled] => :canceled + end + + event :block do + transition any - [:manual] => :manual + end + end + + def update_status + retry_optimistic_lock(self) do + case statuses.latest.status + when 'pending' then enqueue + when 'running' then run + when 'success' then succeed + when 'failed' then drop + when 'canceled' then cancel + when 'manual' then block + when 'skipped' then skip + else skip + end + end + end end end diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb index 07cec63b939..842c6e5cb50 100644 --- a/app/models/commit_status.rb +++ b/app/models/commit_status.rb @@ -39,14 +39,14 @@ class CommitStatus < ActiveRecord::Base scope :after_stage, -> (index) { where('stage_idx > ?', index) } state_machine :status do - event :enqueue do - transition [:created, :skipped, :manual] => :pending - end - event :process do transition [:skipped, :manual] => :created end + event :enqueue do + transition [:created, :skipped, :manual] => :pending + end + event :run do transition pending: :running end @@ -91,6 +91,7 @@ class CommitStatus < ActiveRecord::Base end end + StageUpdateWorker.perform_async(commit_status.stage_id) ExpireJobCacheWorker.perform_async(commit_status.id) end end diff --git a/app/models/concerns/has_status.rb b/app/models/concerns/has_status.rb index 32af5566135..3803e18a96e 100644 --- a/app/models/concerns/has_status.rb +++ b/app/models/concerns/has_status.rb @@ -8,6 +8,8 @@ module HasStatus ACTIVE_STATUSES = %w[pending running].freeze COMPLETED_STATUSES = %w[success failed canceled skipped].freeze ORDERED_STATUSES = %w[failed pending running manual canceled success skipped created].freeze + STATUSES_ENUM = { created: 0, pending: 1, running: 2, success: 3, + failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze class_methods do def status_sql diff --git a/app/models/event.rb b/app/models/event.rb index 
f2a560a6b56..15ee170ca75 100644 --- a/app/models/event.rb +++ b/app/models/event.rb @@ -83,6 +83,10 @@ class Event < ActiveRecord::Base self.inheritance_column = 'action' class << self + def model_name + ActiveModel::Name.new(self, nil, 'event') + end + def find_sti_class(action) if action.to_i == PUSHED PushEvent @@ -438,6 +442,12 @@ class Event < ActiveRecord::Base EventForMigration.create!(new_attributes) end + def to_partial_path + # We are intentionally using `Event` rather than `self.class` so that + # subclasses also use the `Event` implementation. + Event._to_partial_path + end + private def recent_update? diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb index 884b681ff81..d0ba9f89460 100644 --- a/app/services/ci/create_pipeline_service.rb +++ b/app/services/ci/create_pipeline_service.rb @@ -176,9 +176,14 @@ module Ci end def error(message, save: false) - pipeline.errors.add(:base, message) - pipeline.drop if save - pipeline + pipeline.tap do + pipeline.errors.add(:base, message) + + if save + pipeline.drop + update_merge_requests_head_pipeline + end + end end def pipeline_created_counter diff --git a/app/services/merge_requests/create_from_issue_service.rb b/app/services/merge_requests/create_from_issue_service.rb index e9e4513e0d0..da39a380451 100644 --- a/app/services/merge_requests/create_from_issue_service.rb +++ b/app/services/merge_requests/create_from_issue_service.rb @@ -4,7 +4,7 @@ module MergeRequests return error('Invalid issue iid') unless issue_iid.present? && issue.present? params[:label_ids] = issue.label_ids if issue.label_ids.any? - + result = CreateBranchService.new(project, current_user).execute(branch_name, ref) return result if result[:status] == :error diff --git a/app/workers/stage_update_worker.rb b/app/workers/stage_update_worker.rb new file mode 100644 index 00000000000..eef0b11e70b --- /dev/null +++ b/app/workers/stage_update_worker.rb @@ -0,0 +1,10 @@ +class StageUpdateWorker + include Sidekiq::Worker + include PipelineQueue + + def perform(stage_id) + Ci::Stage.find_by(id: stage_id).try do |stage| + stage.update_status + end + end +end diff --git a/changelogs/unreleased/fix-gb-fix-head-pipeline-when-pipeline-has-errors.yml b/changelogs/unreleased/fix-gb-fix-head-pipeline-when-pipeline-has-errors.yml new file mode 100644 index 00000000000..ede8031a501 --- /dev/null +++ b/changelogs/unreleased/fix-gb-fix-head-pipeline-when-pipeline-has-errors.yml @@ -0,0 +1,5 @@ +--- +title: Fix merge request pipeline status when pipeline has errors +merge_request: 13664 +author: +type: fixed diff --git a/changelogs/unreleased/zj-remove-ci-api-v1.yml b/changelogs/unreleased/zj-remove-ci-api-v1.yml new file mode 100644 index 00000000000..8f2dc321b36 --- /dev/null +++ b/changelogs/unreleased/zj-remove-ci-api-v1.yml @@ -0,0 +1,5 @@ +--- +title: Remove CI API v1 +merge_request: +author: +type: removed diff --git a/config/routes/ci.rb b/config/routes/ci.rb index 8d23aa8fbf6..cbd4c2db852 100644 --- a/config/routes/ci.rb +++ b/config/routes/ci.rb @@ -1,8 +1,4 @@ namespace :ci do - # CI API - Ci::API::API.logger Rails.logger - mount Ci::API::API => '/api' - resource :lint, only: [:show, :create] root to: redirect('/') diff --git a/db/migrate/20170711145320_add_status_to_ci_stages.rb b/db/migrate/20170711145320_add_status_to_ci_stages.rb new file mode 100644 index 00000000000..d497a61a959 --- /dev/null +++ b/db/migrate/20170711145320_add_status_to_ci_stages.rb @@ -0,0 +1,9 @@ +class AddStatusToCiStages < ActiveRecord::Migration 
+ include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + def change + add_column :ci_stages, :status, :integer + end +end diff --git a/db/migrate/20170720111708_add_lock_version_to_ci_stages.rb b/db/migrate/20170720111708_add_lock_version_to_ci_stages.rb new file mode 100644 index 00000000000..e1c4f033286 --- /dev/null +++ b/db/migrate/20170720111708_add_lock_version_to_ci_stages.rb @@ -0,0 +1,9 @@ +class AddLockVersionToCiStages < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + def change + add_column :ci_stages, :lock_version, :integer + end +end diff --git a/db/post_migrate/20170711145558_migrate_stages_statuses.rb b/db/post_migrate/20170711145558_migrate_stages_statuses.rb new file mode 100644 index 00000000000..5a24fb1307f --- /dev/null +++ b/db/post_migrate/20170711145558_migrate_stages_statuses.rb @@ -0,0 +1,33 @@ +class MigrateStagesStatuses < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + disable_ddl_transaction! + + BATCH_SIZE = 10000 + RANGE_SIZE = 1000 + MIGRATION = 'MigrateStageStatus'.freeze + + class Stage < ActiveRecord::Base + self.table_name = 'ci_stages' + include ::EachBatch + end + + def up + Stage.where(status: nil).each_batch(of: BATCH_SIZE) do |relation, index| + relation.each_batch(of: RANGE_SIZE) do |batch| + range = relation.pluck('MIN(id)', 'MAX(id)').first + schedule = index * 5.minutes + + BackgroundMigrationWorker.perform_in(schedule, MIGRATION, range) + end + end + end + + def down + disable_statement_timeout + + update_column_in_batches(:ci_stages, :status, nil) + end +end diff --git a/db/schema.rb b/db/schema.rb index 80f8cde1818..c31bff3a8f2 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -379,6 +379,8 @@ ActiveRecord::Schema.define(version: 20170820100558) do t.datetime "created_at" t.datetime "updated_at" t.string "name" + t.integer "status" + t.integer "lock_version" end add_index "ci_stages", ["pipeline_id", "name"], name: "index_ci_stages_on_pipeline_id_and_name", using: :btree diff --git a/doc/api/README.md b/doc/api/README.md index 8acb2145f1a..266b5f018d9 100644 --- a/doc/api/README.md +++ b/doc/api/README.md @@ -55,15 +55,10 @@ following locations: - [Tags](tags.md) - [Todos](todos.md) - [Users](users.md) -- [Validate CI configuration](ci/lint.md) +- [Validate CI configuration](lint.md) - [V3 to V4](v3_to_v4.md) - [Version](version.md) -The following documentation is for the [internal CI API](ci/README.md): - -- [Builds](ci/builds.md) -- [Runners](ci/runners.md) - ## Road to GraphQL Going forward, we will start on moving to diff --git a/doc/api/ci/README.md b/doc/api/ci/README.md deleted file mode 100644 index 96a281e27c8..00000000000 --- a/doc/api/ci/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# GitLab CI API - -## Purpose - -The main purpose of GitLab CI API is to provide the necessary data and context -for GitLab CI Runners. - -All relevant information about the consumer API can be found in a -[separate document](../../api/README.md). - -## API Prefix - -The current CI API prefix is `/ci/api/v1`. - -You need to prepend this prefix to all examples in this documentation, like: - -```bash -GET /ci/api/v1/builds/:id/artifacts -``` - -## Resources - -- [Builds](builds.md) -- [Runners](runners.md) diff --git a/doc/api/ci/builds.md b/doc/api/ci/builds.md deleted file mode 100644 index c8374d94716..00000000000 --- a/doc/api/ci/builds.md +++ /dev/null @@ -1,147 +0,0 @@ -# Builds API - -API used by runners to receive and update builds. 
- ->**Note:** -This API is intended to be used only by Runners as their own -communication channel. For the consumer API see the -[Jobs API](../jobs.md). - -## Authentication - -This API uses two types of authentication: - -1. Unique Runner's token which is the token assigned to the Runner after it - has been registered. - -2. Using the build authorization token. - This is project's CI token that can be found under the **Builds** section of - a project's settings. The build authorization token can be passed as a - parameter or a value of `BUILD-TOKEN` header. - -These two methods of authentication are interchangeable. - -## Builds - -### Runs oldest pending build by runner - -``` -POST /ci/api/v1/builds/register -``` - -| Attribute | Type | Required | Description | -|-----------|---------|----------|---------------------| -| `token` | string | yes | Unique runner token | - - -``` -curl --request POST "https://gitlab.example.com/ci/api/v1/builds/register" --form "token=t0k3n" -``` - -**Responses:** - -| Status | Data |Description | -|--------|------|---------------------------------------------------------------------------| -| `201` | yes | When a build is scheduled for a runner | -| `204` | no | When no builds are scheduled for a runner (for GitLab Runner >= `v1.3.0`) | -| `403` | no | When invalid token is used or no token is sent | -| `404` | no | When no builds are scheduled for a runner (for GitLab Runner < `v1.3.0`) **or** when the runner is set to `paused` in GitLab runner's configuration page | - -### Update details of an existing build - -``` -PUT /ci/api/v1/builds/:id -``` - -| Attribute | Type | Required | Description | -|-----------|---------|----------|----------------------| -| `id` | integer | yes | The ID of a project | -| `token` | string | yes | Unique runner token | -| `state` | string | no | The state of a build | -| `trace` | string | no | The trace of a build | - -``` -curl --request PUT "https://gitlab.example.com/ci/api/v1/builds/1234" --form "token=t0k3n" --form "state=running" --form "trace=Running git clone...\n" -``` - -### Incremental build trace update - -Using this method you need to send trace content as a request body. You also need to provide the `Content-Range` header -with a range of sent trace part. Note that you need to send parts in the proper order, so the begining of the part -must start just after the end of the previous part. If you provide the wrong part, then GitLab CI API will return `416 -Range Not Satisfiable` response with a header `Range: 0-X`, where `X` is the current trace length. - -For example, if you receive `Range: 0-11` in the response, then your next part must contain a `Content-Range: 11-...` -header and a trace part covered by this range. - -For a valid update API will return `202` response with: -* `Build-Status: {status}` header containing current status of the build, -* `Range: 0-{length}` header with the current trace length. 
- -``` -PATCH /ci/api/v1/builds/:id/trace.txt -``` - -Parameters: - -| Attribute | Type | Required | Description | -|-----------|---------|----------|----------------------| -| `id` | integer | yes | The ID of a build | - -Headers: - -| Attribute | Type | Required | Description | -|-----------------|---------|----------|-----------------------------------| -| `BUILD-TOKEN` | string | yes | The build authorization token | -| `Content-Range` | string | yes | Bytes range of trace that is sent | - -``` -curl --request PATCH "https://gitlab.example.com/ci/api/v1/builds/1234/trace.txt" --header "BUILD-TOKEN=build_t0k3n" --header "Content-Range=0-21" --data "Running git clone...\n" -``` - - -### Upload artifacts to build - -``` -POST /ci/api/v1/builds/:id/artifacts -``` - -| Attribute | Type | Required | Description | -|-----------|---------|----------|-------------------------------| -| `id` | integer | yes | The ID of a build | -| `token` | string | yes | The build authorization token | -| `file` | mixed | yes | Artifacts file | - -``` -curl --request POST "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" --form "token=build_t0k3n" --form "file=@/path/to/file" -``` - -### Download the artifacts file from build - -``` -GET /ci/api/v1/builds/:id/artifacts -``` - -| Attribute | Type | Required | Description | -|-----------|---------|----------|-------------------------------| -| `id` | integer | yes | The ID of a build | -| `token` | string | yes | The build authorization token | - -``` -curl "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" --form "token=build_t0k3n" -``` - -### Remove the artifacts file from build - -``` -DELETE /ci/api/v1/builds/:id/artifacts -``` - -| Attribute | Type | Required | Description | -|-----------|---------|----------|-------------------------------| -| ` id` | integer | yes | The ID of a build | -| `token` | string | yes | The build authorization token | - -``` -curl --request DELETE "https://gitlab.example.com/ci/api/v1/builds/1234/artifacts" --form "token=build_t0k3n" -``` diff --git a/doc/api/ci/runners.md b/doc/api/ci/runners.md deleted file mode 100644 index 342c039dad8..00000000000 --- a/doc/api/ci/runners.md +++ /dev/null @@ -1,59 +0,0 @@ -# Register and Delete Runners API - -API used by Runners to register and delete themselves. - ->**Note:** -This API is intended to be used only by Runners as their own -communication channel. For the consumer API see the -[new Runners API](../runners.md). - -## Authentication - -This API uses two types of authentication: - -1. Unique Runner's token, which is the token assigned to the Runner after it - has been registered. This token can be found on the Runner's edit page (go to - **Project > Runners**, select one of the Runners listed under **Runners activated for - this project**). - -2. Using Runners' registration token. - This is a token that can be found in project's settings. - It can also be found in the **Admin > Runners** settings area. - There are two types of tokens you can pass: shared Runner registration - token or project specific registration token. - -## Register a new runner - -Used to make GitLab CI aware of available runners. 
- -```sh -POST /ci/api/v1/runners/register -``` - -| Attribute | Type | Required | Description | -| --------- | ------- | --------- | ----------- | -| `token` | string | yes | Runner's registration token | - -Example request: - -```sh -curl --request POST "https://gitlab.example.com/ci/api/v1/runners/register" --form "token=t0k3n" -``` - -## Delete a Runner - -Used to remove a Runner. - -```sh -DELETE /ci/api/v1/runners/delete -``` - -| Attribute | Type | Required | Description | -| --------- | ------- | --------- | ----------- | -| `token` | string | yes | Unique Runner's token | - -Example request: - -```sh -curl --request DELETE "https://gitlab.example.com/ci/api/v1/runners/delete" --form "token=t0k3n" -``` diff --git a/doc/api/ci/lint.md b/doc/api/lint.md index e4a6dc809b1..bd5a216a99d 100644 --- a/doc/api/ci/lint.md +++ b/doc/api/lint.md @@ -5,7 +5,7 @@ Checks if your .gitlab-ci.yml file is valid. ``` -POST ci/lint +POST /lint ``` | Attribute | Type | Required | Description | @@ -49,3 +49,4 @@ Example responses: ``` [ce-5953]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/5953 + diff --git a/doc/ci/api/README.md b/doc/ci/api/README.md deleted file mode 100644 index 98f37935427..00000000000 --- a/doc/ci/api/README.md +++ /dev/null @@ -1 +0,0 @@ -This document was moved to a [new location](../../api/ci/README.md). diff --git a/doc/ci/api/builds.md b/doc/ci/api/builds.md deleted file mode 100644 index 0563a367609..00000000000 --- a/doc/ci/api/builds.md +++ /dev/null @@ -1 +0,0 @@ -This document was moved to a [new location](../../api/ci/builds.md). diff --git a/doc/ci/api/runners.md b/doc/ci/api/runners.md deleted file mode 100644 index 1027363851c..00000000000 --- a/doc/ci/api/runners.md +++ /dev/null @@ -1 +0,0 @@ -This document was moved to a [new location](../../api/ci/runners.md). diff --git a/doc/topics/authentication/index.md b/doc/topics/authentication/index.md index 0c0d482499a..fac91935a45 100644 --- a/doc/topics/authentication/index.md +++ b/doc/topics/authentication/index.md @@ -37,7 +37,6 @@ This page gathers all the resources for the topic **Authentication** within GitL - [Private Tokens](../../api/README.md#private-tokens) - [Impersonation tokens](../../api/README.md#impersonation-tokens) - [GitLab as an OAuth2 provider](../../api/oauth2.md#gitlab-as-an-oauth2-provider) -- [GitLab Runner API - Authentication](../../api/ci/runners.md#authentication) ## Third-party resources diff --git a/lib/ci/api/api.rb b/lib/ci/api/api.rb deleted file mode 100644 index 24bb3649a76..00000000000 --- a/lib/ci/api/api.rb +++ /dev/null @@ -1,39 +0,0 @@ -module Ci - module API - class API < Grape::API - include ::API::APIGuard - version 'v1', using: :path - - rescue_from ActiveRecord::RecordNotFound do - rack_response({ 'message' => '404 Not found' }.to_json, 404) - end - - # Retain 405 error rather than a 500 error for Grape 0.15.0+. - # https://github.com/ruby-grape/grape/blob/a3a28f5b5dfbb2797442e006dbffd750b27f2a76/UPGRADING.md#changes-to-method-not-allowed-routes - rescue_from Grape::Exceptions::MethodNotAllowed do |e| - error! e.message, e.status, e.headers - end - - rescue_from Grape::Exceptions::Base do |e| - error! 
e.message, e.status, e.headers - end - - rescue_from :all do |exception| - handle_api_exception(exception) - end - - content_type :txt, 'text/plain' - content_type :json, 'application/json' - format :json - - helpers ::SentryHelper - helpers ::Ci::API::Helpers - helpers ::API::Helpers - helpers Gitlab::CurrentSettings - - mount ::Ci::API::Builds - mount ::Ci::API::Runners - mount ::Ci::API::Triggers - end - end -end diff --git a/lib/ci/api/builds.rb b/lib/ci/api/builds.rb deleted file mode 100644 index 79058c02ce5..00000000000 --- a/lib/ci/api/builds.rb +++ /dev/null @@ -1,219 +0,0 @@ -module Ci - module API - # Builds API - class Builds < Grape::API - resource :builds do - # Runs oldest pending build by runner - Runners only - # - # Parameters: - # token (required) - The uniq token of runner - # - # Example Request: - # POST /builds/register - post "register" do - authenticate_runner! - required_attributes! [:token] - not_found! unless current_runner.active? - update_runner_info - - if current_runner.is_runner_queue_value_latest?(params[:last_update]) - header 'X-GitLab-Last-Update', params[:last_update] - Gitlab::Metrics.add_event(:build_not_found_cached) - return build_not_found! - end - - new_update = current_runner.ensure_runner_queue_value - - result = Ci::RegisterJobService.new(current_runner).execute - - if result.valid? - if result.build - Gitlab::Metrics.add_event(:build_found, - project: result.build.project.full_path) - - present result.build, with: Entities::BuildDetails - else - Gitlab::Metrics.add_event(:build_not_found) - - header 'X-GitLab-Last-Update', new_update - - build_not_found! - end - else - # We received build that is invalid due to concurrency conflict - Gitlab::Metrics.add_event(:build_invalid) - conflict! - end - end - - # Update an existing build - Runners only - # - # Parameters: - # id (required) - The ID of a project - # state (optional) - The state of a build - # trace (optional) - The trace of a build - # Example Request: - # PUT /builds/:id - put ":id" do - authenticate_runner! - build = Ci::Build.where(runner_id: current_runner.id).running.find(params[:id]) - validate_build!(build) - - update_runner_info - - build.trace.set(params[:trace]) if params[:trace] - - Gitlab::Metrics.add_event(:update_build, - project: build.project.full_path) - - case params[:state].to_s - when 'success' - build.success - when 'failed' - build.drop - end - end - - # Send incremental log update - Runners only - # - # Parameters: - # id (required) - The ID of a build - # Body: - # content of logs to append - # Headers: - # Content-Range (required) - range of content that was sent - # BUILD-TOKEN (required) - The build authorization token - # Example Request: - # PATCH /builds/:id/trace.txt - patch ":id/trace.txt" do - build = authenticate_build! 
- - error!('400 Missing header Content-Range', 400) unless request.headers.key?('Content-Range') - content_range = request.headers['Content-Range'] - content_range = content_range.split('-') - - stream_size = build.trace.append(request.body.read, content_range[0].to_i) - if stream_size < 0 - return error!('416 Range Not Satisfiable', 416, { 'Range' => "0-#{-stream_size}" }) - end - - status 202 - header 'Build-Status', build.status - header 'Range', "0-#{stream_size}" - end - - # Authorize artifacts uploading for build - Runners only - # - # Parameters: - # id (required) - The ID of a build - # token (required) - The build authorization token - # filesize (optional) - the size of uploaded file - # Example Request: - # POST /builds/:id/artifacts/authorize - post ":id/artifacts/authorize" do - require_gitlab_workhorse! - Gitlab::Workhorse.verify_api_request!(headers) - not_allowed! unless Gitlab.config.artifacts.enabled - build = authenticate_build! - forbidden!('build is not running') unless build.running? - - if params[:filesize] - file_size = params[:filesize].to_i - file_to_large! unless file_size < max_artifacts_size - end - - status 200 - content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE - Gitlab::Workhorse.artifact_upload_ok - end - - # Upload artifacts to build - Runners only - # - # Parameters: - # id (required) - The ID of a build - # token (required) - The build authorization token - # file (required) - Artifacts file - # expire_in (optional) - Specify when artifacts should expire (ex. 7d) - # Parameters (accelerated by GitLab Workhorse): - # file.path - path to locally stored body (generated by Workhorse) - # file.name - real filename as send in Content-Disposition - # file.type - real content type as send in Content-Type - # metadata.path - path to locally stored body (generated by Workhorse) - # metadata.name - filename (generated by Workhorse) - # Headers: - # BUILD-TOKEN (required) - The build authorization token, the same as token - # Body: - # The file content - # - # Example Request: - # POST /builds/:id/artifacts - post ":id/artifacts" do - require_gitlab_workhorse! - not_allowed! unless Gitlab.config.artifacts.enabled - build = authenticate_build! - forbidden!('Build is not running!') unless build.running? - - artifacts_upload_path = ArtifactUploader.artifacts_upload_path - artifacts = uploaded_file(:file, artifacts_upload_path) - metadata = uploaded_file(:metadata, artifacts_upload_path) - - bad_request!('Missing artifacts file!') unless artifacts - file_to_large! unless artifacts.size < max_artifacts_size - - build.artifacts_file = artifacts - build.artifacts_metadata = metadata - build.artifacts_expire_in = - params['expire_in'] || - Gitlab::CurrentSettings.current_application_settings - .default_artifacts_expire_in - - if build.save - present(build, with: Entities::BuildDetails) - else - render_validation_error!(build) - end - end - - # Download the artifacts file from build - Runners only - # - # Parameters: - # id (required) - The ID of a build - # token (required) - The build authorization token - # Headers: - # BUILD-TOKEN (required) - The build authorization token, the same as token - # Example Request: - # GET /builds/:id/artifacts - get ":id/artifacts" do - build = authenticate_build! - artifacts_file = build.artifacts_file - - unless artifacts_file.exists? - not_found! - end - - unless artifacts_file.file_storage? 
- return redirect_to build.artifacts_file.url - end - - present_file!(artifacts_file.path, artifacts_file.filename) - end - - # Remove the artifacts file from build - Runners only - # - # Parameters: - # id (required) - The ID of a build - # token (required) - The build authorization token - # Headers: - # BUILD-TOKEN (required) - The build authorization token, the same as token - # Example Request: - # DELETE /builds/:id/artifacts - delete ":id/artifacts" do - build = authenticate_build! - - status(200) - build.erase_artifacts! - end - end - end - end -end diff --git a/lib/ci/api/entities.rb b/lib/ci/api/entities.rb deleted file mode 100644 index 31f66dd5a58..00000000000 --- a/lib/ci/api/entities.rb +++ /dev/null @@ -1,93 +0,0 @@ -module Ci - module API - module Entities - class Commit < Grape::Entity - expose :id, :sha, :project_id, :created_at - expose :status, :finished_at, :duration - expose :git_commit_message, :git_author_name, :git_author_email - end - - class CommitWithBuilds < Commit - expose :builds - end - - class ArtifactFile < Grape::Entity - expose :filename, :size - end - - class BuildOptions < Grape::Entity - expose :image - expose :services - expose :artifacts - expose :cache - expose :dependencies - expose :after_script - end - - class Build < Grape::Entity - expose :id, :ref, :tag, :sha, :status - expose :name, :token, :stage - expose :project_id - expose :project_name - expose :artifacts_file, using: ArtifactFile, if: ->(build, _) { build.artifacts? } - end - - class BuildCredentials < Grape::Entity - expose :type, :url, :username, :password - end - - class BuildDetails < Build - expose :commands - expose :repo_url - expose :before_sha - expose :allow_git_fetch - expose :token - expose :artifacts_expire_at, if: ->(build, _) { build.artifacts? } - - expose :options do |model| - # This part ensures that output of old API is still the same after adding support - # for extended docker configuration options, used by new API - # - # I'm leaving this here, not in the model, because it should be removed at the same time - # when old API will be removed (planned for August 2017). - model.options.dup.tap do |options| - options[:image] = options[:image][:name] if options[:image].is_a?(Hash) - options[:services]&.map! do |service| - if service.is_a?(Hash) - service[:name] - else - service - end - end - end - end - - expose :timeout do |model| - model.timeout - end - - expose :variables - expose :depends_on_builds, using: Build - - expose :credentials, using: BuildCredentials - end - - class Runner < Grape::Entity - expose :id, :token - end - - class RunnerProject < Grape::Entity - expose :id, :project_id, :runner_id - end - - class WebHook < Grape::Entity - expose :id, :project_id, :url - end - - class TriggerRequest < Grape::Entity - expose :id, :variables - expose :pipeline, using: Commit, as: :commit - end - end - end -end diff --git a/lib/ci/api/helpers.rb b/lib/ci/api/helpers.rb deleted file mode 100644 index a40b6ab6c9f..00000000000 --- a/lib/ci/api/helpers.rb +++ /dev/null @@ -1,89 +0,0 @@ -module Ci - module API - module Helpers - BUILD_TOKEN_HEADER = "HTTP_BUILD_TOKEN".freeze - BUILD_TOKEN_PARAM = :token - UPDATE_RUNNER_EVERY = 10 * 60 - - def authenticate_runners! - forbidden! unless runner_registration_token_valid? - end - - def authenticate_runner! - forbidden! unless current_runner - end - - def authenticate_build! - build = Ci::Build.find_by_id(params[:id]) - - validate_build!(build) do - forbidden! 
unless build_token_valid?(build) - end - - build - end - - def validate_build!(build) - not_found! unless build - - yield if block_given? - - project = build.project - forbidden!('Project has been deleted!') if project.nil? || project.pending_delete? - forbidden!('Build has been erased!') if build.erased? - end - - def runner_registration_token_valid? - ActiveSupport::SecurityUtils.variable_size_secure_compare( - params[:token], - current_application_settings.runners_registration_token) - end - - def build_token_valid?(build) - token = (params[BUILD_TOKEN_PARAM] || env[BUILD_TOKEN_HEADER]).to_s - - # We require to also check `runners_token` to maintain compatibility with old version of runners - token && (build.valid_token?(token) || build.project.valid_runners_token?(token)) - end - - def update_runner_info - return unless update_runner? - - current_runner.contacted_at = Time.now - current_runner.assign_attributes(get_runner_version_from_params) - current_runner.save if current_runner.changed? - end - - def update_runner? - # Use a random threshold to prevent beating DB updates. - # It generates a distribution between [40m, 80m]. - # - contacted_at_max_age = UPDATE_RUNNER_EVERY + Random.rand(UPDATE_RUNNER_EVERY) - - current_runner.contacted_at.nil? || - (Time.now - current_runner.contacted_at) >= contacted_at_max_age - end - - def build_not_found! - if headers['User-Agent'].to_s =~ /gitlab-ci-multi-runner \d+\.\d+\.\d+(~beta\.\d+\.g[0-9a-f]+)? / - no_content! - else - not_found! - end - end - - def current_runner - @runner ||= Runner.find_by_token(params[:token].to_s) - end - - def get_runner_version_from_params - return unless params["info"].present? - attributes_for_keys(%w(name version revision platform architecture), params["info"]) - end - - def max_artifacts_size - current_application_settings.max_artifacts_size.megabytes.to_i - end - end - end -end diff --git a/lib/ci/api/runners.rb b/lib/ci/api/runners.rb deleted file mode 100644 index 45aa2adccf5..00000000000 --- a/lib/ci/api/runners.rb +++ /dev/null @@ -1,50 +0,0 @@ -module Ci - module API - class Runners < Grape::API - resource :runners do - desc 'Delete a runner' - params do - requires :token, type: String, desc: 'The unique token of the runner' - end - delete "delete" do - authenticate_runner! - - status(200) - Ci::Runner.find_by_token(params[:token]).destroy - end - - desc 'Register a new runner' do - success Entities::Runner - end - params do - requires :token, type: String, desc: 'The unique token of the runner' - optional :description, type: String, desc: 'The description of the runner' - optional :tag_list, type: Array[String], desc: 'A list of tags the runner should run for' - optional :run_untagged, type: Boolean, desc: 'Flag if the runner should execute untagged jobs' - optional :locked, type: Boolean, desc: 'Lock this runner for this specific project' - end - post "register" do - runner_params = declared(params, include_missing: false).except(:token) - - runner = - if runner_registration_token_valid? - # Create shared runner. Requires admin access - Ci::Runner.create(runner_params.merge(is_shared: true)) - elsif project = Project.find_by(runners_token: params[:token]) - # Create a specific runner for project. - project.runners.create(runner_params) - end - - return forbidden! unless runner - - if runner.id - runner.update(get_runner_version_from_params) - present runner, with: Entities::Runner - else - not_found! 
- end - end - end - end - end -end diff --git a/lib/ci/api/triggers.rb b/lib/ci/api/triggers.rb deleted file mode 100644 index 6225203f223..00000000000 --- a/lib/ci/api/triggers.rb +++ /dev/null @@ -1,39 +0,0 @@ -module Ci - module API - class Triggers < Grape::API - resource :projects do - desc 'Trigger a GitLab CI project build' do - success Entities::TriggerRequest - end - params do - requires :id, type: Integer, desc: 'The ID of a CI project' - requires :ref, type: String, desc: "The name of project's branch or tag" - requires :token, type: String, desc: 'The unique token of the trigger' - optional :variables, type: Hash, desc: 'Optional build variables' - end - post ":id/refs/:ref/trigger" do - project = Project.find_by(ci_id: params[:id]) - trigger = Ci::Trigger.find_by_token(params[:token]) - not_found! unless project && trigger - unauthorized! unless trigger.project == project - - # Validate variables - variables = params[:variables].to_h - unless variables.all? { |key, value| key.is_a?(String) && value.is_a?(String) } - render_api_error!('variables needs to be a map of key-valued strings', 400) - end - - # create request and trigger builds - result = Ci::CreateTriggerRequestService.execute(project, trigger, params[:ref], variables) - pipeline = result.pipeline - - if pipeline.persisted? - present result.trigger_request, with: Entities::TriggerRequest - else - render_validation_error!(pipeline) - end - end - end - end - end -end diff --git a/lib/gitlab/background_migration/migrate_stage_status.rb b/lib/gitlab/background_migration/migrate_stage_status.rb new file mode 100644 index 00000000000..b1ff0900709 --- /dev/null +++ b/lib/gitlab/background_migration/migrate_stage_status.rb @@ -0,0 +1,77 @@ +module Gitlab + module BackgroundMigration + class MigrateStageStatus + STATUSES = { created: 0, pending: 1, running: 2, success: 3, + failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze + + class Build < ActiveRecord::Base + self.table_name = 'ci_builds' + + scope :latest, -> { where(retried: [false, nil]) } + scope :created, -> { where(status: 'created') } + scope :running, -> { where(status: 'running') } + scope :pending, -> { where(status: 'pending') } + scope :success, -> { where(status: 'success') } + scope :failed, -> { where(status: 'failed') } + scope :canceled, -> { where(status: 'canceled') } + scope :skipped, -> { where(status: 'skipped') } + scope :manual, -> { where(status: 'manual') } + + scope :failed_but_allowed, -> do + where(allow_failure: true, status: [:failed, :canceled]) + end + + scope :exclude_ignored, -> do + where("allow_failure = ? 
OR status IN (?)", + false, %w[created pending running success skipped]) + end + + def self.status_sql + scope_relevant = latest.exclude_ignored + scope_warnings = latest.failed_but_allowed + + builds = scope_relevant.select('count(*)').to_sql + created = scope_relevant.created.select('count(*)').to_sql + success = scope_relevant.success.select('count(*)').to_sql + manual = scope_relevant.manual.select('count(*)').to_sql + pending = scope_relevant.pending.select('count(*)').to_sql + running = scope_relevant.running.select('count(*)').to_sql + skipped = scope_relevant.skipped.select('count(*)').to_sql + canceled = scope_relevant.canceled.select('count(*)').to_sql + warnings = scope_warnings.select('count(*) > 0').to_sql + + <<-SQL.strip_heredoc + (CASE + WHEN (#{builds}) = (#{skipped}) AND (#{warnings}) THEN #{STATUSES[:success]} + WHEN (#{builds}) = (#{skipped}) THEN #{STATUSES[:skipped]} + WHEN (#{builds}) = (#{success}) THEN #{STATUSES[:success]} + WHEN (#{builds}) = (#{created}) THEN #{STATUSES[:created]} + WHEN (#{builds}) = (#{success}) + (#{skipped}) THEN #{STATUSES[:success]} + WHEN (#{builds}) = (#{success}) + (#{skipped}) + (#{canceled}) THEN #{STATUSES[:canceled]} + WHEN (#{builds}) = (#{created}) + (#{skipped}) + (#{pending}) THEN #{STATUSES[:pending]} + WHEN (#{running}) + (#{pending}) > 0 THEN #{STATUSES[:running]} + WHEN (#{manual}) > 0 THEN #{STATUSES[:manual]} + WHEN (#{created}) > 0 THEN #{STATUSES[:running]} + ELSE #{STATUSES[:failed]} + END) + SQL + end + end + + def perform(start_id, stop_id) + status_sql = Build + .where('ci_builds.commit_id = ci_stages.pipeline_id') + .where('ci_builds.stage = ci_stages.name') + .status_sql + + sql = <<-SQL + UPDATE ci_stages SET status = (#{status_sql}) + WHERE ci_stages.status IS NULL + AND ci_stages.id BETWEEN #{start_id.to_i} AND #{stop_id.to_i} + SQL + + ActiveRecord::Base.connection.execute(sql) + end + end + end +end diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb index 53aa5b12489..eb3731ba35a 100644 --- a/lib/gitlab/git/repository.rb +++ b/lib/gitlab/git/repository.rb @@ -64,7 +64,6 @@ module Gitlab end delegate :empty?, - :bare?, to: :rugged delegate :exists?, to: :gitaly_repository_client @@ -126,6 +125,8 @@ module Gitlab # This is to work around a bug in libgit2 that causes in-memory refs to # be stale/invalid when packed-refs is changed. # See https://gitlab.com/gitlab-org/gitlab-ce/issues/15392#note_14538333 + # + # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/474 def find_branch(name, force_reload = false) reload_rugged if force_reload @@ -231,10 +232,6 @@ module Gitlab branch_names + tag_names end - def has_commits? - !empty? 
- end - # Discovers the default branch based on the repository's available branches # # - If no branches are present, returns nil @@ -574,6 +571,8 @@ module Gitlab end # Delete the specified branch from the repository + # + # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/476 def delete_branch(branch_name) rugged.branches.delete(branch_name) end @@ -583,6 +582,8 @@ module Gitlab # Examples: # create_branch("feature") # create_branch("other-feature", "master") + # + # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/476 def create_branch(ref, start_point = "HEAD") rugged_ref = rugged.branches.create(ref, start_point) target_commit = Gitlab::Git::Commit.find(self, rugged_ref.target) @@ -592,38 +593,26 @@ module Gitlab raise InvalidRef.new("Invalid reference #{start_point}") end - # Return an array of this repository's remote names - def remote_names - rugged.remotes.each_name.to_a - end - # Delete the specified remote from this repository. def remote_delete(remote_name) rugged.remotes.delete(remote_name) + nil end - # Add a new remote to this repository. Returns a Rugged::Remote object + # Add a new remote to this repository. def remote_add(remote_name, url) rugged.remotes.create(remote_name, url) + nil end # Update the specified remote using the values in the +options+ hash # # Example # repo.update_remote("origin", url: "path/to/repo") - def remote_update(remote_name, options = {}) + def remote_update(remote_name, url:) # TODO: Implement other remote options - rugged.remotes.set_url(remote_name, options[:url]) if options[:url] - end - - # Fetch the specified remote - def fetch(remote_name) - rugged.remotes[remote_name].fetch - end - - # Push +*refspecs+ to the remote identified by +remote_name+. - def push(remote_name, *refspecs) - rugged.remotes[remote_name].push(refspecs) + rugged.remotes.set_url(remote_name, url) + nil end AUTOCRLF_VALUES = { diff --git a/spec/factories/ci/stages.rb b/spec/factories/ci/stages.rb index d3c8bf9d54f..b2ded945738 100644 --- a/spec/factories/ci/stages.rb +++ b/spec/factories/ci/stages.rb @@ -15,4 +15,12 @@ FactoryGirl.define do warnings: warnings) end end + + factory :ci_stage_entity, class: Ci::Stage do + project factory: :project + pipeline factory: :ci_empty_pipeline + + name 'test' + status 'pending' + end end diff --git a/spec/features/atom/users_spec.rb b/spec/features/atom/users_spec.rb index 79069bbca8e..9ce687afb31 100644 --- a/spec/features/atom/users_spec.rb +++ b/spec/features/atom/users_spec.rb @@ -41,6 +41,8 @@ describe "User Feed" do target_project: project, description: "Here is the fix: ![an image](image.png)") end + let(:push_event) { create(:push_event, project: project, author: user) } + let!(:push_event_payload) { create(:push_event_payload, event: push_event) } before do project.team << [user, :master] @@ -70,6 +72,10 @@ describe "User Feed" do it 'has XHTML summaries in merge request descriptions' do expect(body).to match /Here is the fix: <a[^>]*><img[^>]*\/><\/a>/ end + + it 'has push event commit ID' do + expect(body).to have_content(Commit.truncate_sha(push_event.commit_id)) + end end end diff --git a/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb b/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb new file mode 100644 index 00000000000..878158910be --- /dev/null +++ b/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb @@ -0,0 +1,80 @@ +require 'spec_helper' + +describe Gitlab::BackgroundMigration::MigrateStageStatus, :migration, schema: 20170711145320 do 
+ let(:projects) { table(:projects) } + let(:pipelines) { table(:ci_pipelines) } + let(:stages) { table(:ci_stages) } + let(:jobs) { table(:ci_builds) } + + STATUSES = { created: 0, pending: 1, running: 2, success: 3, + failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze + + before do + projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1') + pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a') + stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil) + stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'deploy', status: nil) + end + + context 'when stage status is known' do + before do + create_job(project: 1, pipeline: 1, stage: 'test', status: 'success') + create_job(project: 1, pipeline: 1, stage: 'test', status: 'running') + create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed') + end + + it 'sets a correct stage status' do + described_class.new.perform(1, 2) + + expect(stages.first.status).to eq STATUSES[:running] + expect(stages.second.status).to eq STATUSES[:failed] + end + end + + context 'when stage status is not known' do + it 'sets a skipped stage status' do + described_class.new.perform(1, 2) + + expect(stages.first.status).to eq STATUSES[:skipped] + expect(stages.second.status).to eq STATUSES[:skipped] + end + end + + context 'when stage status includes status of a retried job' do + before do + create_job(project: 1, pipeline: 1, stage: 'test', status: 'canceled') + create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed', retried: true) + create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success') + end + + it 'sets a correct stage status' do + described_class.new.perform(1, 2) + + expect(stages.first.status).to eq STATUSES[:canceled] + expect(stages.second.status).to eq STATUSES[:success] + end + end + + context 'when some job in the stage is blocked / manual' do + before do + create_job(project: 1, pipeline: 1, stage: 'test', status: 'failed') + create_job(project: 1, pipeline: 1, stage: 'test', status: 'manual') + create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success', when: 'manual') + end + + it 'sets a correct stage status' do + described_class.new.perform(1, 2) + + expect(stages.first.status).to eq STATUSES[:manual] + expect(stages.second.status).to eq STATUSES[:success] + end + end + + def create_job(project:, pipeline:, stage:, status:, **opts) + stages = { test: 1, build: 2, deploy: 3 } + + jobs.create!(project_id: project, commit_id: pipeline, + stage_idx: stages[stage.to_sym], stage: stage, + status: status, **opts) + end +end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index a3ae0a4686d..8ec8dfe6acf 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -235,18 +235,10 @@ describe Gitlab::Git::Repository, seed_helper: true do it { is_expected.to be < 2 } end - describe '#has_commits?' do - it { expect(repository.has_commits?).to be_truthy } - end - describe '#empty?' do it { expect(repository.empty?).to be_falsey } end - describe '#bare?' 
do - it { expect(repository.bare?).to be_truthy } - end - describe '#ref_names' do let(:ref_names) { repository.ref_names } subject { ref_names } @@ -441,15 +433,6 @@ describe Gitlab::Git::Repository, seed_helper: true do end end - describe "#remote_names" do - let(:remotes) { repository.remote_names } - - it "should have one entry: 'origin'" do - expect(remotes.size).to eq(1) - expect(remotes.first).to eq("origin") - end - end - describe "#refs_hash" do let(:refs) { repository.refs_hash } diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index ae3b0173160..a5e03e149a7 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -227,6 +227,8 @@ Ci::Pipeline: Ci::Stage: - id - name +- status +- lock_version - project_id - pipeline_id - created_at diff --git a/spec/migrations/migrate_stage_id_reference_in_background_spec.rb b/spec/migrations/migrate_stage_id_reference_in_background_spec.rb index 260378adaa7..9b92f4b70b0 100644 --- a/spec/migrations/migrate_stage_id_reference_in_background_spec.rb +++ b/spec/migrations/migrate_stage_id_reference_in_background_spec.rb @@ -2,19 +2,6 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20170628080858_migrate_stage_id_reference_in_background') describe MigrateStageIdReferenceInBackground, :migration, :sidekiq do - matcher :be_scheduled_migration do |delay, *expected| - match do |migration| - BackgroundMigrationWorker.jobs.any? do |job| - job['args'] == [migration, expected] && - job['at'].to_i == (delay.to_i + Time.now.to_i) - end - end - - failure_message do |migration| - "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!" 
- end - end - let(:jobs) { table(:ci_builds) } let(:stages) { table(:ci_stages) } let(:pipelines) { table(:ci_pipelines) } diff --git a/spec/migrations/migrate_stages_statuses_spec.rb b/spec/migrations/migrate_stages_statuses_spec.rb new file mode 100644 index 00000000000..4102d57e368 --- /dev/null +++ b/spec/migrations/migrate_stages_statuses_spec.rb @@ -0,0 +1,67 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170711145558_migrate_stages_statuses.rb') + +describe MigrateStagesStatuses, :migration do + let(:jobs) { table(:ci_builds) } + let(:stages) { table(:ci_stages) } + let(:pipelines) { table(:ci_pipelines) } + let(:projects) { table(:projects) } + + STATUSES = { created: 0, pending: 1, running: 2, success: 3, + failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze + + before do + stub_const("#{described_class.name}::BATCH_SIZE", 2) + stub_const("#{described_class.name}::RANGE_SIZE", 2) + + projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1') + projects.create!(id: 2, name: 'gitlab2', path: 'gitlab2') + + pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a') + pipelines.create!(id: 2, project_id: 2, ref: 'feature', sha: '21a3deb') + + create_job(project: 1, pipeline: 1, stage: 'test', status: 'success') + create_job(project: 1, pipeline: 1, stage: 'test', status: 'running') + create_job(project: 1, pipeline: 1, stage: 'build', status: 'success') + create_job(project: 1, pipeline: 1, stage: 'build', status: 'failed') + create_job(project: 2, pipeline: 2, stage: 'test', status: 'success') + create_job(project: 2, pipeline: 2, stage: 'test', status: 'success') + create_job(project: 2, pipeline: 2, stage: 'test', status: 'failed', retried: true) + + stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil) + stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'build', status: nil) + stages.create!(id: 3, pipeline_id: 2, project_id: 2, name: 'test', status: nil) + end + + it 'correctly migrates stages statuses' do + Sidekiq::Testing.inline! do + expect(stages.where(status: nil).count).to eq 3 + + migrate! + + expect(stages.where(status: nil)).to be_empty + expect(stages.all.order('id ASC').pluck(:status)) + .to eq [STATUSES[:running], STATUSES[:failed], STATUSES[:success]] + end + end + + it 'correctly schedules background migrations' do + Sidekiq::Testing.fake! do + Timecop.freeze do + migrate! 
+ + expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes, 1, 2) + expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, 3, 3) + expect(BackgroundMigrationWorker.jobs.size).to eq 2 + end + end + end + + def create_job(project:, pipeline:, stage:, status:, **opts) + stages = { test: 1, build: 2, deploy: 3 } + + jobs.create!(project_id: project, commit_id: pipeline, + stage_idx: stages[stage.to_sym], stage: stage, + status: status, **opts) + end +end diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb new file mode 100644 index 00000000000..74c9d6145e2 --- /dev/null +++ b/spec/models/ci/stage_spec.rb @@ -0,0 +1,79 @@ +require 'spec_helper' + +describe Ci::Stage, :models do + let(:stage) { create(:ci_stage_entity) } + + describe 'associations' do + before do + create(:ci_build, stage_id: stage.id) + create(:commit_status, stage_id: stage.id) + end + + describe '#statuses' do + it 'returns all commit statuses' do + expect(stage.statuses.count).to be 2 + end + end + + describe '#builds' do + it 'returns only builds' do + expect(stage.builds).to be_one + end + end + end + + describe '#status' do + context 'when stage is pending' do + let(:stage) { create(:ci_stage_entity, status: 'pending') } + + it 'has a correct status value' do + expect(stage.status).to eq 'pending' + end + end + + context 'when stage is success' do + let(:stage) { create(:ci_stage_entity, status: 'success') } + + it 'has a correct status value' do + expect(stage.status).to eq 'success' + end + end + end + + describe 'update_status' do + context 'when stage objects needs to be updated' do + before do + create(:ci_build, :success, stage_id: stage.id) + create(:ci_build, :running, stage_id: stage.id) + end + + it 'updates stage status correctly' do + expect { stage.update_status } + .to change { stage.reload.status } + .to 'running' + end + end + + context 'when stage is skipped' do + it 'updates status to skipped' do + expect { stage.update_status } + .to change { stage.reload.status } + .to 'skipped' + end + end + + context 'when stage object is locked' do + before do + create(:ci_build, :failed, stage_id: stage.id) + end + + it 'retries a lock to update a stage status' do + stage.lock_version = 100 + + stage.update_status + + expect(stage.reload).to be_failed + end + end + end +end diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb index 8c4a366ef8f..f7583645e69 100644 --- a/spec/models/commit_status_spec.rb +++ b/spec/models/commit_status_spec.rb @@ -7,10 +7,10 @@ describe CommitStatus do create(:ci_pipeline, project: project, sha: project.commit.id) end - let(:commit_status) { create_status } + let(:commit_status) { create_status(stage: 'test') } - def create_status(args = {}) - create(:commit_status, args.merge(pipeline: pipeline)) + def create_status(**opts) + create(:commit_status, pipeline: pipeline, **opts) end it { is_expected.to belong_to(:pipeline) } diff --git a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb deleted file mode 100644 index 7ccba4ba3ec..00000000000 --- a/spec/requests/ci/api/builds_spec.rb +++ /dev/null @@ -1,912 +0,0 @@ -require 'spec_helper' - -describe Ci::API::Builds do - let(:runner) { FactoryGirl.create(:ci_runner, tag_list: %w(mysql ruby)) } - let(:project) { FactoryGirl.create(:project, shared_runners_enabled: false) } - let(:last_update) { nil } - - describe "Builds API for runners" do - let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') } - - 
before do - project.runners << runner - end - - describe "POST /builds/register" do - let!(:build) { create(:ci_build, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - let(:user_agent) { 'gitlab-ci-multi-runner 1.5.2 (1-5-stable; go1.6.3; linux/amd64)' } - let!(:last_update) { } - let!(:new_update) { } - - before do - stub_container_registry_config(enabled: false) - end - - shared_examples 'no builds available' do - context 'when runner sends version in User-Agent' do - context 'for stable version' do - it 'gives 204 and set X-GitLab-Last-Update' do - expect(response).to have_http_status(204) - expect(response.header).to have_key('X-GitLab-Last-Update') - end - end - - context 'when last_update is up-to-date' do - let(:last_update) { runner.ensure_runner_queue_value } - - it 'gives 204 and set the same X-GitLab-Last-Update' do - expect(response).to have_http_status(204) - expect(response.header['X-GitLab-Last-Update']) - .to eq(last_update) - end - end - - context 'when last_update is outdated' do - let(:last_update) { runner.ensure_runner_queue_value } - let(:new_update) { runner.tick_runner_queue } - - it 'gives 204 and set a new X-GitLab-Last-Update' do - expect(response).to have_http_status(204) - expect(response.header['X-GitLab-Last-Update']) - .to eq(new_update) - end - end - - context 'for beta version' do - let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (1-5-stable; go1.6.3; linux/amd64)' } - it { expect(response).to have_http_status(204) } - end - end - - context "when runner doesn't send version in User-Agent" do - let(:user_agent) { 'Go-http-client/1.1' } - it { expect(response).to have_http_status(404) } - end - - context "when runner doesn't have a User-Agent" do - let(:user_agent) { nil } - it { expect(response).to have_http_status(404) } - end - end - - context 'when an old image syntax is used' do - before do - build.update!(options: { image: 'codeclimate' }) - end - - it 'starts a build' do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response["options"]).to eq({ "image" => "codeclimate" }) - end - end - - context 'when a new image syntax is used' do - before do - build.update!(options: { image: { name: 'codeclimate' } }) - end - - it 'starts a build' do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response["options"]).to eq({ "image" => "codeclimate" }) - end - end - - context 'when an old service syntax is used' do - before do - build.update!(options: { services: ['mysql'] }) - end - - it 'starts a build' do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response["options"]).to eq({ "services" => ["mysql"] }) - end - end - - context 'when a new service syntax is used' do - before do - build.update!(options: { services: [name: 'mysql'] }) - end - - it 'starts a build' do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response["options"]).to eq({ "services" => ["mysql"] }) - end - end - - context 'when no image or service is defined' do - before do - build.update!(options: {}) - end - - it 'starts a build' do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - - expect(json_response["options"]).to be_empty - end - end - - context 'when there is a pending build' do - it 'starts a build' do - register_builds info: { platform: :darwin } - - 
expect(response).to have_http_status(201) - expect(response.headers).not_to have_key('X-GitLab-Last-Update') - expect(json_response['sha']).to eq(build.sha) - expect(runner.reload.platform).to eq("darwin") - expect(json_response["options"]).to eq({ "image" => "ruby:2.1", "services" => ["postgres"] }) - expect(json_response["variables"]).to include( - { "key" => "CI_JOB_NAME", "value" => "spinach", "public" => true }, - { "key" => "CI_JOB_STAGE", "value" => "test", "public" => true }, - { "key" => "DB_NAME", "value" => "postgres", "public" => true } - ) - end - - it 'updates runner info' do - expect { register_builds }.to change { runner.reload.contacted_at } - end - - context 'when concurrently updating build' do - before do - expect_any_instance_of(Ci::Build).to receive(:run!) - .and_raise(ActiveRecord::StaleObjectError.new(nil, nil)) - end - - it 'returns a conflict' do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(409) - expect(response.headers).not_to have_key('X-GitLab-Last-Update') - end - end - - context 'registry credentials' do - let(:registry_credentials) do - { 'type' => 'registry', - 'url' => 'registry.example.com:5005', - 'username' => 'gitlab-ci-token', - 'password' => build.token } - end - - context 'when registry is enabled' do - before do - stub_container_registry_config(enabled: true, host_port: 'registry.example.com:5005') - end - - it 'sends registry credentials key' do - register_builds info: { platform: :darwin } - - expect(json_response).to have_key('credentials') - expect(json_response['credentials']).to include(registry_credentials) - end - end - - context 'when registry is disabled' do - before do - stub_container_registry_config(enabled: false, host_port: 'registry.example.com:5005') - end - - it 'does not send registry credentials' do - register_builds info: { platform: :darwin } - - expect(json_response).to have_key('credentials') - expect(json_response['credentials']).not_to include(registry_credentials) - end - end - end - - context 'when docker configuration options are used' do - let!(:build) { create(:ci_build, :extended_options, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - - it 'starts a build' do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response['options']['image']).to eq('ruby:2.1') - expect(json_response['options']['services']).to eq(['postgres', 'docker:dind']) - end - end - end - - context 'when builds are finished' do - before do - build.success - register_builds - end - - it_behaves_like 'no builds available' - end - - context 'for other project with builds' do - before do - build.success - create(:ci_build, :pending) - register_builds - end - - it_behaves_like 'no builds available' - end - - context 'for shared runner' do - let!(:runner) { create(:ci_runner, :shared, token: "SharedRunner") } - - before do - register_builds(runner.token) - end - - it_behaves_like 'no builds available' - end - - context 'for triggered build' do - before do - trigger = create(:ci_trigger, project: project) - create(:ci_trigger_request_with_variables, pipeline: pipeline, builds: [build], trigger: trigger) - project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value") - end - - it "returns variables for triggers" do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response["variables"]).to include( - { "key" => "CI_JOB_NAME", "value" => "spinach", "public" => true 
}, - { "key" => "CI_JOB_STAGE", "value" => "test", "public" => true }, - { "key" => "CI_PIPELINE_TRIGGERED", "value" => "true", "public" => true }, - { "key" => "DB_NAME", "value" => "postgres", "public" => true }, - { "key" => "SECRET_KEY", "value" => "secret_value", "public" => false }, - { "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false } - ) - end - end - - context 'with multiple builds' do - before do - build.success - end - - let!(:test_build) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) } - - it "returns dependent builds" do - register_builds info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response["id"]).to eq(test_build.id) - expect(json_response["depends_on_builds"].count).to eq(1) - expect(json_response["depends_on_builds"][0]).to include('id' => build.id, 'name' => 'spinach') - end - end - - %w(name version revision platform architecture).each do |param| - context "updates runner #{param}" do - let(:value) { "#{param}_value" } - - subject { runner.read_attribute(param.to_sym) } - - it do - register_builds info: { param => value } - - expect(response).to have_http_status(201) - runner.reload - is_expected.to eq(value) - end - end - end - - context 'when build has no tags' do - before do - build.update(tags: []) - end - - context 'when runner is allowed to pick untagged builds' do - before do - runner.update_column(:run_untagged, true) - end - - it 'picks build' do - register_builds - - expect(response).to have_http_status 201 - end - end - - context 'when runner is not allowed to pick untagged builds' do - before do - runner.update_column(:run_untagged, false) - register_builds - end - - it_behaves_like 'no builds available' - end - end - - context 'when runner is paused' do - let(:runner) { create(:ci_runner, :inactive, token: 'InactiveRunner') } - - it 'responds with 404' do - register_builds - - expect(response).to have_http_status 404 - end - - it 'does not update runner info' do - expect { register_builds } - .not_to change { runner.reload.contacted_at } - end - end - - def register_builds(token = runner.token, **params) - new_params = params.merge(token: token, last_update: last_update) - - post ci_api("/builds/register"), new_params, { 'User-Agent' => user_agent } - end - end - - describe "PUT /builds/:id" do - let(:build) { create(:ci_build, :pending, :trace, pipeline: pipeline, runner_id: runner.id) } - - before do - build.run! 
- put ci_api("/builds/#{build.id}"), token: runner.token - end - - it "updates a running build" do - expect(response).to have_http_status(200) - end - - it 'does not override trace information when no trace is given' do - expect(build.reload.trace.raw).to eq 'BUILD TRACE' - end - - context 'job has been erased' do - let(:build) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) } - - it 'responds with forbidden' do - expect(response.status).to eq 403 - end - end - end - - describe 'PATCH /builds/:id/trace.txt' do - let(:build) do - attributes = { runner_id: runner.id, pipeline: pipeline } - create(:ci_build, :running, :trace, attributes) - end - - let(:headers) { { Ci::API::Helpers::BUILD_TOKEN_HEADER => build.token, 'Content-Type' => 'text/plain' } } - let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) } - let(:update_interval) { 10.seconds.to_i } - - def patch_the_trace(content = ' appended', request_headers = nil) - unless request_headers - build.trace.read do |stream| - offset = stream.size - limit = offset + content.length - 1 - request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" }) - end - end - - Timecop.travel(build.updated_at + update_interval) do - patch ci_api("/builds/#{build.id}/trace.txt"), content, request_headers - build.reload - end - end - - def initial_patch_the_trace - patch_the_trace(' appended', headers_with_range) - end - - def force_patch_the_trace - 2.times { patch_the_trace('') } - end - - before do - initial_patch_the_trace - end - - context 'when request is valid' do - it 'gets correct response' do - expect(response.status).to eq 202 - expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' - expect(response.header).to have_key 'Range' - expect(response.header).to have_key 'Build-Status' - end - - context 'when build has been updated recently' do - it { expect { patch_the_trace }.not_to change { build.updated_at }} - - it 'changes the build trace' do - patch_the_trace - - expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended' - end - - context 'when Runner makes a force-patch' do - it { expect { force_patch_the_trace }.not_to change { build.updated_at }} - - it "doesn't change the build.trace" do - force_patch_the_trace - - expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' - end - end - end - - context 'when build was not updated recently' do - let(:update_interval) { 15.minutes.to_i } - - it { expect { patch_the_trace }.to change { build.updated_at } } - - it 'changes the build.trace' do - patch_the_trace - - expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended' - end - - context 'when Runner makes a force-patch' do - it { expect { force_patch_the_trace }.to change { build.updated_at } } - - it "doesn't change the build.trace" do - force_patch_the_trace - - expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' - end - end - end - - context 'when project for the build has been deleted' do - let(:build) do - attributes = { runner_id: runner.id, pipeline: pipeline } - create(:ci_build, :running, :trace, attributes) do |build| - build.project.update(pending_delete: true) - end - end - - it 'responds with forbidden' do - expect(response.status).to eq(403) - end - end - end - - context 'when Runner makes a force-patch' do - before do - force_patch_the_trace - end - - it 'gets correct response' do - expect(response.status).to eq 202 - expect(build.reload.trace.raw).to eq 'BUILD TRACE appended' - expect(response.header).to have_key 'Range' - 
expect(response.header).to have_key 'Build-Status' - end - end - - context 'when content-range start is too big' do - let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20' }) } - - it 'gets 416 error response with range headers' do - expect(response.status).to eq 416 - expect(response.header).to have_key 'Range' - expect(response.header['Range']).to eq '0-11' - end - end - - context 'when content-range start is too small' do - let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20' }) } - - it 'gets 416 error response with range headers' do - expect(response.status).to eq 416 - expect(response.header).to have_key 'Range' - expect(response.header['Range']).to eq '0-11' - end - end - - context 'when Content-Range header is missing' do - let(:headers_with_range) { headers } - - it { expect(response.status).to eq 400 } - end - - context 'when build has been errased' do - let(:build) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) } - - it { expect(response.status).to eq 403 } - end - end - - context "Artifacts" do - let(:file_upload) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') } - let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') } - let(:build) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) } - let(:authorize_url) { ci_api("/builds/#{build.id}/artifacts/authorize") } - let(:post_url) { ci_api("/builds/#{build.id}/artifacts") } - let(:delete_url) { ci_api("/builds/#{build.id}/artifacts") } - let(:get_url) { ci_api("/builds/#{build.id}/artifacts") } - let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') } - let(:headers) { { "GitLab-Workhorse" => "1.0", Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } } - let(:token) { build.token } - let(:headers_with_token) { headers.merge(Ci::API::Helpers::BUILD_TOKEN_HEADER => token) } - - before do - build.run! 
- end - - describe "POST /builds/:id/artifacts/authorize" do - context "authorizes posting artifact to running build" do - it "using token as parameter" do - post authorize_url, { token: build.token }, headers - - expect(response).to have_http_status(200) - expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) - expect(json_response["TempPath"]).not_to be_nil - end - - it "using token as header" do - post authorize_url, {}, headers_with_token - - expect(response).to have_http_status(200) - expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) - expect(json_response["TempPath"]).not_to be_nil - end - - it "using runners token" do - post authorize_url, { token: build.project.runners_token }, headers - - expect(response).to have_http_status(200) - expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) - expect(json_response["TempPath"]).not_to be_nil - end - - it "reject requests that did not go through gitlab-workhorse" do - headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER) - - post authorize_url, { token: build.token }, headers - - expect(response).to have_http_status(500) - end - end - - context "fails to post too large artifact" do - it "using token as parameter" do - stub_application_setting(max_artifacts_size: 0) - - post authorize_url, { token: build.token, filesize: 100 }, headers - - expect(response).to have_http_status(413) - end - - it "using token as header" do - stub_application_setting(max_artifacts_size: 0) - - post authorize_url, { filesize: 100 }, headers_with_token - - expect(response).to have_http_status(413) - end - end - - context 'authorization token is invalid' do - before do - post authorize_url, { token: 'invalid', filesize: 100 } - end - - it 'responds with forbidden' do - expect(response).to have_http_status(403) - end - end - end - - describe "POST /builds/:id/artifacts" do - context "disable sanitizer" do - before do - # by configuring this path we allow to pass temp file from any path - allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/') - end - - describe 'build has been erased' do - let(:build) { create(:ci_build, erased_at: Time.now) } - - before do - upload_artifacts(file_upload, headers_with_token) - end - - it 'responds with forbidden' do - expect(response.status).to eq 403 - end - end - - describe 'uploading artifacts for a running build' do - shared_examples 'successful artifacts upload' do - it 'updates successfully' do - response_filename = - json_response['artifacts_file']['filename'] - - expect(response).to have_http_status(201) - expect(response_filename).to eq(file_upload.original_filename) - end - end - - context 'uses regular file post' do - before do - upload_artifacts(file_upload, headers_with_token, false) - end - - it_behaves_like 'successful artifacts upload' - end - - context 'uses accelerated file post' do - before do - upload_artifacts(file_upload, headers_with_token, true) - end - - it_behaves_like 'successful artifacts upload' - end - - context 'updates artifact' do - before do - upload_artifacts(file_upload2, headers_with_token) - upload_artifacts(file_upload, headers_with_token) - end - - it_behaves_like 'successful artifacts upload' - end - - context 'when using runners token' do - let(:token) { build.project.runners_token } - - before do - upload_artifacts(file_upload, headers_with_token) - end - - it_behaves_like 'successful artifacts upload' - end - end - - context 'posts artifacts file and metadata 
file' do - let!(:artifacts) { file_upload } - let!(:metadata) { file_upload2 } - - let(:stored_artifacts_file) { build.reload.artifacts_file.file } - let(:stored_metadata_file) { build.reload.artifacts_metadata.file } - let(:stored_artifacts_size) { build.reload.artifacts_size } - - before do - post(post_url, post_data, headers_with_token) - end - - context 'posts data accelerated by workhorse is correct' do - let(:post_data) do - { 'file.path' => artifacts.path, - 'file.name' => artifacts.original_filename, - 'metadata.path' => metadata.path, - 'metadata.name' => metadata.original_filename } - end - - it 'stores artifacts and artifacts metadata' do - expect(response).to have_http_status(201) - expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename) - expect(stored_metadata_file.original_filename).to eq(metadata.original_filename) - expect(stored_artifacts_size).to eq(71759) - end - end - - context 'no artifacts file in post data' do - let(:post_data) do - { 'metadata' => metadata } - end - - it 'is expected to respond with bad request' do - expect(response).to have_http_status(400) - end - - it 'does not store metadata' do - expect(stored_metadata_file).to be_nil - end - end - end - - context 'with an expire date' do - let!(:artifacts) { file_upload } - let(:default_artifacts_expire_in) {} - - let(:post_data) do - { 'file.path' => artifacts.path, - 'file.name' => artifacts.original_filename, - 'expire_in' => expire_in } - end - - before do - stub_application_setting( - default_artifacts_expire_in: default_artifacts_expire_in) - - post(post_url, post_data, headers_with_token) - end - - context 'with an expire_in given' do - let(:expire_in) { '7 days' } - - it 'updates when specified' do - build.reload - expect(response).to have_http_status(201) - expect(json_response['artifacts_expire_at']).not_to be_empty - expect(build.artifacts_expire_at) - .to be_within(5.minutes).of(7.days.from_now) - end - end - - context 'with no expire_in given' do - let(:expire_in) { nil } - - it 'ignores if not specified' do - build.reload - expect(response).to have_http_status(201) - expect(json_response['artifacts_expire_at']).to be_nil - expect(build.artifacts_expire_at).to be_nil - end - - context 'with application default' do - context 'default to 5 days' do - let(:default_artifacts_expire_in) { '5 days' } - - it 'sets to application default' do - build.reload - expect(response).to have_http_status(201) - expect(json_response['artifacts_expire_at']) - .not_to be_empty - expect(build.artifacts_expire_at) - .to be_within(5.minutes).of(5.days.from_now) - end - end - - context 'default to 0' do - let(:default_artifacts_expire_in) { '0' } - - it 'does not set expire_in' do - build.reload - expect(response).to have_http_status(201) - expect(json_response['artifacts_expire_at']).to be_nil - expect(build.artifacts_expire_at).to be_nil - end - end - end - end - end - - context "artifacts file is too large" do - it "fails to post too large artifact" do - stub_application_setting(max_artifacts_size: 0) - upload_artifacts(file_upload, headers_with_token) - expect(response).to have_http_status(413) - end - end - - context "artifacts post request does not contain file" do - it "fails to post artifacts without file" do - post post_url, {}, headers_with_token - expect(response).to have_http_status(400) - end - end - - context 'GitLab Workhorse is not configured' do - it "fails to post artifacts without GitLab-Workhorse" do - post post_url, { token: build.token }, {} - expect(response).to 
have_http_status(403) - end - end - end - - context "artifacts are being stored outside of tmp path" do - before do - # by configuring this path we allow to pass file from @tmpdir only - # but all temporary files are stored in system tmp directory - @tmpdir = Dir.mktmpdir - allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir) - end - - after do - FileUtils.remove_entry @tmpdir - end - - it "fails to post artifacts for outside of tmp path" do - upload_artifacts(file_upload, headers_with_token) - expect(response).to have_http_status(400) - end - end - - def upload_artifacts(file, headers = {}, accelerated = true) - if accelerated - post post_url, { - 'file.path' => file.path, - 'file.name' => file.original_filename - }, headers - else - post post_url, { file: file }, headers - end - end - end - - describe 'DELETE /builds/:id/artifacts' do - let(:build) { create(:ci_build, :artifacts) } - - before do - delete delete_url, token: build.token - end - - shared_examples 'having removable artifacts' do - it 'removes build artifacts' do - build.reload - - expect(response).to have_http_status(200) - expect(build.artifacts_file.exists?).to be_falsy - expect(build.artifacts_metadata.exists?).to be_falsy - expect(build.artifacts_size).to be_nil - end - end - - context 'when using build token' do - before do - delete delete_url, token: build.token - end - - it_behaves_like 'having removable artifacts' - end - - context 'when using runnners token' do - before do - delete delete_url, token: build.project.runners_token - end - - it_behaves_like 'having removable artifacts' - end - end - - describe 'GET /builds/:id/artifacts' do - before do - get get_url, token: token - end - - context 'build has artifacts' do - let(:build) { create(:ci_build, :artifacts) } - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } - end - - shared_examples 'having downloadable artifacts' do - it 'download artifacts' do - expect(response).to have_http_status(200) - expect(response.headers).to include download_headers - end - end - - context 'when using build token' do - let(:token) { build.token } - - it_behaves_like 'having downloadable artifacts' - end - - context 'when using runnners token' do - let(:token) { build.project.runners_token } - - it_behaves_like 'having downloadable artifacts' - end - end - - context 'build does not has artifacts' do - let(:token) { build.token } - - it 'responds with not found' do - expect(response).to have_http_status(404) - end - end - end - end - end -end diff --git a/spec/requests/ci/api/runners_spec.rb b/spec/requests/ci/api/runners_spec.rb deleted file mode 100644 index 75059dd20a0..00000000000 --- a/spec/requests/ci/api/runners_spec.rb +++ /dev/null @@ -1,127 +0,0 @@ -require 'spec_helper' - -describe Ci::API::Runners do - include StubGitlabCalls - - let(:registration_token) { 'abcdefg123456' } - - before do - stub_gitlab_calls - stub_application_setting(runners_registration_token: registration_token) - end - - describe "POST /runners/register" do - context 'when runner token is provided' do - before do - post ci_api("/runners/register"), token: registration_token - end - - it 'creates runner with default values' do - expect(response).to have_http_status 201 - expect(Ci::Runner.first.run_untagged).to be true - expect(Ci::Runner.first.token).not_to eq(registration_token) - end - end - - context 'when runner description is provided' do - before do - post 
ci_api("/runners/register"), token: registration_token, - description: "server.hostname" - end - - it 'creates runner' do - expect(response).to have_http_status 201 - expect(Ci::Runner.first.description).to eq("server.hostname") - end - end - - context 'when runner tags are provided' do - before do - post ci_api("/runners/register"), token: registration_token, - tag_list: "tag1, tag2" - end - - it 'creates runner' do - expect(response).to have_http_status 201 - expect(Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2)) - end - end - - context 'when option for running untagged jobs is provided' do - context 'when tags are provided' do - it 'creates runner' do - post ci_api("/runners/register"), token: registration_token, - run_untagged: false, - tag_list: ['tag'] - - expect(response).to have_http_status 201 - expect(Ci::Runner.first.run_untagged).to be false - end - end - - context 'when tags are not provided' do - it 'does not create runner' do - post ci_api("/runners/register"), token: registration_token, - run_untagged: false - - expect(response).to have_http_status 404 - end - end - end - - context 'when project token is provided' do - let(:project) { FactoryGirl.create(:project) } - - before do - post ci_api("/runners/register"), token: project.runners_token - end - - it 'creates runner' do - expect(response).to have_http_status 201 - expect(project.runners.size).to eq(1) - expect(Ci::Runner.first.token).not_to eq(registration_token) - expect(Ci::Runner.first.token).not_to eq(project.runners_token) - end - end - - context 'when token is invalid' do - it 'returns 403 error' do - post ci_api("/runners/register"), token: 'invalid' - - expect(response).to have_http_status 403 - end - end - - context 'when no token provided' do - it 'returns 400 error' do - post ci_api("/runners/register") - - expect(response).to have_http_status 400 - end - end - - %w(name version revision platform architecture).each do |param| - context "creates runner with #{param} saved" do - let(:value) { "#{param}_value" } - - subject { Ci::Runner.first.read_attribute(param.to_sym) } - - it do - post ci_api("/runners/register"), token: registration_token, info: { param => value } - expect(response).to have_http_status 201 - is_expected.to eq(value) - end - end - end - end - - describe "DELETE /runners/delete" do - it 'returns 200' do - runner = FactoryGirl.create(:ci_runner) - delete ci_api("/runners/delete"), token: runner.token - - expect(response).to have_http_status 200 - expect(Ci::Runner.count).to eq(0) - end - end -end diff --git a/spec/requests/ci/api/triggers_spec.rb b/spec/requests/ci/api/triggers_spec.rb deleted file mode 100644 index 7c77ebb69a2..00000000000 --- a/spec/requests/ci/api/triggers_spec.rb +++ /dev/null @@ -1,90 +0,0 @@ -require 'spec_helper' - -describe Ci::API::Triggers do - describe 'POST /projects/:project_id/refs/:ref/trigger' do - let!(:trigger_token) { 'secure token' } - let!(:project) { create(:project, :repository, ci_id: 10) } - let!(:project2) { create(:project, ci_id: 11) } - - let!(:trigger) do - create(:ci_trigger, - project: project, - token: trigger_token, - owner: create(:user)) - end - - let(:options) do - { - token: trigger_token - } - end - - before do - stub_ci_pipeline_to_return_yaml_file - - project.add_developer(trigger.owner) - end - - context 'Handles errors' do - it 'returns bad request if token is missing' do - post ci_api("/projects/#{project.ci_id}/refs/master/trigger") - expect(response).to have_http_status(400) - end - - it 'returns not found if project is not 
found' do - post ci_api('/projects/0/refs/master/trigger'), options - expect(response).to have_http_status(404) - end - - it 'returns unauthorized if token is for different project' do - post ci_api("/projects/#{project2.ci_id}/refs/master/trigger"), options - expect(response).to have_http_status(401) - end - end - - context 'Have a commit' do - let(:pipeline) { project.pipelines.last } - - it 'creates builds' do - post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options - expect(response).to have_http_status(201) - pipeline.builds.reload - expect(pipeline.builds.pending.size).to eq(2) - expect(pipeline.builds.size).to eq(5) - end - - it 'returns bad request with no builds created if there\'s no commit for that ref' do - post ci_api("/projects/#{project.ci_id}/refs/other-branch/trigger"), options - expect(response).to have_http_status(400) - expect(json_response['message']['base']) - .to contain_exactly('Reference not found') - end - - context 'Validates variables' do - let(:variables) do - { 'TRIGGER_KEY' => 'TRIGGER_VALUE' } - end - - it 'validates variables to be a hash' do - post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options.merge(variables: 'value') - expect(response).to have_http_status(400) - - expect(json_response['error']).to eq('variables is invalid') - end - - it 'validates variables needs to be a map of key-valued strings' do - post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options.merge(variables: { key: %w(1 2) }) - expect(response).to have_http_status(400) - expect(json_response['message']).to eq('variables needs to be a map of key-valued strings') - end - - it 'creates trigger request with variables' do - post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options.merge(variables: variables) - expect(response).to have_http_status(201) - pipeline.builds.reload - expect(pipeline.builds.first.trigger_request.variables).to eq(variables) - end - end - end - end -end diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb index 53d4fcfed18..8465a6f99bd 100644 --- a/spec/services/ci/create_pipeline_service_spec.rb +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -55,10 +55,15 @@ describe Ci::CreatePipelineService do context 'when merge requests already exist for this source branch' do it 'updates head pipeline of each merge request' do - merge_request_1 = create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project) - merge_request_2 = create(:merge_request, source_branch: 'master', target_branch: "branch_2", source_project: project) + merge_request_1 = create(:merge_request, source_branch: 'master', + target_branch: "branch_1", + source_project: project) - head_pipeline = pipeline + merge_request_2 = create(:merge_request, source_branch: 'master', + target_branch: "branch_2", + source_project: project) + + head_pipeline = execute_service expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline) expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline) @@ -66,9 +71,11 @@ describe Ci::CreatePipelineService do context 'when there is no pipeline for source branch' do it "does not update merge request head pipeline" do - merge_request = create(:merge_request, source_branch: 'feature', target_branch: "branch_1", source_project: project) + merge_request = create(:merge_request, source_branch: 'feature', + target_branch: "branch_1", + source_project: project) - head_pipeline = pipeline + head_pipeline = 
execute_service expect(merge_request.reload.head_pipeline).not_to eq(head_pipeline) end @@ -76,13 +83,19 @@ describe Ci::CreatePipelineService do context 'when merge request target project is different from source project' do let!(:target_project) { create(:project, :repository) } - let!(:forked_project_link) { create(:forked_project_link, forked_to_project: project, forked_from_project: target_project) } + + let!(:forked_project_link) do + create(:forked_project_link, forked_to_project: project, + forked_from_project: target_project) + end it 'updates head pipeline for merge request' do - merge_request = - create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project, target_project: target_project) + merge_request = create(:merge_request, source_branch: 'master', + target_branch: "branch_1", + source_project: project, + target_project: target_project) - head_pipeline = pipeline + head_pipeline = execute_service expect(merge_request.reload.head_pipeline).to eq(head_pipeline) end @@ -90,15 +103,36 @@ describe Ci::CreatePipelineService do context 'when the pipeline is not the latest for the branch' do it 'does not update merge request head pipeline' do - merge_request = create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project) + merge_request = create(:merge_request, source_branch: 'master', + target_branch: "branch_1", + source_project: project) - allow_any_instance_of(Ci::Pipeline).to receive(:latest?).and_return(false) + allow_any_instance_of(Ci::Pipeline) + .to receive(:latest?).and_return(false) - pipeline + execute_service expect(merge_request.reload.head_pipeline).to be_nil end end + + context 'when pipeline has errors' do + before do + stub_ci_pipeline_yaml_file('some invalid syntax') + end + + it 'updates merge request head pipeline reference' do + merge_request = create(:merge_request, source_branch: 'master', + target_branch: 'feature', + source_project: project) + + head_pipeline = execute_service + + expect(head_pipeline).to be_persisted + expect(head_pipeline.yaml_errors).to be_present + expect(merge_request.reload.head_pipeline).to eq head_pipeline + end + end end context 'auto-cancel enabled' do diff --git a/spec/support/background_migrations_matchers.rb b/spec/support/background_migrations_matchers.rb new file mode 100644 index 00000000000..423c0e4cefc --- /dev/null +++ b/spec/support/background_migrations_matchers.rb @@ -0,0 +1,13 @@ +RSpec::Matchers.define :be_scheduled_migration do |delay, *expected| + match do |migration| + BackgroundMigrationWorker.jobs.any? do |job| + job['args'] == [migration, expected] && + job['at'].to_i == (delay.to_i + Time.now.to_i) + end + end + + failure_message do |migration| + "Migration `#{migration}` with args `#{expected.inspect}` " \ + 'not scheduled in expected time!' 
+ end +end diff --git a/spec/workers/stage_update_worker_spec.rb b/spec/workers/stage_update_worker_spec.rb new file mode 100644 index 00000000000..7bc76c79464 --- /dev/null +++ b/spec/workers/stage_update_worker_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' + +describe StageUpdateWorker do + describe '#perform' do + context 'when stage exists' do + let(:stage) { create(:ci_stage_entity) } + + it 'updates stage status' do + expect_any_instance_of(Ci::Stage).to receive(:update_status) + + described_class.new.perform(stage.id) + end + end + + context 'when stage does not exist' do + it 'does not raise exception' do + expect { described_class.new.perform(123) } + .not_to raise_error + end + end + end +end
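The StageUpdateWorker implementation itself does not appear in this part of the diff. As a minimal sketch consistent with the spec above (queue and retry configuration omitted, so this is an assumption rather than the shipped code), the worker only needs to look the stage up and delegate to Ci::Stage#update_status:

    class StageUpdateWorker
      include Sidekiq::Worker

      def perform(stage_id)
        # find_by returns nil for unknown ids, which is what lets the
        # 'when stage does not exist' example pass without rescuing
        # ActiveRecord::RecordNotFound.
        Ci::Stage.find_by(id: stage_id).try(:update_status)
      end
    end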