-rw-r--r--  CONTRIBUTING.md | 2
-rw-r--r--  Gemfile.rails5.lock | 71
-rw-r--r--  app/assets/javascripts/ide/components/activity_bar.vue | 14
-rw-r--r--  app/assets/javascripts/notes/constants.js | 1
-rw-r--r--  app/assets/javascripts/notes/stores/collapse_utils.js | 108
-rw-r--r--  app/assets/javascripts/notes/stores/getters.js | 4
-rw-r--r--  app/assets/stylesheets/bootstrap_migration.scss | 5
-rw-r--r--  app/assets/stylesheets/pages/repo.scss | 19
-rw-r--r--  app/assets/stylesheets/print.scss | 5
-rw-r--r--  app/controllers/projects/lfs_storage_controller.rb | 2
-rw-r--r--  app/helpers/projects_helper.rb | 8
-rw-r--r--  app/models/ci/build.rb | 13
-rw-r--r--  app/models/project.rb | 6
-rw-r--r--  app/services/pages_service.rb | 15
-rw-r--r--  app/services/projects/update_service.rb | 8
-rw-r--r--  app/uploaders/object_storage.rb | 21
-rw-r--r--  app/views/projects/_home_panel.html.haml | 4
-rw-r--r--  app/views/projects/buttons/_xcode_link.html.haml | 2
-rw-r--r--  changelogs/unreleased/45820-add-xcode-link.yml | 5
-rw-r--r--  changelogs/unreleased/46452-nomethoderror-undefined-method-previous_changes-for-nil-nilclass.yml | 5
-rw-r--r--  changelogs/unreleased/add-background-migrations-for-not-archived-traces.yml | 5
-rw-r--r--  changelogs/unreleased/gh-importer-transactions.yml | 5
-rw-r--r--  changelogs/unreleased/jivl-smarter-system-notes.yml | 5
-rw-r--r--  changelogs/unreleased/optimise-pages-service-calling.yml | 5
-rw-r--r--  changelogs/unreleased/presigned-multipart-uploads.yml | 5
-rw-r--r--  changelogs/unreleased/rails5-fix-46236.yml | 5
-rw-r--r--  changelogs/unreleased/remove-unused-query-in-hooks.yml | 5
-rw-r--r--  changelogs/unreleased/sh-add-uncached-query-limiter.yml | 5
-rw-r--r--  changelogs/unreleased/sh-fix-pipeline-jobs-nplus-one.yml | 5
-rw-r--r--  config/initializers/artifacts_direct_upload_support.rb | 7
-rw-r--r--  config/initializers/direct_upload_support.rb | 19
-rw-r--r--  config/initializers/postgresql_opclasses_support.rb | 9
-rw-r--r--  db/migrate/20160226114608_add_trigram_indexes_for_searching.rb | 7
-rw-r--r--  db/migrate/20170622135728_add_unique_constraint_to_ci_variables.rb | 11
-rw-r--r--  db/migrate/20171106155656_turn_issues_due_date_index_to_partial_index.rb | 6
-rw-r--r--  db/migrate/20180201110056_add_foreign_keys_to_todos.rb | 2
-rw-r--r--  db/post_migrate/20180529152628_schedule_to_archive_legacy_traces.rb | 35
-rw-r--r--  doc/administration/job_artifacts.md | 3
-rw-r--r--  doc/development/query_recorder.md | 13
-rw-r--r--  doc/topics/autodevops/index.md | 4
-rw-r--r--  lib/api/issues.rb | 2
-rw-r--r--  lib/api/jobs.rb | 1
-rw-r--r--  lib/api/runner.rb | 2
-rw-r--r--  lib/gitlab/auth.rb | 8
-rw-r--r--  lib/gitlab/background_migration/archive_legacy_traces.rb | 24
-rw-r--r--  lib/gitlab/git/repository.rb | 5
-rw-r--r--  lib/gitlab/github_import/importer/pull_request_importer.rb | 56
-rw-r--r--  lib/gitlab/utils/override.rb | 16
-rw-r--r--  lib/object_storage/direct_upload.rb | 166
-rw-r--r--  lib/tasks/gitlab/traces.rake | 4
-rw-r--r--  spec/features/projects/jobs/user_browses_job_spec.rb | 2
-rw-r--r--  spec/helpers/projects_helper_spec.rb | 42
-rw-r--r--  spec/initializers/artifacts_direct_upload_support_spec.rb | 71
-rw-r--r--  spec/initializers/direct_upload_support_spec.rb | 90
-rw-r--r--  spec/javascripts/notes/mock_data.js | 578
-rw-r--r--  spec/javascripts/notes/stores/collapse_utils_spec.js | 46
-rw-r--r--  spec/javascripts/notes/stores/getters_spec.js | 19
-rw-r--r--  spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb | 59
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/github_import/importer/issue_importer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb | 85
-rw-r--r--  spec/lib/gitlab/utils/override_spec.rb | 170
-rw-r--r--  spec/lib/object_storage/direct_upload_spec.rb | 164
-rw-r--r--  spec/migrations/schedule_to_archive_legacy_traces_spec.rb | 45
-rw-r--r--  spec/models/ci/build_spec.rb | 72
-rw-r--r--  spec/requests/api/issues_spec.rb | 8
-rw-r--r--  spec/requests/api/jobs_spec.rb | 12
-rw-r--r--  spec/requests/api/runner_spec.rb | 1
-rw-r--r--  spec/requests/lfs_http_spec.rb | 1
-rw-r--r--  spec/services/pages_service_spec.rb | 53
-rw-r--r--  spec/services/projects/update_service_spec.rb | 4
-rw-r--r--  spec/support/helpers/query_recorder.rb | 7
-rw-r--r--  spec/support/helpers/stub_object_storage.rb | 12
-rw-r--r--  spec/support/matchers/exceed_query_limit.rb | 65
-rw-r--r--  spec/support/trace/trace_helpers.rb | 27
-rw-r--r--  spec/uploaders/object_storage_spec.rb | 134
76 files changed, 2159 insertions(+), 387 deletions(-)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 64470a1f087..e78615e3c29 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -182,7 +182,7 @@ Assigning a team label makes sure issues get the attention of the appropriate
people.
The current team labels are ~Distribution, ~"CI/CD", ~Discussion, ~Documentation, ~Quality,
-~Geo, ~Gitaly, ~Monitoring, ~Platform, ~Release, ~"Security Products" and ~"UX".
+~Geo, ~Gitaly, ~Monitoring, ~Platform, ~Release, ~"Security Products", ~"Configuration", and ~"UX".
The descriptions on the [labels page][labels-page] explain what falls under the
responsibility of each team.
diff --git a/Gemfile.rails5.lock b/Gemfile.rails5.lock
index af7305619eb..14ea3e4519c 100644
--- a/Gemfile.rails5.lock
+++ b/Gemfile.rails5.lock
@@ -72,8 +72,6 @@ GEM
attr_encrypted (3.1.0)
encryptor (~> 3.0.0)
attr_required (1.0.1)
- autoprefixer-rails (8.1.0.1)
- execjs
awesome_print (1.2.0)
axiom-types (0.1.1)
descendants_tracker (~> 0.0.4)
@@ -93,9 +91,6 @@ GEM
binding_of_caller (0.7.3)
debug_inspector (>= 0.0.1)
blankslate (2.1.2.4)
- bootstrap-sass (3.3.7)
- autoprefixer-rails (>= 5.2.1)
- sass (>= 3.3.4)
bootstrap_form (2.7.0)
brakeman (4.2.1)
browser (2.5.3)
@@ -175,7 +170,7 @@ GEM
diff-lcs (1.3)
diffy (3.1.0)
docile (1.1.5)
- domain_name (0.5.20170404)
+ domain_name (0.5.20180417)
unf (>= 0.0.5, < 1.0.0)
doorkeeper (4.3.1)
railties (>= 4.2)
@@ -185,9 +180,10 @@ GEM
dropzonejs-rails (0.7.4)
rails (> 3.1)
email_reply_trimmer (0.1.10)
- email_spec (1.6.0)
+ email_spec (2.2.0)
+ htmlentities (~> 4.3.3)
launchy (~> 2.1)
- mail (~> 2.2)
+ mail (~> 2.7)
encryptor (3.0.0)
equalizer (0.0.11)
erubis (2.7.0)
@@ -288,7 +284,7 @@ GEM
gettext_i18n_rails (>= 0.7.1)
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
- gitaly-proto (0.99.0)
+ gitaly-proto (0.100.0)
google-protobuf (~> 3.1)
grpc (~> 1.10)
github-linguist (5.3.3)
@@ -365,9 +361,9 @@ GEM
grape-entity (0.7.1)
activesupport (>= 4.0)
multi_json (>= 1.3.2)
- grape-route-helpers (2.1.0)
+ grape-path-helpers (1.0.0)
activesupport
- grape (>= 0.16.0)
+ grape (~> 1.0)
rake
grape_logging (1.7.0)
grape
@@ -417,6 +413,7 @@ GEM
httpclient (2.8.3)
i18n (1.0.1)
concurrent-ruby (~> 1.0)
+ icalendar (2.4.1)
ice_nine (0.11.2)
influxdb (0.5.3)
ipaddress (0.8.3)
@@ -450,9 +447,9 @@ GEM
kgio (2.11.2)
knapsack (1.16.0)
rake
- kubeclient (3.0.0)
+ kubeclient (3.1.1)
http (~> 2.2.2)
- recursive-open-struct (~> 1.0.4)
+ recursive-open-struct (~> 1.0, >= 1.0.4)
rest-client (~> 2.0)
launchy (2.4.3)
addressable (~> 2.3)
@@ -521,15 +518,16 @@ GEM
multi_json (~> 1.3)
multi_xml (~> 0.5)
rack (>= 1.2, < 3)
- octokit (4.8.0)
+ octokit (4.9.0)
sawyer (~> 0.8.0, >= 0.5.3)
omniauth (1.8.1)
hashie (>= 3.4.6, < 3.6.0)
rack (>= 1.6.2, < 3)
omniauth-auth0 (2.0.0)
omniauth-oauth2 (~> 1.4)
- omniauth-authentiq (0.3.1)
- omniauth-oauth2 (~> 1.3, >= 1.3.1)
+ omniauth-authentiq (0.3.3)
+ jwt (>= 1.5)
+ omniauth-oauth2 (>= 1.5)
omniauth-azure-oauth2 (0.0.9)
jwt (~> 1.0)
omniauth (~> 1.0)
@@ -628,7 +626,7 @@ GEM
parser
unparser
procto (0.0.3)
- prometheus-client-mmap (0.9.2)
+ prometheus-client-mmap (0.9.3)
pry (0.11.3)
coderay (~> 1.1.0)
method_source (~> 0.9.0)
@@ -702,11 +700,11 @@ GEM
ffi
rbnacl-libsodium (1.0.16)
rbnacl (>= 3.0.1)
- rdoc (4.3.0)
+ rdoc (6.0.4)
re2 (1.1.1)
recaptcha (3.4.0)
json
- recursive-open-struct (1.0.5)
+ recursive-open-struct (1.1.0)
redcarpet (3.4.0)
redis (3.3.5)
redis-actionpack (5.0.2)
@@ -716,8 +714,8 @@ GEM
redis-activesupport (5.0.4)
activesupport (>= 3, < 6)
redis-store (>= 1.3, < 2)
- redis-namespace (1.5.3)
- redis (~> 3.0, >= 3.0.4)
+ redis-namespace (1.6.0)
+ redis (>= 3.0.4)
redis-rack (2.0.4)
rack (>= 1.5, < 3)
redis-store (>= 1.2, < 2)
@@ -836,7 +834,7 @@ GEM
activesupport (>= 3.1)
select2-rails (3.5.10)
thor (~> 0.14)
- selenium-webdriver (3.11.0)
+ selenium-webdriver (3.12.0)
childprocess (~> 0.5)
rubyzip (~> 1.2)
sentry-raven (2.7.2)
@@ -986,7 +984,7 @@ DEPENDENCIES
asciidoctor-plantuml (= 0.0.8)
asset_sync (~> 2.4)
attr_encrypted (~> 3.1.0)
- awesome_print (~> 1.2.0)
+ awesome_print
babosa (~> 1.0.2)
base32 (~> 0.3.0)
batch-loader (~> 1.2.1)
@@ -994,7 +992,6 @@ DEPENDENCIES
benchmark-ips (~> 2.3.0)
better_errors (~> 2.1.0)
binding_of_caller (~> 0.7.2)
- bootstrap-sass (~> 3.3.0)
bootstrap_form (~> 2.7.0)
brakeman (~> 4.2)
browser (~> 2.2)
@@ -1021,7 +1018,7 @@ DEPENDENCIES
doorkeeper-openid_connect (~> 1.3)
dropzonejs-rails (~> 0.7.1)
email_reply_trimmer (~> 0.1)
- email_spec (~> 1.6.0)
+ email_spec (~> 2.2.0)
factory_bot_rails (~> 4.8.2)
faraday (~> 0.12)
fast_blank
@@ -1045,7 +1042,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.3)
- gitaly-proto (~> 0.99.0)
+ gitaly-proto (~> 0.100.0)
github-linguist (~> 5.3.3)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-gollum-lib (~> 4.2)
@@ -1059,7 +1056,7 @@ DEPENDENCIES
gpgme
grape (~> 1.0)
grape-entity (~> 0.7.1)
- grape-route-helpers (~> 2.1.0)
+ grape-path-helpers (~> 1.0)
grape_logging (~> 1.7)
grpc (~> 1.11.0)
haml_lint (~> 0.26.0)
@@ -1070,6 +1067,7 @@ DEPENDENCIES
html-pipeline (~> 2.7.1)
html2text
httparty (~> 0.13.3)
+ icalendar
influxdb (~> 0.2)
jira-ruby (~> 1.4)
jquery-atwho-rails (~> 1.3.2)
@@ -1077,7 +1075,7 @@ DEPENDENCIES
jwt (~> 1.5.6)
kaminari (~> 1.0)
knapsack (~> 1.16)
- kubeclient (~> 3.0)
+ kubeclient (~> 3.1.0)
letter_opener_web (~> 1.3.0)
license_finder (~> 3.1)
licensee (~> 8.9)
@@ -1092,10 +1090,10 @@ DEPENDENCIES
net-ssh (~> 4.2.0)
nokogiri (~> 1.8.2)
oauth2 (~> 1.4)
- octokit (~> 4.8)
+ octokit (~> 4.9)
omniauth (~> 1.8)
omniauth-auth0 (~> 2.0.0)
- omniauth-authentiq (~> 0.3.1)
+ omniauth-authentiq (~> 0.3.3)
omniauth-azure-oauth2 (~> 0.0.9)
omniauth-cas3 (~> 1.1.4)
omniauth-facebook (~> 4.0.0)
@@ -1118,7 +1116,7 @@ DEPENDENCIES
peek-sidekiq (~> 1.0.3)
pg (~> 0.18.2)
premailer-rails (~> 1.9.7)
- prometheus-client-mmap (~> 0.9.2)
+ prometheus-client-mmap (~> 0.9.3)
pry-byebug (~> 3.4.1)
pry-rails (~> 0.3.4)
rack-attack (~> 4.4.1)
@@ -1134,12 +1132,12 @@ DEPENDENCIES
rblineprof (~> 0.3.6)
rbnacl (~> 4.0)
rbnacl-libsodium
- rdoc (~> 4.2)
+ rdoc (~> 6.0)
re2 (~> 1.1.1)
recaptcha (~> 3.0)
redcarpet (~> 3.4)
redis (~> 3.2)
- redis-namespace (~> 1.5.2)
+ redis-namespace (~> 1.6.0)
redis-rails (~> 5.0.2)
request_store (~> 1.3)
responders (~> 2.0)
@@ -1154,6 +1152,7 @@ DEPENDENCIES
rubocop-rspec (~> 1.22.1)
ruby-fogbugz (~> 0.2.1)
ruby-prof (~> 0.17.0)
+ ruby-progressbar
ruby_parser (~> 3.8)
rufus-scheduler (~> 3.4)
rugged (~> 0.27)
@@ -1162,12 +1161,12 @@ DEPENDENCIES
scss_lint (~> 0.56.0)
seed-fu (~> 2.3.7)
select2-rails (~> 3.5.9)
- selenium-webdriver (~> 3.5)
+ selenium-webdriver (~> 3.12)
sentry-raven (~> 2.7)
settingslogic (~> 2.0.9)
sham_rack (~> 1.3.6)
shoulda-matchers (~> 3.1.2)
- sidekiq (~> 5.0)
+ sidekiq (~> 5.1)
sidekiq-cron (~> 0.6.0)
sidekiq-limit_fetch (~> 3.4)
simple_po_parser (~> 1.1.2)
@@ -1199,4 +1198,4 @@ DEPENDENCIES
wikicloth (= 0.8.1)
BUNDLED WITH
- 1.16.1
+ 1.16.2
diff --git a/app/assets/javascripts/ide/components/activity_bar.vue b/app/assets/javascripts/ide/components/activity_bar.vue
index 05dbc1410de..6efcad6adea 100644
--- a/app/assets/javascripts/ide/components/activity_bar.vue
+++ b/app/assets/javascripts/ide/components/activity_bar.vue
@@ -1,4 +1,5 @@
<script>
+import $ from 'jquery';
import { mapActions, mapGetters, mapState } from 'vuex';
import Icon from '~/vue_shared/components/icon.vue';
import tooltip from '~/vue_shared/directives/tooltip';
@@ -20,6 +21,13 @@ export default {
},
methods: {
...mapActions(['updateActivityBarView']),
+ changedActivityView(e, view) {
+ e.currentTarget.blur();
+
+ this.updateActivityBarView(view);
+
+ $(e.currentTarget).tooltip('hide');
+ },
},
activityBarViews,
};
@@ -54,7 +62,7 @@ export default {
:class="{
active: currentActivityView === $options.activityBarViews.edit
}"
- @click.prevent="updateActivityBarView($options.activityBarViews.edit)"
+ @click.prevent="changedActivityView($event, $options.activityBarViews.edit)"
:title="s__('IDE|Edit')"
:aria-label="s__('IDE|Edit')"
>
@@ -73,7 +81,7 @@ export default {
:class="{
active: currentActivityView === $options.activityBarViews.review
}"
- @click.prevent="updateActivityBarView($options.activityBarViews.review)"
+ @click.prevent="changedActivityView($event, $options.activityBarViews.review)"
:title="s__('IDE|Review')"
:aria-label="s__('IDE|Review')"
>
@@ -92,7 +100,7 @@ export default {
:class="{
active: currentActivityView === $options.activityBarViews.commit
}"
- @click.prevent="updateActivityBarView($options.activityBarViews.commit)"
+ @click.prevent="changedActivityView($event, $options.activityBarViews.commit)"
:title="s__('IDE|Commit')"
:aria-label="s__('IDE|Commit')"
>
diff --git a/app/assets/javascripts/notes/constants.js b/app/assets/javascripts/notes/constants.js
index c4de4826eda..5b5b1e89058 100644
--- a/app/assets/javascripts/notes/constants.js
+++ b/app/assets/javascripts/notes/constants.js
@@ -14,6 +14,7 @@ export const EPIC_NOTEABLE_TYPE = 'epic';
export const MERGE_REQUEST_NOTEABLE_TYPE = 'merge_request';
export const UNRESOLVE_NOTE_METHOD_NAME = 'delete';
export const RESOLVE_NOTE_METHOD_NAME = 'post';
+export const DESCRIPTION_TYPE = 'changed the description';
export const NOTEABLE_TYPE_MAPPING = {
Issue: ISSUE_NOTEABLE_TYPE,
diff --git a/app/assets/javascripts/notes/stores/collapse_utils.js b/app/assets/javascripts/notes/stores/collapse_utils.js
new file mode 100644
index 00000000000..fa4a1c56b20
--- /dev/null
+++ b/app/assets/javascripts/notes/stores/collapse_utils.js
@@ -0,0 +1,108 @@
+import { n__, s__, sprintf } from '~/locale';
+import { DESCRIPTION_TYPE } from '../constants';
+
+/**
+ * Returns a copy of a description system note with its text replaced by
+ * 'changed the description n times within m minutes'
+ */
+export const changeDescriptionNote = (note, descriptionChangedTimes, timeDifferenceMinutes) => {
+ const descriptionNote = Object.assign({}, note);
+
+ descriptionNote.note_html = sprintf(
+ s__(`MergeRequest|
+ %{paragraphStart}changed the description %{descriptionChangedTimes} times %{timeDifferenceMinutes}%{paragraphEnd}`),
+ {
+ paragraphStart: '<p dir="auto">',
+ paragraphEnd: '</p>',
+ descriptionChangedTimes,
+ timeDifferenceMinutes: n__('within %d minute ', 'within %d minutes ', timeDifferenceMinutes),
+ },
+ false,
+ );
+
+ descriptionNote.times_updated = descriptionChangedTimes;
+
+ return descriptionNote;
+};
+
+/**
+ * Checks the time difference between two notes based on their 'created_at' dates
+ * and returns it as an integer number of minutes, rounded up
+ */
+
+export const getTimeDifferenceMinutes = (noteBeggining, noteEnd) => {
+ const descriptionNoteBegin = new Date(noteBeggining.created_at);
+ const descriptionNoteEnd = new Date(noteEnd.created_at);
+ const timeDifferenceMinutes = (descriptionNoteEnd - descriptionNoteBegin) / 1000 / 60;
+
+ return Math.ceil(timeDifferenceMinutes);
+};
+
+/**
+ * Checks if a note is a system note and if the content is description
+ *
+ * @param {Object} note
+ * @returns {Boolean}
+ */
+export const isDescriptionSystemNote = note => note.system && note.note === DESCRIPTION_TYPE;
+
+/**
+ * Collapses system notes of the description type, e.g. "changed the description, n minutes ago".
+ * The notes are collapsed as long as they happen no more than 10 minutes apart from each other;
+ * anything can sit in between them, such as another type of system note
+ * (e.g. 'changed the weight') or a comment.
+ *
+ * @param {Array} notes
+ * @returns {Array}
+ */
+export const collapseSystemNotes = notes => {
+ let lastDescriptionSystemNote = null;
+ let lastDescriptionSystemNoteIndex = -1;
+ let descriptionChangedTimes = 1;
+
+ return notes.slice(0).reduce((acc, currentNote) => {
+ const note = currentNote.notes[0];
+
+ if (isDescriptionSystemNote(note)) {
+ // is it the first one?
+ if (!lastDescriptionSystemNote) {
+ lastDescriptionSystemNote = note;
+ lastDescriptionSystemNoteIndex = acc.length;
+ } else if (lastDescriptionSystemNote) {
+ const timeDifferenceMinutes = getTimeDifferenceMinutes(
+ lastDescriptionSystemNote,
+ note,
+ );
+
+ // are they less than 10 minutes apart?
+ if (timeDifferenceMinutes > 10) {
+ // reset counter
+ descriptionChangedTimes = 1;
+ // update the previous system note
+ lastDescriptionSystemNote = note;
+ lastDescriptionSystemNoteIndex = acc.length;
+ } else {
+ // increase counter
+ descriptionChangedTimes += 1;
+
+ // delete the previous one
+ acc.splice(lastDescriptionSystemNoteIndex, 1);
+
+ // replace the text of the current system note with the collapsed note.
+ currentNote.notes.splice(
+ 0,
+ 1,
+ changeDescriptionNote(note, descriptionChangedTimes, timeDifferenceMinutes),
+ );
+
+ // update the previous system note index
+ lastDescriptionSystemNoteIndex = acc.length;
+ }
+ }
+ }
+ acc.push(currentNote);
+ return acc;
+ }, []);
+};
+
+// for babel-rewire
+export default {};
diff --git a/app/assets/javascripts/notes/stores/getters.js b/app/assets/javascripts/notes/stores/getters.js
index 787be6f4c99..bc373e0d0fc 100644
--- a/app/assets/javascripts/notes/stores/getters.js
+++ b/app/assets/javascripts/notes/stores/getters.js
@@ -1,6 +1,8 @@
import _ from 'underscore';
+import { collapseSystemNotes } from './collapse_utils';
+
+export const notes = state => collapseSystemNotes(state.notes);
-export const notes = state => state.notes;
export const targetNoteHash = state => state.targetNoteHash;
export const getNotesData = state => state.notesData;
diff --git a/app/assets/stylesheets/bootstrap_migration.scss b/app/assets/stylesheets/bootstrap_migration.scss
index 5cf3bb4ae09..d5679177f8f 100644
--- a/app/assets/stylesheets/bootstrap_migration.scss
+++ b/app/assets/stylesheets/bootstrap_migration.scss
@@ -69,6 +69,11 @@ code {
background-color: inherit;
padding: unset;
}
+
+ .build-trace & {
+ background-color: inherit;
+ padding: inherit;
+ }
}
.code {
diff --git a/app/assets/stylesheets/pages/repo.scss b/app/assets/stylesheets/pages/repo.scss
index 6bbcb15329c..2b3cc33c8ae 100644
--- a/app/assets/stylesheets/pages/repo.scss
+++ b/app/assets/stylesheets/pages/repo.scss
@@ -183,7 +183,7 @@
svg {
position: relative;
- top: -1px;
+ top: -2px;
}
.ide-file-changed-icon {
@@ -458,6 +458,10 @@
width: auto;
margin-right: 0;
+ a {
+ height: 60px;
+ }
+
a:hover,
a:focus {
text-decoration: none;
@@ -718,9 +722,17 @@
}
.ide-new-btn {
+ .btn {
+ padding-top: 3px;
+ padding-bottom: 3px;
+ }
+
+ .dropdown {
+ display: flex;
+ }
+
.dropdown-toggle svg {
- margin-top: -2px;
- margin-bottom: 2px;
+ top: 0;
}
.dropdown-menu {
@@ -877,6 +889,7 @@
border-top: 1px solid transparent;
border-bottom: 1px solid transparent;
outline: 0;
+ cursor: pointer;
svg {
margin: 0 auto;
diff --git a/app/assets/stylesheets/print.scss b/app/assets/stylesheets/print.scss
index 90ccd4abd90..bb10928a037 100644
--- a/app/assets/stylesheets/print.scss
+++ b/app/assets/stylesheets/print.scss
@@ -22,9 +22,9 @@
header,
nav,
-nav.main-nav,
nav.navbar-collapse,
nav.navbar-collapse.collapse,
+.nav-sidebar,
.profiler-results,
.tree-ref-holder,
.tree-holder .breadcrumb,
@@ -38,7 +38,8 @@ ul.notes-form,
.edit-link,
.note-action-button,
.right-sidebar,
-.flash-container {
+.flash-container,
+#js-peek {
display: none !important;
}
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 43d8867a536..45c98d60822 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -18,7 +18,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
def upload_authorize
set_workhorse_internal_api_content_type
- authorized = LfsObjectUploader.workhorse_authorize
+ authorized = LfsObjectUploader.workhorse_authorize(has_length: true)
authorized.merge!(LfsOid: oid, LfsSize: size)
render json: authorized
diff --git a/app/helpers/projects_helper.rb b/app/helpers/projects_helper.rb
index 55078e1a2d2..dfca799a53d 100644
--- a/app/helpers/projects_helper.rb
+++ b/app/helpers/projects_helper.rb
@@ -238,6 +238,14 @@ module ProjectsHelper
"git push --set-upstream #{repository_url}/$(git rev-parse --show-toplevel | xargs basename).git $(git rev-parse --abbrev-ref HEAD)"
end
+ def show_xcode_link?(project = @project)
+ browser.platform.mac? && project.repository.xcode_project?
+ end
+
+ def xcode_uri_to_repo(project = @project)
+ "xcode://clone?repo=#{CGI.escape(default_url_to_repo(project))}"
+ end
+
private
def get_project_nav_tabs(project, current_user)
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 75fd55a8f7b..d93e7cb896f 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -55,6 +55,11 @@ module Ci
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
+
+ scope :without_archived_trace, ->() do
+ where('NOT EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').trace)
+ end
+
scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
@@ -144,6 +149,7 @@ module Ci
after_transition any => [:success] do |build|
build.run_after_commit do
BuildSuccessWorker.perform_async(id)
+ PagesWorker.perform_async(:deploy, id) if build.pages_generator?
end
end
@@ -183,6 +189,11 @@ module Ci
pipeline.manual_actions.where.not(name: name)
end
+ def pages_generator?
+ Gitlab.config.pages.enabled &&
+ self.name == 'pages'
+ end
+
def playable?
action? && (manual? || retryable?)
end
@@ -402,8 +413,6 @@ module Ci
build_data = Gitlab::DataBuilder::Build.build(self)
project.execute_hooks(build_data.dup, :job_hooks)
project.execute_services(build_data.dup, :job_hooks)
- PagesService.new(build_data).execute
- project.running_or_pending_build_count(force: true)
end
def browsable_artifacts?
diff --git a/app/models/project.rb b/app/models/project.rb
index a4df07b074a..b91a30400b7 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -1656,12 +1656,6 @@ class Project < ActiveRecord::Base
import_state.update_column(:jid, nil)
end
- def running_or_pending_build_count(force: false)
- Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
- builds.running_or_pending.count(:all)
- end
- end
-
# Lazy loading of the `pipeline_status` attribute
def pipeline_status
@pipeline_status ||= Gitlab::Cache::Ci::ProjectPipelineStatus.load_for_project(self)
diff --git a/app/services/pages_service.rb b/app/services/pages_service.rb
deleted file mode 100644
index 446eeb34d3b..00000000000
--- a/app/services/pages_service.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-class PagesService
- attr_reader :data
-
- def initialize(data)
- @data = data
- end
-
- def execute
- return unless Settings.pages.enabled
- return unless data[:build_name] == 'pages'
- return unless data[:build_status] == 'success'
-
- PagesWorker.perform_async(:deploy, data[:build_id])
- end
-end
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index 679f4a9cb62..0d1e2e758cd 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -17,6 +17,8 @@ module Projects
ensure_wiki_exists if enabling_wiki?
+ yield if block_given?
+
if project.update_attributes(params.except(:default_branch))
if project.previous_changes.include?('path')
project.rename_repo
@@ -36,7 +38,7 @@ module Projects
end
def run_auto_devops_pipeline?
- return false if project.repository.gitlab_ci_yml || !project.auto_devops.previous_changes.include?('enabled')
+ return false if project.repository.gitlab_ci_yml || !project.auto_devops&.previous_changes&.include?('enabled')
project.auto_devops.enabled? || (project.auto_devops.enabled.nil? && Gitlab::CurrentSettings.auto_devops_enabled?)
end
@@ -53,8 +55,8 @@ module Projects
def changing_default_branch?
new_branch = params[:default_branch]
- project.repository.exists? &&
- new_branch && new_branch != project.default_branch
+ new_branch && project.repository.exists? &&
+ new_branch != project.default_branch
end
def enabling_wiki?
diff --git a/app/uploaders/object_storage.rb b/app/uploaders/object_storage.rb
index 5bdca26a584..3bb2e1ea63a 100644
--- a/app/uploaders/object_storage.rb
+++ b/app/uploaders/object_storage.rb
@@ -10,8 +10,6 @@ module ObjectStorage
UnknownStoreError = Class.new(StandardError)
ObjectStorageUnavailable = Class.new(StandardError)
- DIRECT_UPLOAD_TIMEOUT = 4.hours
- DIRECT_UPLOAD_EXPIRE_OFFSET = 15.minutes
TMP_UPLOAD_PATH = 'tmp/uploads'.freeze
module Store
@@ -157,9 +155,9 @@ module ObjectStorage
model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
end
- def workhorse_authorize
+ def workhorse_authorize(has_length:, maximum_size: nil)
{
- RemoteObject: workhorse_remote_upload_options,
+ RemoteObject: workhorse_remote_upload_options(has_length: has_length, maximum_size: maximum_size),
TempPath: workhorse_local_upload_path
}.compact
end
@@ -168,23 +166,16 @@ module ObjectStorage
File.join(self.root, TMP_UPLOAD_PATH)
end
- def workhorse_remote_upload_options
+ def workhorse_remote_upload_options(has_length:, maximum_size: nil)
return unless self.object_store_enabled?
return unless self.direct_upload_enabled?
id = [CarrierWave.generate_cache_id, SecureRandom.hex].join('-')
upload_path = File.join(TMP_UPLOAD_PATH, id)
- connection = ::Fog::Storage.new(self.object_store_credentials)
- expire_at = Time.now + DIRECT_UPLOAD_TIMEOUT + DIRECT_UPLOAD_EXPIRE_OFFSET
- options = { 'Content-Type' => 'application/octet-stream' }
+ direct_upload = ObjectStorage::DirectUpload.new(self.object_store_credentials, remote_store_path, upload_path,
+ has_length: has_length, maximum_size: maximum_size)
- {
- ID: id,
- Timeout: DIRECT_UPLOAD_TIMEOUT,
- GetURL: connection.get_object_url(remote_store_path, upload_path, expire_at),
- DeleteURL: connection.delete_object_url(remote_store_path, upload_path, expire_at),
- StoreURL: connection.put_object_url(remote_store_path, upload_path, expire_at, options)
- }
+ direct_upload.to_hash.merge(ID: id)
end
end
diff --git a/app/views/projects/_home_panel.html.haml b/app/views/projects/_home_panel.html.haml
index 075badb9e56..89940512bc6 100644
--- a/app/views/projects/_home_panel.html.haml
+++ b/app/views/projects/_home_panel.html.haml
@@ -42,6 +42,10 @@
.project-clone-holder
= render "shared/clone_panel"
+ - if show_xcode_link?(@project)
+ .project-action-button.project-xcode.inline
+ = render "projects/buttons/xcode_link"
+
- if current_user
- if can?(current_user, :download_code, @project)
= render 'projects/buttons/download', project: @project, ref: @ref
diff --git a/app/views/projects/buttons/_xcode_link.html.haml b/app/views/projects/buttons/_xcode_link.html.haml
new file mode 100644
index 00000000000..a8b32fb0ef5
--- /dev/null
+++ b/app/views/projects/buttons/_xcode_link.html.haml
@@ -0,0 +1,2 @@
+%a.btn.btn-default{ href: xcode_uri_to_repo(@project) }
+ = _("Open in Xcode")
diff --git a/changelogs/unreleased/45820-add-xcode-link.yml b/changelogs/unreleased/45820-add-xcode-link.yml
new file mode 100644
index 00000000000..9e61703ee10
--- /dev/null
+++ b/changelogs/unreleased/45820-add-xcode-link.yml
@@ -0,0 +1,5 @@
+---
+title: Add Open in Xcode link for Xcode repositories
+merge_request:
+author:
+type: added
diff --git a/changelogs/unreleased/46452-nomethoderror-undefined-method-previous_changes-for-nil-nilclass.yml b/changelogs/unreleased/46452-nomethoderror-undefined-method-previous_changes-for-nil-nilclass.yml
new file mode 100644
index 00000000000..89dee65f5a8
--- /dev/null
+++ b/changelogs/unreleased/46452-nomethoderror-undefined-method-previous_changes-for-nil-nilclass.yml
@@ -0,0 +1,5 @@
+---
+title: Check for nil AutoDevOps when saving project CI/CD settings.
+merge_request: 19190
+author:
+type: fixed
diff --git a/changelogs/unreleased/add-background-migrations-for-not-archived-traces.yml b/changelogs/unreleased/add-background-migrations-for-not-archived-traces.yml
new file mode 100644
index 00000000000..b1b23c477df
--- /dev/null
+++ b/changelogs/unreleased/add-background-migrations-for-not-archived-traces.yml
@@ -0,0 +1,5 @@
+---
+title: Add background migrations for archiving legacy job traces
+merge_request: 19194
+author:
+type: performance
diff --git a/changelogs/unreleased/gh-importer-transactions.yml b/changelogs/unreleased/gh-importer-transactions.yml
new file mode 100644
index 00000000000..1489d60a3fb
--- /dev/null
+++ b/changelogs/unreleased/gh-importer-transactions.yml
@@ -0,0 +1,5 @@
+---
+title: Move PR IO operations out of a transaction
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/jivl-smarter-system-notes.yml b/changelogs/unreleased/jivl-smarter-system-notes.yml
new file mode 100644
index 00000000000..e640981de9a
--- /dev/null
+++ b/changelogs/unreleased/jivl-smarter-system-notes.yml
@@ -0,0 +1,5 @@
+---
+title: Add support for smarter system notes
+merge_request: 17164
+author:
+type: changed
diff --git a/changelogs/unreleased/optimise-pages-service-calling.yml b/changelogs/unreleased/optimise-pages-service-calling.yml
new file mode 100644
index 00000000000..e017e6b01f1
--- /dev/null
+++ b/changelogs/unreleased/optimise-pages-service-calling.yml
@@ -0,0 +1,5 @@
+---
+title: Optimise PagesWorker usage
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/presigned-multipart-uploads.yml b/changelogs/unreleased/presigned-multipart-uploads.yml
new file mode 100644
index 00000000000..52fae6534fd
--- /dev/null
+++ b/changelogs/unreleased/presigned-multipart-uploads.yml
@@ -0,0 +1,5 @@
+---
+title: Support direct_upload with S3 Multipart uploads
+merge_request:
+author:
+type: added
diff --git a/changelogs/unreleased/rails5-fix-46236.yml b/changelogs/unreleased/rails5-fix-46236.yml
new file mode 100644
index 00000000000..9203b448bed
--- /dev/null
+++ b/changelogs/unreleased/rails5-fix-46236.yml
@@ -0,0 +1,5 @@
+---
+title: Support rails5 in postgres indexes function and fix some migrations
+merge_request: 19400
+author: Jasper Maes
+type: fixed
diff --git a/changelogs/unreleased/remove-unused-query-in-hooks.yml b/changelogs/unreleased/remove-unused-query-in-hooks.yml
new file mode 100644
index 00000000000..ef40b2db5a9
--- /dev/null
+++ b/changelogs/unreleased/remove-unused-query-in-hooks.yml
@@ -0,0 +1,5 @@
+---
+title: Remove unused running_or_pending_build_count
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-add-uncached-query-limiter.yml b/changelogs/unreleased/sh-add-uncached-query-limiter.yml
new file mode 100644
index 00000000000..4318338c229
--- /dev/null
+++ b/changelogs/unreleased/sh-add-uncached-query-limiter.yml
@@ -0,0 +1,5 @@
+---
+title: Remove N+1 query for author in issues API
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-fix-pipeline-jobs-nplus-one.yml b/changelogs/unreleased/sh-fix-pipeline-jobs-nplus-one.yml
new file mode 100644
index 00000000000..eac00f4fca6
--- /dev/null
+++ b/changelogs/unreleased/sh-fix-pipeline-jobs-nplus-one.yml
@@ -0,0 +1,5 @@
+---
+title: Eliminate N+1 queries for CI job artifacts in /api/projects/:id/pipelines/:pipeline_id/jobs
+merge_request:
+author:
+type: performance
diff --git a/config/initializers/artifacts_direct_upload_support.rb b/config/initializers/artifacts_direct_upload_support.rb
deleted file mode 100644
index d2bc35ea613..00000000000
--- a/config/initializers/artifacts_direct_upload_support.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-artifacts_object_store = Gitlab.config.artifacts.object_store
-
-if artifacts_object_store.enabled &&
- artifacts_object_store.direct_upload &&
- artifacts_object_store.connection&.provider.to_s != 'Google'
- raise "Only 'Google' is supported as a object storage provider when 'direct_upload' of artifacts is used"
-end
diff --git a/config/initializers/direct_upload_support.rb b/config/initializers/direct_upload_support.rb
new file mode 100644
index 00000000000..32fc8c8bc69
--- /dev/null
+++ b/config/initializers/direct_upload_support.rb
@@ -0,0 +1,19 @@
+class DirectUploadsValidator
+ SUPPORTED_DIRECT_UPLOAD_PROVIDERS = %w(Google AWS).freeze
+
+ ValidationError = Class.new(StandardError)
+
+ def verify!(object_store)
+ return unless object_store.enabled
+ return unless object_store.direct_upload
+ return if SUPPORTED_DIRECT_UPLOAD_PROVIDERS.include?(object_store.connection&.provider.to_s)
+
+ raise ValidationError, "Only #{SUPPORTED_DIRECT_UPLOAD_PROVIDERS.join(',')} are supported as object storage providers when 'direct_upload' is used"
+ end
+end
+
+DirectUploadsValidator.new.tap do |validator|
+ [Gitlab.config.artifacts, Gitlab.config.uploads, Gitlab.config.lfs].each do |uploader|
+ validator.verify!(uploader.object_store)
+ end
+end
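For a rough sense of what the new initializer accepts and rejects, here is a standalone Ruby sketch that mirrors its checks; the OpenStruct objects are stand-ins for the real Gitlab.config settings, and 'AzureRM' is just an example of an unsupported provider value.

```ruby
require 'ostruct'

SUPPORTED_PROVIDERS = %w(Google AWS).freeze

# Mirrors DirectUploadsValidator#verify!, but returns a boolean instead of raising.
def direct_upload_supported?(object_store)
  return true unless object_store.enabled
  return true unless object_store.direct_upload

  SUPPORTED_PROVIDERS.include?(object_store.connection&.provider.to_s)
end

google = OpenStruct.new(enabled: true, direct_upload: true,
                        connection: OpenStruct.new(provider: 'Google'))
azure  = OpenStruct.new(enabled: true, direct_upload: true,
                        connection: OpenStruct.new(provider: 'AzureRM'))

puts direct_upload_supported?(google) # => true
puts direct_upload_supported?(azure)  # => false (the real initializer raises ValidationError here)
```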
diff --git a/config/initializers/postgresql_opclasses_support.rb b/config/initializers/postgresql_opclasses_support.rb
index c2f3023b330..03bda44a630 100644
--- a/config/initializers/postgresql_opclasses_support.rb
+++ b/config/initializers/postgresql_opclasses_support.rb
@@ -107,8 +107,15 @@ module ActiveRecord
result.map do |row|
index_name = row[0]
- unique = row[1] == 't'
+ unique = if Gitlab.rails5?
+ row[1]
+ else
+ row[1] == 't'
+ end
indkey = row[2].split(" ")
+ if Gitlab.rails5?
+ indkey = indkey.map(&:to_i)
+ end
inddef = row[3]
oid = row[4]
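The Gitlab.rails5? branches above handle a difference in how the adapters hand back query results: the unique flag arrives as the string 't' on Rails 4 but as a real boolean on Rails 5, and the index key columns are cast to integers on Rails 5. A minimal standalone Ruby sketch of that handling, with made-up row values rather than the adapter's actual output:

```ruby
# One row of the index query as each adapter might hand it back (values made up).
rails4_row = ['index_foo_on_bar', 't',  '1 2', 'CREATE INDEX ...', '16387']
rails5_row = ['index_foo_on_bar', true, '1 2', 'CREATE INDEX ...', 16387]

def unique_index?(row, rails5:)
  rails5 ? row[1] : row[1] == 't'
end

def index_columns(row, rails5:)
  indkey = row[2].split(' ')
  rails5 ? indkey.map(&:to_i) : indkey
end

puts unique_index?(rails4_row, rails5: false) # => true
puts unique_index?(rails5_row, rails5: true)  # => true
p index_columns(rails4_row, rails5: false)    # => ["1", "2"]
p index_columns(rails5_row, rails5: true)     # => [1, 2]
```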
diff --git a/db/migrate/20160226114608_add_trigram_indexes_for_searching.rb b/db/migrate/20160226114608_add_trigram_indexes_for_searching.rb
index 375e389e07a..7aa79bf5e02 100644
--- a/db/migrate/20160226114608_add_trigram_indexes_for_searching.rb
+++ b/db/migrate/20160226114608_add_trigram_indexes_for_searching.rb
@@ -37,7 +37,12 @@ class AddTrigramIndexesForSearching < ActiveRecord::Migration
res = execute("SELECT true AS enabled FROM pg_available_extensions WHERE name = 'pg_trgm' AND installed_version IS NOT NULL;")
row = res.first
- row && row['enabled'] == 't' ? true : false
+ check = if Gitlab.rails5?
+ true
+ else
+ 't'
+ end
+ row && row['enabled'] == check ? true : false
end
def create_trigrams_extension
diff --git a/db/migrate/20170622135728_add_unique_constraint_to_ci_variables.rb b/db/migrate/20170622135728_add_unique_constraint_to_ci_variables.rb
index 8b2cc40ee59..787022b7bfe 100644
--- a/db/migrate/20170622135728_add_unique_constraint_to_ci_variables.rb
+++ b/db/migrate/20170622135728_add_unique_constraint_to_ci_variables.rb
@@ -2,12 +2,13 @@ class AddUniqueConstraintToCiVariables < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
+ INDEX_NAME = 'index_ci_variables_on_project_id_and_key_and_environment_scope'
disable_ddl_transaction!
def up
unless this_index_exists?
- add_concurrent_index(:ci_variables, columns, name: index_name, unique: true)
+ add_concurrent_index(:ci_variables, columns, name: INDEX_NAME, unique: true)
end
end
@@ -18,21 +19,17 @@ class AddUniqueConstraintToCiVariables < ActiveRecord::Migration
add_concurrent_index(:ci_variables, :project_id)
end
- remove_concurrent_index(:ci_variables, columns, name: index_name)
+ remove_concurrent_index(:ci_variables, columns, name: INDEX_NAME)
end
end
private
def this_index_exists?
- index_exists?(:ci_variables, columns, name: index_name)
+ index_exists?(:ci_variables, columns, name: INDEX_NAME)
end
def columns
@columns ||= [:project_id, :key, :environment_scope]
end
-
- def index_name
- 'index_ci_variables_on_project_id_and_key_and_environment_scope'
- end
end
diff --git a/db/migrate/20171106155656_turn_issues_due_date_index_to_partial_index.rb b/db/migrate/20171106155656_turn_issues_due_date_index_to_partial_index.rb
index e4bed778695..08784de4043 100644
--- a/db/migrate/20171106155656_turn_issues_due_date_index_to_partial_index.rb
+++ b/db/migrate/20171106155656_turn_issues_due_date_index_to_partial_index.rb
@@ -20,9 +20,7 @@ class TurnIssuesDueDateIndexToPartialIndex < ActiveRecord::Migration
name: NEW_INDEX_NAME
)
- # We set the column name to nil as otherwise Rails will ignore the custom
- # index name and remove the wrong index.
- remove_concurrent_index(:issues, nil, name: OLD_INDEX_NAME)
+ remove_concurrent_index_by_name(:issues, OLD_INDEX_NAME)
end
def down
@@ -32,6 +30,6 @@ class TurnIssuesDueDateIndexToPartialIndex < ActiveRecord::Migration
name: OLD_INDEX_NAME
)
- remove_concurrent_index(:issues, nil, name: NEW_INDEX_NAME)
+ remove_concurrent_index_by_name(:issues, NEW_INDEX_NAME)
end
end
diff --git a/db/migrate/20180201110056_add_foreign_keys_to_todos.rb b/db/migrate/20180201110056_add_foreign_keys_to_todos.rb
index b7c40f8c01a..020b0550321 100644
--- a/db/migrate/20180201110056_add_foreign_keys_to_todos.rb
+++ b/db/migrate/20180201110056_add_foreign_keys_to_todos.rb
@@ -31,7 +31,7 @@ class AddForeignKeysToTodos < ActiveRecord::Migration
end
def down
- remove_foreign_key :todos, :users
+ remove_foreign_key :todos, column: :user_id
remove_foreign_key :todos, column: :author_id
remove_foreign_key :todos, :notes
end
diff --git a/db/post_migrate/20180529152628_schedule_to_archive_legacy_traces.rb b/db/post_migrate/20180529152628_schedule_to_archive_legacy_traces.rb
new file mode 100644
index 00000000000..965cd3a8714
--- /dev/null
+++ b/db/post_migrate/20180529152628_schedule_to_archive_legacy_traces.rb
@@ -0,0 +1,35 @@
+class ScheduleToArchiveLegacyTraces < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ BATCH_SIZE = 5000
+ BACKGROUND_MIGRATION_CLASS = 'ArchiveLegacyTraces'
+
+ disable_ddl_transaction!
+
+ class Build < ActiveRecord::Base
+ include EachBatch
+ self.table_name = 'ci_builds'
+ self.inheritance_column = :_type_disabled # Disable STI
+
+ scope :type_build, -> { where(type: 'Ci::Build') }
+
+ scope :finished, -> { where(status: [:success, :failed, :canceled]) }
+
+ scope :without_archived_trace, -> do
+ where('NOT EXISTS (SELECT 1 FROM ci_job_artifacts WHERE ci_builds.id = ci_job_artifacts.job_id AND ci_job_artifacts.file_type = 3)')
+ end
+ end
+
+ def up
+ queue_background_migration_jobs_by_range_at_intervals(
+ ::ScheduleToArchiveLegacyTraces::Build.type_build.finished.without_archived_trace,
+ BACKGROUND_MIGRATION_CLASS,
+ 5.minutes,
+ batch_size: BATCH_SIZE)
+ end
+
+ def down
+ # noop
+ end
+end
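To get a feel for the cadence this scheduling sets up, here is a small back-of-the-envelope sketch in plain Ruby; the 1.2 million build count is a made-up figure, while the batch size and interval come from the migration above.

```ruby
builds_to_archive = 1_200_000 # made-up figure for illustration
batch_size        = 5_000     # BATCH_SIZE in the migration
interval_minutes  = 5         # the 5.minutes interval passed to the scheduler

jobs  = (builds_to_archive.to_f / batch_size).ceil
hours = jobs * interval_minutes / 60.0

puts jobs  # => 240 background migration jobs enqueued
puts hours # => 20.0 hours until the last job is due to run
```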
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index 77fe4d561a1..e59ab5a72e1 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -94,6 +94,7 @@ _The artifacts are stored by default in
> Available in [GitLab Premium](https://about.gitlab.com/products/) and
[GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
> Since version 10.6, available in [GitLab CE](https://about.gitlab.com/products/)
+> Since version 11.0, we support direct_upload to S3.
If you don't want to use the local disk where GitLab is installed to store the
artifacts, you can use an object storage like AWS S3 instead.
@@ -108,7 +109,7 @@ For source installations the following settings are nested under `artifacts:` an
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Artifacts will be stored| |
-| `direct_upload` | Set to true to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. Currently only `Google` provider is supported | `false` |
+| `direct_upload` | Set to true to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | |
diff --git a/doc/development/query_recorder.md b/doc/development/query_recorder.md
index 26d3355e94d..61e5e1afede 100644
--- a/doc/development/query_recorder.md
+++ b/doc/development/query_recorder.md
@@ -22,6 +22,19 @@ As an example you might create 5 issues in between counts, which would cause the
> **Note:** In some cases the query count might change slightly between runs for unrelated reasons. In this case you might need to test `exceed_query_limit(control_count + acceptable_change)`, but this should be avoided if possible.
+## Cached queries
+
+By default, QueryRecorder will ignore cached queries in the count. However, it may be better to count
+all queries to avoid introducing an N+1 query that may be masked by the statement cache. To do this,
+pass the `skip_cached` variable to `QueryRecorder` and use the `exceed_all_query_limit` matcher:
+
+it "avoids N+1 database queries" do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { visit_some_page }.count
+ create_list(:issue, 5)
+ expect { visit_some_page }.not_to exceed_all_query_limit(control_count)
+end
+```
+
## Finding the source of the query
It may be useful to identify the source of the queries by looking at the call backtrace.
diff --git a/doc/topics/autodevops/index.md b/doc/topics/autodevops/index.md
index efec365042a..1400b2e36fe 100644
--- a/doc/topics/autodevops/index.md
+++ b/doc/topics/autodevops/index.md
@@ -497,10 +497,10 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac
| `CANARY_ENABLED` | From GitLab 11.0, this variable can be used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). |
| `INCREMENTAL_ROLLOUT_ENABLED`| From GitLab 10.8, this variable can be used to enable an [incremental rollout](#incremental-rollout-to-production) of your application for the production environment. |
| `TEST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `test` job. If the variable is present, the job will not be created. |
-| `CODEQUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `codequality` job. If the variable is present, the job will not be created. |
+| `CODE_QUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `code_quality` job. If the variable is present, the job will not be created. |
| `SAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast` job. If the variable is present, the job will not be created. |
| `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. |
-| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast:container` job. If the variable is present, the job will not be created. |
+| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `container_scanning` job. If the variable is present, the job will not be created. |
| `REVIEW_DISABLED` | From GitLab 11.0, this variable can be used to disable the `review` and the manual `review:stop` job. If the variable is present, these jobs will not be created. |
| `DAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dast` job. If the variable is present, the job will not be created. |
| `PERFORMANCE_DISABLED` | From GitLab 11.0, this variable can be used to disable the `performance` job. If the variable is present, the job will not be created. |
diff --git a/lib/api/issues.rb b/lib/api/issues.rb
index b64f465ce56..25185d6edc8 100644
--- a/lib/api/issues.rb
+++ b/lib/api/issues.rb
@@ -16,7 +16,7 @@ module API
args[:scope] = args[:scope].underscore if args[:scope]
issues = IssuesFinder.new(current_user, args).execute
- .preload(:assignees, :labels, :notes, :timelogs, :project)
+ .preload(:assignees, :labels, :notes, :timelogs, :project, :author)
issues.reorder(args[:order_by] => args[:sort])
end
diff --git a/lib/api/jobs.rb b/lib/api/jobs.rb
index 54d1acbd412..e95b0dd5267 100644
--- a/lib/api/jobs.rb
+++ b/lib/api/jobs.rb
@@ -54,6 +54,7 @@ module API
pipeline = user_project.pipelines.find(params[:pipeline_id])
builds = pipeline.builds
builds = filter_builds(builds, params[:scope])
+ builds = builds.preload(:job_artifacts_archive)
present paginate(builds), with: Entities::Job
end
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index e9886c76870..db502697a19 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -205,7 +205,7 @@ module API
status 200
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
- JobArtifactUploader.workhorse_authorize
+ JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_artifacts_size)
end
desc 'Upload artifacts for job' do
diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb
index 0f7a7b0ce8d..7de66539848 100644
--- a/lib/gitlab/auth.rb
+++ b/lib/gitlab/auth.rb
@@ -240,7 +240,7 @@ module Gitlab
return unless login == 'gitlab-ci-token'
return unless password
- build = ::Ci::Build.running.find_by_token(password)
+ build = find_build_by_token(password)
return unless build
return unless build.project.builds_enabled?
@@ -301,6 +301,12 @@ module Gitlab
REGISTRY_SCOPES
end
+
+ private
+
+ def find_build_by_token(token)
+ ::Ci::Build.running.find_by_token(token)
+ end
end
end
end
diff --git a/lib/gitlab/background_migration/archive_legacy_traces.rb b/lib/gitlab/background_migration/archive_legacy_traces.rb
new file mode 100644
index 00000000000..5a4e5b2c471
--- /dev/null
+++ b/lib/gitlab/background_migration/archive_legacy_traces.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/AbcSize
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class ArchiveLegacyTraces
+ def perform(start_id, stop_id)
+ # This background migration directly refers to the ::Ci::Build model, which is defined in application code.
+ # In general, migration code should be isolated as much as possible in order to be idempotent.
+ # However, the `archive!` method is too complicated to replicate by copying its underlying code,
+ # so we use ::Ci::Build directly and will not change the `archive!` method until 11.1.
+ ::Ci::Build.finished.without_archived_trace
+ .where(id: start_id..stop_id).find_each do |build|
+ begin
+ build.trace.archive!
+ rescue => e
+ Rails.logger.error "Failed to archive live trace. id: #{build.id} message: #{e.message}"
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 4cbf20bfe76..7acf11e3c91 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -1397,6 +1397,11 @@ module Gitlab
def write_config(full_path:)
return unless full_path.present?
+ # This guard avoids Gitaly log/error spam
+ unless exists?
+ raise NoRepository, 'repository does not exist'
+ end
+
gitaly_migrate(:write_config) do |is_enabled|
if is_enabled
gitaly_repository_client.write_config(full_path: full_path)
diff --git a/lib/gitlab/github_import/importer/pull_request_importer.rb b/lib/gitlab/github_import/importer/pull_request_importer.rb
index 49d859f9624..b2f6cb7ad19 100644
--- a/lib/gitlab/github_import/importer/pull_request_importer.rb
+++ b/lib/gitlab/github_import/importer/pull_request_importer.rb
@@ -22,15 +22,22 @@ module Gitlab
end
def execute
- if (mr_id = create_merge_request)
- issuable_finder.cache_database_id(mr_id)
+ mr, already_exists = create_merge_request
+
+ if mr
+ insert_git_data(mr, already_exists)
+ issuable_finder.cache_database_id(mr.id)
end
end
# Creates the merge request and returns its ID.
#
# This method will return `nil` if the merge request could not be
- # created.
+ # created, otherwise it will return an Array containing the following
+ # values:
+ #
+ # 1. A MergeRequest instance.
+ # 2. A boolean indicating if the MR already exists.
def create_merge_request
author_id, author_found = user_finder.author_id_for(pull_request)
@@ -69,21 +76,42 @@ module Gitlab
merge_request_id = GithubImport
.insert_and_return_id(attributes, project.merge_requests)
- merge_request = project.merge_requests.find(merge_request_id)
-
- # These fields are set so we can create the correct merge request
- # diffs.
- merge_request.source_branch_sha = pull_request.source_branch_sha
- merge_request.target_branch_sha = pull_request.target_branch_sha
-
- merge_request.keep_around_commit
- merge_request.merge_request_diffs.create
-
- merge_request.id
+ [project.merge_requests.find(merge_request_id), false]
end
rescue ActiveRecord::InvalidForeignKey
# It's possible the project has been deleted since scheduling this
# job. In this case we'll just skip creating the merge request.
+ []
+ rescue ActiveRecord::RecordNotUnique
+ # It's possible we previously created the MR, but failed when updating
+ # the Git data. In this case we'll just continue working on the
+ # existing row.
+ [project.merge_requests.find_by(iid: pull_request.iid), true]
+ end
+
+ def insert_git_data(merge_request, already_exists = false)
+ # These fields are set so we can create the correct merge request
+ # diffs.
+ merge_request.source_branch_sha = pull_request.source_branch_sha
+ merge_request.target_branch_sha = pull_request.target_branch_sha
+
+ merge_request.keep_around_commit
+
+ # MR diffs normally use an "after_save" hook to pull data from Git.
+ # All of this happens in the transaction started by calling
+ # create/save/etc. This in turn can lead to these transactions being
+ # held open for much longer than necessary. To work around this we
+ # first save the diff, then populate it.
+ diff =
+ if already_exists
+ merge_request.merge_request_diffs.take
+ else
+ merge_request.merge_request_diffs.build
+ end
+
+ diff.importing = true
+ diff.save
+ diff.save_git_content
end
end
end
diff --git a/lib/gitlab/utils/override.rb b/lib/gitlab/utils/override.rb
index 8bf6bcb1fe2..7b2a62fed48 100644
--- a/lib/gitlab/utils/override.rb
+++ b/lib/gitlab/utils/override.rb
@@ -87,18 +87,28 @@ module Gitlab
end
def included(base = nil)
- return super if base.nil? # Rails concern, ignoring it
+ super
+
+ queue_verification(base)
+ end
+ alias_method :prepended, :included
+
+ def extended(mod)
super
+ queue_verification(mod.singleton_class)
+ end
+
+ def queue_verification(base)
+ return unless ENV['STATIC_VERIFICATION']
+
if base.is_a?(Class) # We could check for Class in `override`
# This could be `nil` if `override` was never called
Override.extensions[self]&.add_class(base)
end
end
- alias_method :prepended, :included
-
def self.extensions
@extensions ||= {}
end
diff --git a/lib/object_storage/direct_upload.rb b/lib/object_storage/direct_upload.rb
new file mode 100644
index 00000000000..61a69e7ffe4
--- /dev/null
+++ b/lib/object_storage/direct_upload.rb
@@ -0,0 +1,166 @@
+module ObjectStorage
+ #
+ # The DirectUpload class generates a set of presigned URLs
+ # that can be used to upload data to object storage from untrusted components (Workhorse, Runner).
+ #
+ # For Google it assumes that the platform supports variable Content-Length.
+ #
+ # For AWS it initiates a Multipart Upload and presigns a set of part uploads.
+ # The class calculates the best part size so that uploads up to the requested maximum size are possible.
+ # The number of generated parts will never go above 100,
+ # but we always try to keep the number of parts as low as possible.
+ # The part size is rounded up to a multiple of 5 MB.
+ #
+ class DirectUpload
+ include Gitlab::Utils::StrongMemoize
+
+ TIMEOUT = 4.hours
+ EXPIRE_OFFSET = 15.minutes
+
+ MAXIMUM_MULTIPART_PARTS = 100
+ MINIMUM_MULTIPART_SIZE = 5.megabytes
+
+ attr_reader :credentials, :bucket_name, :object_name
+ attr_reader :has_length, :maximum_size
+
+ def initialize(credentials, bucket_name, object_name, has_length:, maximum_size: nil)
+ unless has_length
+ raise ArgumentError, 'maximum_size has to be specified if length is unknown' unless maximum_size
+ end
+
+ @credentials = credentials
+ @bucket_name = bucket_name
+ @object_name = object_name
+ @has_length = has_length
+ @maximum_size = maximum_size
+ end
+
+ def to_hash
+ {
+ Timeout: TIMEOUT,
+ GetURL: get_url,
+ StoreURL: store_url,
+ DeleteURL: delete_url,
+ MultipartUpload: multipart_upload_hash
+ }.compact
+ end
+
+ def multipart_upload_hash
+ return unless requires_multipart_upload?
+
+ {
+ PartSize: rounded_multipart_part_size,
+ PartURLs: multipart_part_urls,
+ CompleteURL: multipart_complete_url,
+ AbortURL: multipart_abort_url
+ }
+ end
+
+ def provider
+ credentials[:provider].to_s
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html
+ def get_url
+ connection.get_object_url(bucket_name, object_name, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectDELETE.html
+ def delete_url
+ connection.delete_object_url(bucket_name, object_name, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
+ def store_url
+ connection.put_object_url(bucket_name, object_name, expire_at, upload_options)
+ end
+
+ def multipart_part_urls
+ Array.new(number_of_multipart_parts) do |part_index|
+ multipart_part_upload_url(part_index + 1)
+ end
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadUploadPart.html
+ def multipart_part_upload_url(part_number)
+ connection.signed_url({
+ method: 'PUT',
+ bucket_name: bucket_name,
+ object_name: object_name,
+ query: { uploadId: upload_id, partNumber: part_number },
+ headers: upload_options
+ }, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
+ def multipart_complete_url
+ connection.signed_url({
+ method: 'POST',
+ bucket_name: bucket_name,
+ object_name: object_name,
+ query: { uploadId: upload_id },
+ headers: { 'Content-Type' => 'application/xml' }
+ }, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadAbort.html
+ def multipart_abort_url
+ connection.signed_url({
+ method: 'DELETE',
+ bucket_name: bucket_name,
+ object_name: object_name,
+ query: { uploadId: upload_id }
+ }, expire_at)
+ end
+
+ private
+
+ def rounded_multipart_part_size
+ # round multipart_part_size up to the nearest multiple of MINIMUM_MULTIPART_SIZE (5 MB)
+ (multipart_part_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE * MINIMUM_MULTIPART_SIZE
+ end
+
+ def multipart_part_size
+ maximum_size / number_of_multipart_parts
+ end
+
+ def number_of_multipart_parts
+ [
+ # ceil-divide maximum_size by MINIMUM_MULTIPART_SIZE to get the number of parts needed
+ (maximum_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE,
+ MAXIMUM_MULTIPART_PARTS
+ ].min
+ end
+
+ def aws?
+ provider == 'AWS'
+ end
+
+ def requires_multipart_upload?
+ aws? && !has_length
+ end
+
+ def upload_id
+ return unless requires_multipart_upload?
+
+ strong_memoize(:upload_id) do
+ new_upload = connection.initiate_multipart_upload(bucket_name, object_name)
+ new_upload.body["UploadId"]
+ end
+ end
+
+ def expire_at
+ strong_memoize(:expire_at) do
+ Time.now + TIMEOUT + EXPIRE_OFFSET
+ end
+ end
+
+ def upload_options
+ { 'Content-Type' => 'application/octet-stream' }
+ end
+
+ def connection
+ @connection ||= ::Fog::Storage.new(credentials)
+ end
+ end
+end
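The multipart sizing above is easier to follow with concrete numbers. The sketch below restates the same arithmetic as standalone Ruby; only the two constants are taken from the class, and the 1 GB bound is an arbitrary example rather than a GitLab limit.

```ruby
# The two constants are copied from ObjectStorage::DirectUpload; the methods are
# a standalone restatement of its part-size arithmetic, for illustration only.
MAXIMUM_MULTIPART_PARTS = 100
MINIMUM_MULTIPART_SIZE  = 5 * 1024 * 1024 # 5 MB, without ActiveSupport's 5.megabytes

def number_of_parts(maximum_size)
  [
    # ceil-divide the maximum size by the minimum part size
    (maximum_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE,
    MAXIMUM_MULTIPART_PARTS
  ].min
end

def rounded_part_size(maximum_size)
  part_size = maximum_size / number_of_parts(maximum_size)
  # round up to the nearest multiple of the minimum part size
  (part_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE * MINIMUM_MULTIPART_SIZE
end

maximum_size = 1024 * 1024 * 1024 # e.g. an upload bounded at 1 GB
puts number_of_parts(maximum_size)                   # => 100
puts rounded_part_size(maximum_size) / (1024 * 1024) # => 15 MB per part
```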
diff --git a/lib/tasks/gitlab/traces.rake b/lib/tasks/gitlab/traces.rake
index fd2a4f2d11a..ddcca69711f 100644
--- a/lib/tasks/gitlab/traces.rake
+++ b/lib/tasks/gitlab/traces.rake
@@ -8,9 +8,7 @@ namespace :gitlab do
logger = Logger.new(STDOUT)
logger.info('Archiving legacy traces')
- Ci::Build.finished
- .where('NOT EXISTS (?)',
- Ci::JobArtifact.select(1).trace.where('ci_builds.id = ci_job_artifacts.job_id'))
+ Ci::Build.finished.without_archived_trace
.order(id: :asc)
.find_in_batches(batch_size: 1000) do |jobs|
job_ids = jobs.map { |job| [job.id] }
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index bff5bbe99af..ce0b38b7239 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -32,8 +32,6 @@ describe 'User browses a job', :js do
page.within('.erased') do
expect(page).to have_content('Job has been erased')
end
-
- expect(build.project.running_or_pending_build_count).to eq(build.project.builds.running_or_pending.count(:all))
end
context 'with a failed job' do
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index f8877b6d1aa..4e5391295b6 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -435,4 +435,46 @@ describe ProjectsHelper do
expect(helper.send(:git_user_name)).to eq('John \"A\" Doe53')
end
end
+
+ describe 'show_xcode_link' do
+ let!(:project) { create(:project) }
+ let(:mac_ua) { 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36' }
+ let(:ios_ua) { 'Mozilla/5.0 (iPad; CPU OS 5_1_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3' }
+
+ context 'when the repository is Xcode compatible' do
+ before do
+ allow(project.repository).to receive(:xcode_project?).and_return(true)
+ end
+
+ it 'returns false if the visitor is not using macOS' do
+ allow(helper).to receive(:browser).and_return(Browser.new(ios_ua))
+
+ expect(helper.show_xcode_link?(project)).to eq(false)
+ end
+
+ it 'returns true if the visitor is using macOS' do
+ allow(helper).to receive(:browser).and_return(Browser.new(mac_ua))
+
+ expect(helper.show_xcode_link?(project)).to eq(true)
+ end
+ end
+
+ context 'when the repository is not Xcode compatible' do
+ before do
+ allow(project.repository).to receive(:xcode_project?).and_return(false)
+ end
+
+ it 'returns false if the visitor is not using macOS' do
+ allow(helper).to receive(:browser).and_return(Browser.new(ios_ua))
+
+ expect(helper.show_xcode_link?(project)).to eq(false)
+ end
+
+ it 'returns false if the visitor is using macOS' do
+ allow(helper).to receive(:browser).and_return(Browser.new(mac_ua))
+
+ expect(helper.show_xcode_link?(project)).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/initializers/artifacts_direct_upload_support_spec.rb b/spec/initializers/artifacts_direct_upload_support_spec.rb
deleted file mode 100644
index bfb71da3388..00000000000
--- a/spec/initializers/artifacts_direct_upload_support_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-require 'spec_helper'
-
-describe 'Artifacts direct upload support' do
- subject do
- load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb')
- end
-
- let(:connection) do
- { provider: provider }
- end
-
- before do
- stub_artifacts_setting(
- object_store: {
- enabled: enabled,
- direct_upload: direct_upload,
- connection: connection
- })
- end
-
- context 'when object storage is enabled' do
- let(:enabled) { true }
-
- context 'when direct upload is enabled' do
- let(:direct_upload) { true }
-
- context 'when provider is Google' do
- let(:provider) { 'Google' }
-
- it 'succeeds' do
- expect { subject }.not_to raise_error
- end
- end
-
- context 'when connection is empty' do
- let(:connection) { nil }
-
- it 'raises an error' do
- expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
- end
- end
-
- context 'when other provider is used' do
- let(:provider) { 'AWS' }
-
- it 'raises an error' do
- expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
- end
- end
- end
-
- context 'when direct upload is disabled' do
- let(:direct_upload) { false }
- let(:provider) { 'AWS' }
-
- it 'succeeds' do
- expect { subject }.not_to raise_error
- end
- end
- end
-
- context 'when object storage is disabled' do
- let(:enabled) { false }
- let(:direct_upload) { false }
- let(:provider) { 'AWS' }
-
- it 'succeeds' do
- expect { subject }.not_to raise_error
- end
- end
-end
diff --git a/spec/initializers/direct_upload_support_spec.rb b/spec/initializers/direct_upload_support_spec.rb
new file mode 100644
index 00000000000..e51d404e030
--- /dev/null
+++ b/spec/initializers/direct_upload_support_spec.rb
@@ -0,0 +1,90 @@
+require 'spec_helper'
+
+describe 'Direct upload support' do
+ subject do
+ load Rails.root.join('config/initializers/direct_upload_support.rb')
+ end
+
+ where(:config_name) do
+ %w(lfs artifacts uploads)
+ end
+
+ with_them do
+ let(:connection) do
+ { provider: provider }
+ end
+
+ let(:object_store) do
+ {
+ enabled: enabled,
+ direct_upload: direct_upload,
+ connection: connection
+ }
+ end
+
+ before do
+ allow(Gitlab.config).to receive_messages(to_settings(config_name => {
+ object_store: object_store
+ }))
+ end
+
+ context 'when object storage is enabled' do
+ let(:enabled) { true }
+
+ context 'when direct upload is enabled' do
+ let(:direct_upload) { true }
+
+ context 'when provider is AWS' do
+ let(:provider) { 'AWS' }
+
+ it 'succeeds' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when provider is Google' do
+ let(:provider) { 'Google' }
+
+ it 'succeeds' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when connection is empty' do
+ let(:connection) { nil }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error /are supported as a object storage provider when 'direct_upload' is used/
+ end
+ end
+
+ context 'when other provider is used' do
+ let(:provider) { 'Rackspace' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error /are supported as a object storage provider when 'direct_upload' is used/
+ end
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ let(:direct_upload) { false }
+ let(:provider) { 'AWS' }
+
+ it 'succeeds' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when object storage is disabled' do
+ let(:enabled) { false }
+ let(:direct_upload) { false }
+ let(:provider) { 'Rackspace' }
+
+ it 'succeeds' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js
index bfe3a65feee..fa7adc32193 100644
--- a/spec/javascripts/notes/mock_data.js
+++ b/spec/javascripts/notes/mock_data.js
@@ -340,6 +340,79 @@ export const loggedOutnoteableData = {
'/gitlab-org/gitlab-ce/preview_markdown?quick_actions_target_id=98&quick_actions_target_type=Issue',
};
+export const collapseNotesMock = [
+ {
+ expanded: true,
+ id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+ individual_note: true,
+ notes: [
+ {
+ id: 1390,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: 'test',
+ path: '/root',
+ },
+ created_at: '2018-02-26T18:07:41.071Z',
+ updated_at: '2018-02-26T18:07:41.071Z',
+ system: true,
+ system_note_icon_name: 'pencil',
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: null,
+ human_access: 'Owner',
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false },
+ discussion_id: 'b97fb7bda470a65b3e009377a9032edec0a4dd05',
+ emoji_awardable: false,
+ path: '/h5bp/html5-boilerplate/notes/1057',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
+ },
+ ],
+ },
+ {
+ expanded: true,
+ id: 'ffde43f25984ad7f2b4275135e0e2846875336c0',
+ individual_note: true,
+ notes: [
+ {
+ id: 1391,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: 'test',
+ path: '/root',
+ },
+ created_at: '2018-02-26T18:13:24.071Z',
+ updated_at: '2018-02-26T18:13:24.071Z',
+ system: true,
+ system_note_icon_name: 'pencil',
+ noteable_id: 99,
+ noteable_type: 'Issue',
+ type: null,
+ human_access: 'Owner',
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false },
+ discussion_id: '3eb958b4d81dec207ec3537a2f3bd8b9f271bb34',
+ emoji_awardable: false,
+ path: '/h5bp/html5-boilerplate/notes/1057',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
+ },
+ ],
+ },
+];
+
export const INDIVIDUAL_NOTE_RESPONSE_MAP = {
GET: {
'/gitlab-org/gitlab-ce/issues/26/discussions.json': [
@@ -575,3 +648,508 @@ export function discussionNoteInterceptor(request, next) {
}),
);
}
+
+export const notesWithDescriptionChanges = [
+ {
+ id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ expanded: true,
+ notes: [
+ {
+ id: 901,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:36.117Z',
+ updated_at: '2018-05-29T12:05:36.117Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
+ note_html:
+ '<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/901',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ expanded: true,
+ notes: [
+ {
+ id: 902,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:58.694Z',
+ updated_at: '2018-05-29T12:05:58.694Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
+ note_html:
+ '<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/902',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '7f1feda384083eb31763366e6392399fde6f3f31',
+ reply_id: '7f1feda384083eb31763366e6392399fde6f3f31',
+ expanded: true,
+ notes: [
+ {
+ id: 903,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:05.772Z',
+ updated_at: '2018-05-29T12:06:05.772Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: '7f1feda384083eb31763366e6392399fde6f3f31',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_903&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/903',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ expanded: true,
+ notes: [
+ {
+ id: 904,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:16.112Z',
+ updated_at: '2018-05-29T12:06:16.112Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'Ullamcorper eget nulla facilisi etiam',
+ note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/904',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ expanded: true,
+ notes: [
+ {
+ id: 905,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:28.851Z',
+ updated_at: '2018-05-29T12:06:28.851Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/905',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ expanded: true,
+ notes: [
+ {
+ id: 906,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:20:02.925Z',
+ updated_at: '2018-05-29T12:20:02.925Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/906',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+];
+
+export const collapsedSystemNotes = [
+ {
+ id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ expanded: true,
+ notes: [
+ {
+ id: 901,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:36.117Z',
+ updated_at: '2018-05-29T12:05:36.117Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
+ note_html:
+ '<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/901',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ expanded: true,
+ notes: [
+ {
+ id: 902,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:58.694Z',
+ updated_at: '2018-05-29T12:05:58.694Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
+ note_html:
+ '<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/902',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ expanded: true,
+ notes: [
+ {
+ id: 904,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:16.112Z',
+ updated_at: '2018-05-29T12:06:16.112Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'Ullamcorper eget nulla facilisi etiam',
+ note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/904',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ expanded: true,
+ notes: [
+ {
+ id: 905,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:28.851Z',
+ updated_at: '2018-05-29T12:06:28.851Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '\n <p dir="auto">changed the description 2 times within 1 minute </p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/905',
+ times_updated: 2,
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ expanded: true,
+ notes: [
+ {
+ id: 906,
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:20:02.925Z',
+ updated_at: '2018-05-29T12:20:02.925Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/906',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+];
diff --git a/spec/javascripts/notes/stores/collapse_utils_spec.js b/spec/javascripts/notes/stores/collapse_utils_spec.js
new file mode 100644
index 00000000000..06a6aab932a
--- /dev/null
+++ b/spec/javascripts/notes/stores/collapse_utils_spec.js
@@ -0,0 +1,46 @@
+import {
+ isDescriptionSystemNote,
+ changeDescriptionNote,
+ getTimeDifferenceMinutes,
+ collapseSystemNotes,
+} from '~/notes/stores/collapse_utils';
+import {
+ notesWithDescriptionChanges,
+ collapsedSystemNotes,
+} from '../mock_data';
+
+describe('Collapse utils', () => {
+ const mockSystemNote = {
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ system: true,
+ created_at: '2018-05-14T21:28:00.000Z',
+ };
+
+ it('checks if a system note is of a description type', () => {
+ expect(isDescriptionSystemNote(mockSystemNote)).toEqual(true);
+ });
+
+ it('returns false when a system note is not of a description type', () => {
+ expect(isDescriptionSystemNote(Object.assign({}, mockSystemNote, { note: 'foo' }))).toEqual(false);
+ });
+
+ it('changes the description to contain the number of times it was changed', () => {
+ const changedNote = changeDescriptionNote(mockSystemNote, 3, 5);
+
+ expect(changedNote.times_updated).toEqual(3);
+ expect(changedNote.note_html.trim()).toContain('<p dir="auto">changed the description 3 times within 5 minutes </p>');
+ });
+
+ it('gets the time difference between two notes', () => {
+ const anotherSystemNote = {
+ created_at: '2018-05-14T21:33:00.000Z',
+ };
+
+ expect(getTimeDifferenceMinutes(mockSystemNote, anotherSystemNote)).toEqual(5);
+ });
+
+ it('collapses all description system notes made within 10 minutes of each other', () => {
+ expect(collapseSystemNotes(notesWithDescriptionChanges)).toEqual(collapsedSystemNotes);
+ });
+});
diff --git a/spec/javascripts/notes/stores/getters_spec.js b/spec/javascripts/notes/stores/getters_spec.js
index 8b2a8d2cd7a..e5550580bf8 100644
--- a/spec/javascripts/notes/stores/getters_spec.js
+++ b/spec/javascripts/notes/stores/getters_spec.js
@@ -1,8 +1,9 @@
import * as getters from '~/notes/stores/getters';
-import { notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data';
+import { notesDataMock, userDataMock, noteableDataMock, individualNote, collapseNotesMock } from '../mock_data';
describe('Getters Notes Store', () => {
let state;
+
beforeEach(() => {
state = {
notes: [individualNote],
@@ -20,6 +21,22 @@ describe('Getters Notes Store', () => {
});
});
+ describe('Collapsed notes', () => {
+ const stateCollapsedNotes = {
+ notes: collapseNotesMock,
+ targetNoteHash: 'hash',
+ lastFetchedAt: 'timestamp',
+
+ notesData: notesDataMock,
+ userData: userDataMock,
+ noteableData: noteableDataMock,
+ };
+
+ it('should return a single system note when a description was updated multiple times', () => {
+ expect(getters.notes(stateCollapsedNotes).length).toEqual(1);
+ });
+ });
+
describe('targetNoteHash', () => {
it('should return `targetNoteHash`', () => {
expect(getters.targetNoteHash(state)).toEqual('hash');
diff --git a/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb b/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb
new file mode 100644
index 00000000000..877c061d11b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::ArchiveLegacyTraces, :migration, schema: 20180529152628 do
+ include TraceHelpers
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:builds) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+
+ before do
+ namespaces.create!(id: 123, name: 'gitlab1', path: 'gitlab1')
+ projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
+ @build = builds.create!(id: 1, project_id: 123, status: 'success', type: 'Ci::Build')
+ end
+
+ context 'when trace file exists at the right place' do
+ before do
+ create_legacy_trace(@build, 'trace in file')
+ end
+
+ it 'correctly archives legacy traces' do
+ expect(job_artifacts.count).to eq(0)
+ expect(File.exist?(legacy_trace_path(@build))).to be_truthy
+
+ described_class.new.perform(1, 1)
+
+ expect(job_artifacts.count).to eq(1)
+ expect(File.exist?(legacy_trace_path(@build))).to be_falsy
+ expect(File.read(archived_trace_path(job_artifacts.first))).to eq('trace in file')
+ end
+ end
+
+ context 'when trace file does not exist at the right place' do
+ it 'does not raise errors or create a job artifact' do
+ expect { described_class.new.perform(1, 1) }.not_to raise_error
+
+ expect(job_artifacts.count).to eq(0)
+ end
+ end
+
+ context 'when trace data exists in the database' do
+ before do
+ create_legacy_trace_in_db(@build, 'trace in db')
+ end
+
+ it 'correctly archives legacy traces' do
+ expect(job_artifacts.count).to eq(0)
+ expect(@build.read_attribute(:trace)).not_to be_empty
+
+ described_class.new.perform(1, 1)
+
+ @build.reload
+ expect(job_artifacts.count).to eq(1)
+ expect(@build.read_attribute(:trace)).to be_nil
+ expect(File.read(archived_trace_path(job_artifacts.first))).to eq('trace in db')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 7a9621d9c78..20b0b2c53a0 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2002,6 +2002,18 @@ describe Gitlab::Git::Repository, seed_helper: true do
expect(config).to include("fullpath = #{repository_path}")
end
end
+
+ context 'repository does not exist' do
+ it 'raises NoRepository and does not call Gitaly WriteConfig' do
+ repository = Gitlab::Git::Repository.new('default', 'does/not/exist.git', '')
+
+ expect(repository.gitaly_repository_client).not_to receive(:write_config)
+
+ expect do
+ repository.write_config(full_path: 'foo/bar.git')
+ end.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
+ end
end
context "when gitaly_write_config is enabled" do
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index d34ca0b76b8..81fe97c1e49 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -180,12 +180,12 @@ describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cach
allow(importer.user_finder)
.to receive(:user_id_for)
- .ordered.with(issue.assignees[0])
+ .with(issue.assignees[0])
.and_return(4)
allow(importer.user_finder)
.to receive(:user_id_for)
- .ordered.with(issue.assignees[1])
+ .with(issue.assignees[1])
.and_return(5)
expect(Gitlab::Database)
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index 35f3fdf8304..6686b7ce0b5 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -40,13 +40,19 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
describe '#execute' do
it 'imports the pull request' do
+ mr = double(:merge_request, id: 10)
+
expect(importer)
.to receive(:create_merge_request)
- .and_return(10)
+ .and_return([mr, false])
+
+ expect(importer)
+ .to receive(:insert_git_data)
+ .with(mr, false)
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
.to receive(:cache_database_id)
- .with(10)
+ .with(mr.id)
importer.execute
end
@@ -99,18 +105,11 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
importer.create_merge_request
end
- it 'returns the ID of the created merge request' do
- id = importer.create_merge_request
-
- expect(id).to be_a_kind_of(Numeric)
- end
-
- it 'creates the merge request diffs' do
- importer.create_merge_request
-
- mr = project.merge_requests.take
+ it 'returns the created merge request' do
+ mr, exists = importer.create_merge_request
- expect(mr.merge_request_diffs.exists?).to eq(true)
+ expect(mr).to be_instance_of(MergeRequest)
+ expect(exists).to eq(false)
end
end
@@ -217,5 +216,65 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
expect { importer.create_merge_request }.not_to raise_error
end
end
+
+ context 'when the merge request already exists' do
+ before do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+ end
+
+ it 'returns the existing merge request' do
+ mr1, exists1 = importer.create_merge_request
+ mr2, exists2 = importer.create_merge_request
+
+ expect(mr2).to eq(mr1)
+ expect(exists1).to eq(false)
+ expect(exists2).to eq(true)
+ end
+ end
+ end
+
+ describe '#insert_git_data' do
+ before do
+ allow(importer.milestone_finder)
+ .to receive(:id_for)
+ .with(pull_request)
+ .and_return(milestone.id)
+
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+ end
+
+ it 'creates the merge request diffs' do
+ mr, exists = importer.create_merge_request
+
+ importer.insert_git_data(mr, exists)
+
+ expect(mr.merge_request_diffs.exists?).to eq(true)
+ end
+
+ it 'creates the merge request diff commits' do
+ mr, exists = importer.create_merge_request
+
+ importer.insert_git_data(mr, exists)
+
+ diff = mr.merge_request_diffs.take
+
+ expect(diff.merge_request_diff_commits.exists?).to eq(true)
+ end
end
end
diff --git a/spec/lib/gitlab/utils/override_spec.rb b/spec/lib/gitlab/utils/override_spec.rb
index 7c97cee982a..fc08ebcfc6d 100644
--- a/spec/lib/gitlab/utils/override_spec.rb
+++ b/spec/lib/gitlab/utils/override_spec.rb
@@ -1,7 +1,13 @@
-require 'spec_helper'
+require 'fast_spec_helper'
describe Gitlab::Utils::Override do
- let(:base) { Struct.new(:good) }
+ let(:base) do
+ Struct.new(:good) do
+ def self.good
+ 0
+ end
+ end
+ end
let(:derived) { Class.new(base).tap { |m| m.extend described_class } }
let(:extension) { Module.new.tap { |m| m.extend described_class } }
@@ -9,6 +15,14 @@ describe Gitlab::Utils::Override do
let(:prepending_class) { base.tap { |m| m.prepend extension } }
let(:including_class) { base.tap { |m| m.include extension } }
+ let(:prepending_class_methods) do
+ base.tap { |m| m.singleton_class.prepend extension }
+ end
+
+ let(:extending_class_methods) do
+ base.tap { |m| m.extend extension }
+ end
+
let(:klass) { subject }
def good(mod)
@@ -36,7 +50,7 @@ describe Gitlab::Utils::Override do
shared_examples 'checking as intended' do
it 'checks ok for overriding method' do
good(subject)
- result = klass.new(0).good
+ result = instance.good
expect(result).to eq(1)
described_class.verify!
@@ -45,7 +59,25 @@ describe Gitlab::Utils::Override do
it 'raises NotImplementedError when it is not overriding anything' do
expect do
bad(subject)
- klass.new(0).bad
+ instance.bad
+ described_class.verify!
+ end.to raise_error(NotImplementedError)
+ end
+ end
+
+ shared_examples 'checking as intended, nothing was overridden' do
+ it 'raises NotImplementedError because it is not overriding it' do
+ expect do
+ good(subject)
+ instance.good
+ described_class.verify!
+ end.to raise_error(NotImplementedError)
+ end
+
+ it 'raises NotImplementedError when it is not overriding anything' do
+ expect do
+ bad(subject)
+ instance.bad
described_class.verify!
end.to raise_error(NotImplementedError)
end
@@ -54,7 +86,7 @@ describe Gitlab::Utils::Override do
shared_examples 'nothing happened' do
it 'does not complain when it is overriding something' do
good(subject)
- result = klass.new(0).good
+ result = instance.good
expect(result).to eq(1)
described_class.verify!
@@ -62,7 +94,7 @@ describe Gitlab::Utils::Override do
it 'does not complain when it is not overriding anything' do
bad(subject)
- result = klass.new(0).bad
+ result = instance.bad
expect(result).to eq(true)
described_class.verify!
@@ -75,83 +107,97 @@ describe Gitlab::Utils::Override do
end
describe '#override' do
- context 'when STATIC_VERIFICATION is set' do
- before do
- stub_env('STATIC_VERIFICATION', 'true')
- end
+ context 'when instance is klass.new(0)' do
+ let(:instance) { klass.new(0) }
- context 'when subject is a class' do
- subject { derived }
+ context 'when STATIC_VERIFICATION is set' do
+ before do
+ stub_env('STATIC_VERIFICATION', 'true')
+ end
- it_behaves_like 'checking as intended'
- end
+ context 'when subject is a class' do
+ subject { derived }
+
+ it_behaves_like 'checking as intended'
+ end
+
+ context 'when subject is a module, and class is prepending it' do
+ subject { extension }
+ let(:klass) { prepending_class }
+
+ it_behaves_like 'checking as intended'
+ end
- context 'when subject is a module, and class is prepending it' do
- subject { extension }
- let(:klass) { prepending_class }
+ context 'when subject is a module, and class is including it' do
+ subject { extension }
+ let(:klass) { including_class }
- it_behaves_like 'checking as intended'
+ it_behaves_like 'checking as intended, nothing was overridden'
+ end
end
- context 'when subject is a module, and class is including it' do
- subject { extension }
- let(:klass) { including_class }
+ context 'when STATIC_VERIFICATION is not set' do
+ before do
+ stub_env('STATIC_VERIFICATION', nil)
+ end
- it 'raises NotImplementedError because it is not overriding it' do
- expect do
- good(subject)
- klass.new(0).good
- described_class.verify!
- end.to raise_error(NotImplementedError)
+ context 'when subject is a class' do
+ subject { derived }
+
+ it_behaves_like 'nothing happened'
end
- it 'raises NotImplementedError when it is not overriding anything' do
- expect do
- bad(subject)
- klass.new(0).bad
- described_class.verify!
- end.to raise_error(NotImplementedError)
+ context 'when subject is a module, and class is prepending it' do
+ subject { extension }
+ let(:klass) { prepending_class }
+
+ it_behaves_like 'nothing happened'
end
- end
- end
- end
- context 'when STATIC_VERIFICATION is not set' do
- before do
- stub_env('STATIC_VERIFICATION', nil)
- end
+ context 'when subject is a module, and class is including it' do
+ subject { extension }
+ let(:klass) { including_class }
- context 'when subject is a class' do
- subject { derived }
+ it 'does not complain when it is overriding something' do
+ good(subject)
+ result = instance.good
- it_behaves_like 'nothing happened'
- end
+ expect(result).to eq(0)
+ described_class.verify!
+ end
- context 'when subject is a module, and class is prepending it' do
- subject { extension }
- let(:klass) { prepending_class }
+ it 'does not complain when it is not overriding anything' do
+ bad(subject)
+ result = instance.bad
- it_behaves_like 'nothing happened'
+ expect(result).to eq(true)
+ described_class.verify!
+ end
+ end
+ end
end
- context 'when subject is a module, and class is including it' do
- subject { extension }
- let(:klass) { including_class }
+ context 'when instance is klass' do
+ let(:instance) { klass }
- it 'does not complain when it is overriding something' do
- good(subject)
- result = klass.new(0).good
+ context 'when STATIC_VERIFICATION is set' do
+ before do
+ stub_env('STATIC_VERIFICATION', 'true')
+ end
- expect(result).to eq(0)
- described_class.verify!
- end
+ context 'when subject is a module, and class is prepending it' do
+ subject { extension }
+ let(:klass) { prepending_class_methods }
- it 'does not complain when it is not overriding anything' do
- bad(subject)
- result = klass.new(0).bad
+ it_behaves_like 'checking as intended'
+ end
- expect(result).to eq(true)
- described_class.verify!
+ context 'when subject is a module, and class is extending it' do
+ subject { extension }
+ let(:klass) { extending_class_methods }
+
+ it_behaves_like 'checking as intended, nothing was overridden'
+ end
end
end
end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
new file mode 100644
index 00000000000..5187821e8f4
--- /dev/null
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -0,0 +1,164 @@
+require 'spec_helper'
+
+describe ObjectStorage::DirectUpload do
+ let(:credentials) do
+ {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+ }
+ end
+
+ let(:storage_url) { 'https://uploads.s3.amazonaws.com/' }
+
+ let(:bucket_name) { 'uploads' }
+ let(:object_name) { 'tmp/uploads/my-file' }
+ let(:maximum_size) { 1.gigabyte }
+
+ let(:direct_upload) { described_class.new(credentials, bucket_name, object_name, has_length: has_length, maximum_size: maximum_size) }
+
+ describe '#has_length' do
+ context 'is known' do
+ let(:has_length) { true }
+ let(:maximum_size) { nil }
+
+ it "maximum size is not required" do
+ expect { direct_upload }.not_to raise_error
+ end
+ end
+
+ context 'is unknown' do
+ let(:has_length) { false }
+
+ context 'and maximum size is specified' do
+ let(:maximum_size) { 1.gigabyte }
+
+ it "does not raise an error" do
+ expect { direct_upload }.not_to raise_error
+ end
+ end
+
+ context 'and maximum size is not specified' do
+ let(:maximum_size) { nil }
+
+ it "raises an error" do
+ expect { direct_upload }.to raise_error /maximum_size has to be specified if length is unknown/
+ end
+ end
+ end
+ end
+
+ describe '#to_hash' do
+ subject { direct_upload.to_hash }
+
+ shared_examples 'a valid upload' do
+ it "returns valid structure" do
+ expect(subject).to have_key(:Timeout)
+ expect(subject[:GetURL]).to start_with(storage_url)
+ expect(subject[:StoreURL]).to start_with(storage_url)
+ expect(subject[:DeleteURL]).to start_with(storage_url)
+ end
+ end
+
+ shared_examples 'a valid upload with multipart data' do
+ before do
+ stub_object_storage_multipart_init(storage_url, "myUpload")
+ end
+
+ it_behaves_like 'a valid upload'
+
+ it "returns valid structure" do
+ expect(subject).to have_key(:MultipartUpload)
+ expect(subject[:MultipartUpload]).to have_key(:PartSize)
+ expect(subject[:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+ expect(subject[:MultipartUpload][:PartURLs]).to all(include('uploadId=myUpload'))
+ expect(subject[:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+ expect(subject[:MultipartUpload][:CompleteURL]).to include('uploadId=myUpload')
+ expect(subject[:MultipartUpload][:AbortURL]).to start_with(storage_url)
+ expect(subject[:MultipartUpload][:AbortURL]).to include('uploadId=myUpload')
+ end
+ end
+
+ shared_examples 'a valid upload without multipart data' do
+ it_behaves_like 'a valid upload'
+
+ it "returns valid structure" do
+ expect(subject).not_to have_key(:MultipartUpload)
+ end
+ end
+
+ context 'when AWS is used' do
+ context 'when length is known' do
+ let(:has_length) { true }
+
+ it_behaves_like 'a valid upload without multipart data'
+ end
+
+ context 'when length is unknown' do
+ let(:has_length) { false }
+
+ it_behaves_like 'a valid upload with multipart data' do
+ context 'when maximum upload size is 10MB' do
+ let(:maximum_size) { 10.megabyte }
+
+ it 'returns only 2 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
+ end
+
+ it 'part size is the minimum, 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
+ end
+
+ context 'when maximum upload size is 12MB' do
+ let(:maximum_size) { 12.megabyte }
+
+ it 'returns only 3 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(3)
+ end
+
+ it 'part size is rounded up to 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
+ end
+
+ context 'when maximum upload size is 49GB' do
+ let(:maximum_size) { 49.gigabyte }
+
+ it 'returns maximum, 100 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+ end
+
+ it 'part size is rounded up to a multiple of 5MB (505MB)' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(505.megabyte)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when Google is used' do
+ let(:credentials) do
+ {
+ provider: 'Google',
+ google_storage_access_key_id: 'GOOGLE_ACCESS_KEY_ID',
+ google_storage_secret_access_key: 'GOOGLE_SECRET_ACCESS_KEY'
+ }
+ end
+
+ let(:storage_url) { 'https://storage.googleapis.com/uploads/' }
+
+ context 'when length is known' do
+ let(:has_length) { true }
+
+ it_behaves_like 'a valid upload without multipart data'
+ end
+
+ context 'when length is unknown' do
+ let(:has_length) { false }
+
+ it_behaves_like 'a valid upload without multipart data'
+ end
+ end
+ end
+end
diff --git a/spec/migrations/schedule_to_archive_legacy_traces_spec.rb b/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
new file mode 100644
index 00000000000..d3eac3c45ea
--- /dev/null
+++ b/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
@@ -0,0 +1,45 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20180529152628_schedule_to_archive_legacy_traces')
+
+describe ScheduleToArchiveLegacyTraces, :migration do
+ include TraceHelpers
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:builds) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+
+ before do
+ namespaces.create!(id: 123, name: 'gitlab1', path: 'gitlab1')
+ projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
+ @build_success = builds.create!(id: 1, project_id: 123, status: 'success', type: 'Ci::Build')
+ @build_failed = builds.create!(id: 2, project_id: 123, status: 'failed', type: 'Ci::Build')
+ @builds_canceled = builds.create!(id: 3, project_id: 123, status: 'canceled', type: 'Ci::Build')
+ @build_running = builds.create!(id: 4, project_id: 123, status: 'running', type: 'Ci::Build')
+
+ create_legacy_trace(@build_success, 'This job is done')
+ create_legacy_trace(@build_failed, 'This job is done')
+ create_legacy_trace(@builds_canceled, 'This job is done')
+ create_legacy_trace(@build_running, 'This job is not done yet')
+ end
+
+ it 'correctly archives legacy traces' do
+ expect(job_artifacts.count).to eq(0)
+ expect(File.exist?(legacy_trace_path(@build_success))).to be_truthy
+ expect(File.exist?(legacy_trace_path(@build_failed))).to be_truthy
+ expect(File.exist?(legacy_trace_path(@builds_canceled))).to be_truthy
+ expect(File.exist?(legacy_trace_path(@build_running))).to be_truthy
+
+ migrate!
+
+ expect(job_artifacts.count).to eq(3)
+ expect(File.exist?(legacy_trace_path(@build_success))).to be_falsy
+ expect(File.exist?(legacy_trace_path(@build_failed))).to be_falsy
+ expect(File.exist?(legacy_trace_path(@builds_canceled))).to be_falsy
+ expect(File.exist?(legacy_trace_path(@build_running))).to be_truthy
+ expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @build_success.id).first))).to be_truthy
+ expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @build_failed.id).first))).to be_truthy
+ expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @builds_canceled.id).first))).to be_truthy
+ expect(job_artifacts.where(job_id: @build_running.id)).not_to be_exist
+ end
+end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 66c9708b4cf..5e27cca6771 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -2506,4 +2506,76 @@ describe Ci::Build do
end
end
end
+
+ describe 'pages deployments' do
+ set(:build) { create(:ci_build, project: project, user: user) }
+
+ context 'when job is "pages"' do
+ before do
+ build.name = 'pages'
+ end
+
+ context 'when pages are enabled' do
+ before do
+ allow(Gitlab.config.pages).to receive_messages(enabled: true)
+ end
+
+ it 'is marked as pages generator' do
+ expect(build).to be_pages_generator
+ end
+
+ context 'job succeeds' do
+ it "calls pages worker" do
+ expect(PagesWorker).to receive(:perform_async).with(:deploy, build.id)
+
+ build.success!
+ end
+ end
+
+ context 'job fails' do
+ it "does not call pages worker" do
+ expect(PagesWorker).not_to receive(:perform_async)
+
+ build.drop!
+ end
+ end
+ end
+
+ context 'when pages are disabled' do
+ before do
+ allow(Gitlab.config.pages).to receive_messages(enabled: false)
+ end
+
+ it 'is not marked as pages generator' do
+ expect(build).not_to be_pages_generator
+ end
+
+ context 'job succeeds' do
+ it "does not call pages worker" do
+ expect(PagesWorker).not_to receive(:perform_async)
+
+ build.success!
+ end
+ end
+ end
+ end
+
+ context 'when job is not "pages"' do
+ before do
+ build.name = 'other-job'
+ end
+
+ it 'is not marked as pages generator' do
+ expect(build).not_to be_pages_generator
+ end
+
+ context 'job succeeds' do
+ it "does not call pages worker" do
+ expect(PagesWorker).not_to receive(:perform_async)
+
+ build.success
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb
index 4181f4ebbbe..a15d60aafe0 100644
--- a/spec/requests/api/issues_spec.rb
+++ b/spec/requests/api/issues_spec.rb
@@ -630,15 +630,17 @@ describe API::Issues do
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ get api("/projects/#{project.id}/issues", user)
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/issues", user)
end.count
- create(:issue, author: user, project: project)
+ create_list(:issue, 3, project: project)
expect do
get api("/projects/#{project.id}/issues", user)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control_count)
end
it 'returns 404 when project does not exist' do
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 45082e644ca..50d6f4b4d99 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -177,6 +177,18 @@ describe API::Jobs do
json_response.each { |job| expect(job['pipeline']['id']).to eq(pipeline.id) }
end
end
+
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
+ end.count
+
+ 3.times { create(:ci_build, :artifacts, pipeline: pipeline) }
+
+ expect do
+ get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
+ end.not_to exceed_all_query_limit(control_count)
+ end
end
context 'unauthorized user' do
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 319ac389083..c981a10ac38 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -1101,6 +1101,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).to have_key('MultipartUpload')
end
end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 79672fe1cc5..4d30b99262e 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -1021,6 +1021,7 @@ describe 'Git LFS API and storage' do
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).not_to have_key('MultipartUpload')
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
diff --git a/spec/services/pages_service_spec.rb b/spec/services/pages_service_spec.rb
deleted file mode 100644
index f8db6900a0a..00000000000
--- a/spec/services/pages_service_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-require 'spec_helper'
-
-describe PagesService do
- let(:build) { create(:ci_build) }
- let(:data) { Gitlab::DataBuilder::Build.build(build) }
- let(:service) { described_class.new(data) }
-
- before do
- allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
- end
-
- context 'execute asynchronously for pages job' do
- before do
- build.name = 'pages'
- end
-
- context 'on success' do
- before do
- build.success
- end
-
- it 'executes worker' do
- expect(PagesWorker).to receive(:perform_async)
- service.execute
- end
- end
-
- %w(pending running failed canceled).each do |status|
- context "on #{status}" do
- before do
- build.status = status
- end
-
- it 'does not execute worker' do
- expect(PagesWorker).not_to receive(:perform_async)
- service.execute
- end
- end
- end
- end
-
- context 'for other jobs' do
- before do
- build.name = 'other job'
- build.success
- end
-
- it 'does not execute worker' do
- expect(PagesWorker).not_to receive(:perform_async)
- service.execute
- end
- end
-end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 3e6073b9861..1f761bcbbad 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -275,6 +275,10 @@ describe Projects::UpdateService do
it { is_expected.to eq(false) }
end
+ context 'when auto devops is nil' do
+ it { is_expected.to eq(false) }
+ end
+
context 'when auto devops is explicitly enabled' do
before do
project.create_auto_devops!(enabled: true)
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index 28536bbef5e..7ce63375d34 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -1,10 +1,11 @@
module ActiveRecord
class QueryRecorder
- attr_reader :log, :cached
+ attr_reader :log, :skip_cached, :cached
- def initialize(&block)
+ def initialize(skip_cached: true, &block)
@log = []
@cached = []
+ @skip_cached = skip_cached
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
end
@@ -16,7 +17,7 @@ module ActiveRecord
def callback(name, start, finish, message_id, values)
show_backtrace(values) if ENV['QUERY_RECORDER_DEBUG']
- if values[:name]&.include?("CACHE")
+ if values[:name]&.include?("CACHE") && skip_cached
@cached << values[:sql]
elsif !values[:name]&.include?("SCHEMA")
@log << values[:sql]
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 19d744b959a..bceaf8277ee 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -45,4 +45,16 @@ module StubObjectStorage
remote_directory: 'uploads',
**params)
end
+
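+ # Stubs the S3 InitiateMultipartUpload API call so direct upload specs can
+ # exercise the multipart URLs; upload_id is echoed back in the stubbed response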
+ def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
+ stub_request(:post, %r{\A#{endpoint}tmp/uploads/[a-z0-9-]*\?uploads\z})
+ .to_return status: 200, body: <<-EOS.strip_heredoc
+ <?xml version="1.0" encoding="UTF-8"?>
+ <InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+ <Bucket>example-bucket</Bucket>
+ <Key>example-object</Key>
+ <UploadId>#{upload_id}</UploadId>
+ </InitiateMultipartUploadResult>
+ EOS
+ end
end
diff --git a/spec/support/matchers/exceed_query_limit.rb b/spec/support/matchers/exceed_query_limit.rb
index 88d22a3ddd9..cd042401f3a 100644
--- a/spec/support/matchers/exceed_query_limit.rb
+++ b/spec/support/matchers/exceed_query_limit.rb
@@ -1,17 +1,4 @@
-RSpec::Matchers.define :exceed_query_limit do |expected|
- supports_block_expectations
-
- match do |block|
- @subject_block = block
- actual_count > expected_count + threshold
- end
-
- failure_message_when_negated do |actual|
- threshold_message = threshold > 0 ? " (+#{@threshold})" : ''
- counts = "#{expected_count}#{threshold_message}"
- "Expected a maximum of #{counts} queries, got #{actual_count}:\n\n#{log_message}"
- end
-
+module ExceedQueryLimitHelpers
def with_threshold(threshold)
@threshold = threshold
self
@@ -43,7 +30,7 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
end
def recorder
- @recorder ||= ActiveRecord::QueryRecorder.new(&@subject_block)
+ @recorder ||= ActiveRecord::QueryRecorder.new(skip_cached: skip_cached, &@subject_block)
end
def count_queries(queries)
@@ -61,4 +48,52 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
@recorder.log_message
end
end
+
+ def skip_cached
+ true
+ end
+
+ def verify_count(&block)
+ @subject_block = block
+ actual_count > expected_count + threshold
+ end
+
+ def failure_message
+ threshold_message = threshold > 0 ? " (+#{@threshold})" : ''
+ counts = "#{expected_count}#{threshold_message}"
+ "Expected a maximum of #{counts} queries, got #{actual_count}:\n\n#{log_message}"
+ end
+end
+
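+# Includes cached queries in the query count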
+RSpec::Matchers.define :exceed_all_query_limit do |expected|
+ supports_block_expectations
+
+ include ExceedQueryLimitHelpers
+
+ match do |block|
+ verify_count(&block)
+ end
+
+ failure_message_when_negated do |actual|
+ failure_message
+ end
+
+ def skip_cached
+ false
+ end
+end
+
+# Excludes cached queries from the query count
+RSpec::Matchers.define :exceed_query_limit do |expected|
+ supports_block_expectations
+
+ include ExceedQueryLimitHelpers
+
+ match do |block|
+ verify_count(&block)
+ end
+
+ failure_message_when_negated do |actual|
+ failure_message
+ end
end
diff --git a/spec/support/trace/trace_helpers.rb b/spec/support/trace/trace_helpers.rb
new file mode 100644
index 00000000000..c7802bbcb94
--- /dev/null
+++ b/spec/support/trace/trace_helpers.rb
@@ -0,0 +1,27 @@
+module TraceHelpers
+ def create_legacy_trace(build, content)
+ File.open(legacy_trace_path(build), 'wb') { |stream| stream.write(content) }
+ end
+
+ def create_legacy_trace_in_db(build, content)
+ build.update_column(:trace, content)
+ end
+
+ def legacy_trace_path(build)
+ legacy_trace_dir = File.join(Settings.gitlab_ci.builds_path,
+ build.created_at.utc.strftime("%Y_%m"),
+ build.project_id.to_s)
+
+ FileUtils.mkdir_p(legacy_trace_dir)
+
+ File.join(legacy_trace_dir, "#{build.id}.log")
+ end
+
+ def archived_trace_path(job_artifact)
+ disk_hash = Digest::SHA2.hexdigest(job_artifact.project_id.to_s)
+ creation_date = job_artifact.created_at.utc.strftime('%Y_%m_%d')
+
+ File.join(Gitlab.config.artifacts.path, disk_hash[0..1], disk_hash[2..3], disk_hash,
+ creation_date, job_artifact.job_id.to_s, job_artifact.id.to_s, 'job.log')
+ end
+end
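
TraceHelpers above gives specs a way to write a legacy file-based trace, seed a trace column in the database, and compute where an archived trace lands on disk. A minimal usage sketch, assuming the module is included into the example group and that a ci_build factory is available:

    # Hypothetical spec snippet using the helpers above.
    include TraceHelpers

    let(:build) { create(:ci_build) }

    it 'writes a legacy trace file for the build' do
      create_legacy_trace(build, 'job log contents')

      expect(File.read(legacy_trace_path(build))).to eq('job log contents')
    end
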
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index 2dd0925a8e6..01166865e88 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -355,7 +355,10 @@ describe ObjectStorage do
end
describe '.workhorse_authorize' do
- subject { uploader_class.workhorse_authorize }
+ let(:has_length) { true }
+ let(:maximum_size) { nil }
+
+ subject { uploader_class.workhorse_authorize(has_length: has_length, maximum_size: maximum_size) }
before do
# ensure that we use regular Fog libraries
@@ -371,10 +374,6 @@ describe ObjectStorage do
expect(subject[:TempPath]).to start_with(uploader_class.root)
expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH)
end
-
- it "does not return remote store" do
- is_expected.not_to have_key('RemoteObject')
- end
end
shared_examples 'uses remote storage' do
@@ -383,7 +382,7 @@ describe ObjectStorage do
expect(subject[:RemoteObject]).to have_key(:ID)
expect(subject[:RemoteObject]).to include(Timeout: a_kind_of(Integer))
- expect(subject[:RemoteObject][:Timeout]).to be(ObjectStorage::DIRECT_UPLOAD_TIMEOUT)
+ expect(subject[:RemoteObject][:Timeout]).to be(ObjectStorage::DirectUpload::TIMEOUT)
expect(subject[:RemoteObject]).to have_key(:GetURL)
expect(subject[:RemoteObject]).to have_key(:DeleteURL)
expect(subject[:RemoteObject]).to have_key(:StoreURL)
@@ -391,9 +390,31 @@ describe ObjectStorage do
expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH)
expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH)
end
+ end
- it "does not return local store" do
- is_expected.not_to have_key('TempPath')
+ shared_examples 'uses remote storage with multipart uploads' do
+ it_behaves_like 'uses remote storage' do
+ it "returns multipart upload" do
+ is_expected.to have_key(:RemoteObject)
+
+ expect(subject[:RemoteObject]).to have_key(:MultipartUpload)
+ expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartSize)
+ expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartURLs)
+ expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:CompleteURL)
+ expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:AbortURL)
+ expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(include(described_class::TMP_UPLOAD_PATH))
+ expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(described_class::TMP_UPLOAD_PATH)
+ expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(described_class::TMP_UPLOAD_PATH)
+ end
+ end
+ end
+
+ shared_examples 'uses remote storage without multipart uploads' do
+ it_behaves_like 'uses remote storage' do
+ it "does not return multipart upload" do
+ is_expected.to have_key(:RemoteObject)
+ expect(subject[:RemoteObject]).not_to have_key(:MultipartUpload)
+ end
end
end
@@ -416,6 +437,8 @@ describe ObjectStorage do
end
context 'uses AWS' do
+ let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
+
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "AWS",
@@ -425,18 +448,40 @@ describe ObjectStorage do
end
end
- it_behaves_like 'uses remote storage' do
- let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
+ context 'for known length' do
+ it_behaves_like 'uses remote storage without multipart uploads' do
+ it 'returns links for S3' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ end
+ end
+ end
+
+ context 'for unknown length' do
+ let(:has_length) { false }
+ let(:maximum_size) { 1.gigabyte }
- it 'returns links for S3' do
- expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
- expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
- expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ before do
+ stub_object_storage_multipart_init(storage_url)
+ end
+
+ it_behaves_like 'uses remote storage with multipart uploads' do
+ it 'returns links for S3' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+ expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
+ end
end
end
end
context 'uses Google' do
+ let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "Google",
@@ -445,36 +490,71 @@ describe ObjectStorage do
end
end
- it_behaves_like 'uses remote storage' do
- let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+ context 'for known length' do
+ it_behaves_like 'uses remote storage without multipart uploads' do
+ it 'returns links for Google Cloud' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ end
+ end
+ end
+
+ context 'for unknown length' do
+ let(:has_length) { false }
+ let(:maximum_size) { 1.gigabyte }
- it 'returns links for Google Cloud' do
- expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
- expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
- expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ it_behaves_like 'uses remote storage without multipart uploads' do
+ it 'returns links for Google Cloud' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ end
end
end
end
context 'uses GDK/minio' do
+ let(:storage_url) { "http://minio:9000/uploads/" }
+
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "AWS",
aws_access_key_id: "AWS_ACCESS_KEY_ID",
aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
- endpoint: 'http://127.0.0.1:9000',
+ endpoint: 'http://minio:9000',
path_style: true,
region: "gdk" }
end
end
- it_behaves_like 'uses remote storage' do
- let(:storage_url) { "http://127.0.0.1:9000/uploads/" }
+ context 'for known length' do
+ it_behaves_like 'uses remote storage without multipart uploads' do
+ it 'returns links for S3' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ end
+ end
+ end
+
+ context 'for unknown length' do
+ let(:has_length) { false }
+ let(:maximum_size) { 1.gigabyte }
- it 'returns links for S3' do
- expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
- expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
- expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ before do
+ stub_object_storage_multipart_init(storage_url)
+ end
+
+ it_behaves_like 'uses remote storage with multipart uploads' do
+ it 'returns links for S3' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+ expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
+ end
end
end
end