author     GitLab Bot <gitlab-bot@gitlab.com> 2022-07-13 18:09:35 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com> 2022-07-13 18:09:35 +0000
commit     7e064974b92de60a3ef4642905e8af98a364a7a0 (patch)
tree       ea95222e8b6040cd959dfe3404ee83a069bae2c7
parent     a88c31d0ea1a79ca93fad357c3eb536b5e013e44 (diff)
download   gitlab-ce-7e064974b92de60a3ef4642905e8af98a364a7a0.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  app/assets/javascripts/content_editor/services/hast_to_prosemirror_converter.js  128
-rw-r--r--  app/assets/javascripts/main.js  58
-rw-r--r--  app/assets/stylesheets/highlight/themes/monokai.scss  2
-rw-r--r--  app/assets/stylesheets/highlight/themes/white.scss  1
-rw-r--r--  app/models/packages/cleanup/policy.rb  4
-rw-r--r--  app/services/alert_management/alerts/update_service.rb  26
-rw-r--r--  app/services/incident_management/issuable_escalation_statuses/after_update_service.rb  16
-rw-r--r--  app/services/incident_management/issuable_escalation_statuses/build_service.rb  17
-rw-r--r--  app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb  2
-rw-r--r--  app/services/issuable_base_service.rb  2
-rw-r--r--  app/services/issues/update_service.rb  3
-rw-r--r--  app/services/packages/cleanup/execute_policy_service.rb  98
-rw-r--r--  app/services/packages/mark_package_files_for_destruction_service.rb  35
-rw-r--r--  config/feature_flags/development/use_keyset_aware_user_search_query.yml  2
-rw-r--r--  db/migrate/20220708142744_add_composite_index_for_protected_environments.rb  16
-rw-r--r--  db/migrate/20220708142803_add_composite_index_for_protected_environment_approval_rules.rb  16
-rw-r--r--  db/post_migrate/20220617142124_add_index_on_installable_package_files.rb  20
-rw-r--r--  db/post_migrate/20220617143228_replace_packages_index_on_project_id_and_status.rb  22
-rw-r--r--  db/post_migrate/20220629184402_unset_escalation_policies_for_alert_incidents.rb  46
-rw-r--r--  db/schema_migrations/20220617142124  1
-rw-r--r--  db/schema_migrations/20220617143228  1
-rw-r--r--  db/schema_migrations/20220629184402  1
-rw-r--r--  db/schema_migrations/20220708142744  1
-rw-r--r--  db/schema_migrations/20220708142803  1
-rw-r--r--  db/structure.sql  8
-rw-r--r--  doc/ci/runners/saas/macos_saas_runner.md  8
-rw-r--r--  doc/development/i18n/proofreader.md  1
-rw-r--r--  doc/development/packages/debian_repository.md  18
-rw-r--r--  doc/operations/incident_management/alerts.md  5
-rw-r--r--  doc/operations/incident_management/incidents.md  10
-rw-r--r--  doc/operations/incident_management/paging.md  1
-rw-r--r--  doc/user/project/merge_requests/csv_export.md  2
-rw-r--r--  glfm_specification/example_snapshots/html.yml  91
-rw-r--r--  glfm_specification/example_snapshots/prosemirror_json.yml  462
-rw-r--r--  qa/qa/specs/features/browser_ui/5_package/container_registry/container_registry_spec.rb  2
-rw-r--r--  spec/features/projects/settings/repository_settings_spec.rb  22
-rw-r--r--  spec/frontend/content_editor/remark_markdown_processing_spec.js  96
-rw-r--r--  spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb  72
-rw-r--r--  spec/models/packages/cleanup/policy_spec.rb  12
-rw-r--r--  spec/services/alert_management/alerts/update_service_spec.rb  51
-rw-r--r--  spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb  56
-rw-r--r--  spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb  6
-rw-r--r--  spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb  15
-rw-r--r--  spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb  6
-rw-r--r--  spec/services/issues/update_service_spec.rb  21
-rw-r--r--  spec/services/packages/cleanup/execute_policy_service_spec.rb  163
-rw-r--r--  spec/services/packages/mark_package_files_for_destruction_service_spec.rb  47
47 files changed, 1304 insertions, 390 deletions
diff --git a/app/assets/javascripts/content_editor/services/hast_to_prosemirror_converter.js b/app/assets/javascripts/content_editor/services/hast_to_prosemirror_converter.js
index a0be6c70993..5d675fb4851 100644
--- a/app/assets/javascripts/content_editor/services/hast_to_prosemirror_converter.js
+++ b/app/assets/javascripts/content_editor/services/hast_to_prosemirror_converter.js
@@ -138,6 +138,10 @@ class HastToProseMirrorConverterState {
return this.stack[this.stack.length - 1];
}
+ get topNode() {
+ return this.findInStack((item) => item.type === 'node');
+ }
+
/**
* Detects if the node stack is empty
*/
@@ -179,7 +183,7 @@ class HastToProseMirrorConverterState {
*/
addText(schema, text) {
if (!text) return;
- const nodes = this.top.content;
+ const nodes = this.topNode?.content;
const last = nodes[nodes.length - 1];
const node = schema.text(text, this.marks);
const merged = maybeMerge(last, node);
@@ -189,57 +193,92 @@ class HastToProseMirrorConverterState {
} else {
nodes.push(node);
}
-
- this.closeMarks();
}
/**
* Adds a mark to the set of marks stored temporarily
- * until addText is called.
- * @param {*} markType
- * @param {*} attrs
+ * until an inline node is created.
+ * @param {https://prosemirror.net/docs/ref/#model.MarkType} schemaType Mark schema type
+ * @param {https://github.com/syntax-tree/hast#nodes} hastNode AST node that the mark is based on
+ * @param {Object} attrs Mark attributes
+ * @param {Object} factorySpec Specifications on how the mark should be created
*/
- openMark(markType, attrs) {
- this.marks = markType.create(attrs).addToSet(this.marks);
+ openMark(schemaType, hastNode, attrs, factorySpec) {
+ const mark = schemaType.create(attrs);
+ this.stack.push({
+ type: 'mark',
+ mark,
+ attrs,
+ hastNode,
+ factorySpec,
+ });
+
+ this.marks = mark.addToSet(this.marks);
}
/**
- * Empties the temporary Mark set.
+ * Removes a mark from the list of active marks that
+ * are applied to inline nodes.
*/
- closeMarks() {
- this.marks = Mark.none;
+ closeMark() {
+ const { mark } = this.stack.pop();
+
+ this.marks = mark.removeFromSet(this.marks);
}
/**
* Adds a node to the stack data structure.
*
- * @param {Schema.NodeType} type ProseMirror Schema for the node
- * @param {HastNode} hastNode Hast node from which the ProseMirror node will be created
+ * @param {https://prosemirror.net/docs/ref/#model.NodeType} schemaType ProseMirror Schema for the node
+ * @param {https://github.com/syntax-tree/hast#nodes} hastNode Hast node from which the ProseMirror node will be created
* @param {*} attrs Node’s attributes
* @param {*} factorySpec The factory spec used to create the node factory
*/
- openNode(type, hastNode, attrs, factorySpec) {
- this.stack.push({ type, attrs, content: [], hastNode, factorySpec });
+ openNode(schemaType, hastNode, attrs, factorySpec) {
+ this.stack.push({
+ type: 'node',
+ schemaType,
+ attrs,
+ content: [],
+ hastNode,
+ factorySpec,
+ });
}
/**
* Removes the top ProseMirror node from the
* conversion stack and adds the node to the
* previous element.
- * @returns
*/
closeNode() {
- const { type, attrs, content } = this.stack.pop();
- const node = type.createAndFill(attrs, content);
-
- if (!node) return null;
-
- if (this.marks.length) {
- this.marks = Mark.none;
+ const { schemaType, attrs, content, factorySpec } = this.stack.pop();
+ const node =
+ factorySpec.type === 'inline' && this.marks.length
+ ? schemaType.createAndFill(attrs, content, this.marks)
+ : schemaType.createAndFill(attrs, content);
+
+ if (!node) {
+ /*
+ When the node returned by `createAndFill` is null, it is because the `content` passed as a parameter
+ doesn't conform to the document schema. We handle the most likely scenario here, which happens
+ when a paragraph is nested inside another paragraph.
+
+ This scenario happens when the converter encounters a mark wrapping one or more paragraphs.
+ In this case, the converter will wrap the mark in a paragraph as well because ProseMirror does
+ not allow marks wrapping block nodes or being direct children of certain nodes like the root nodes
+ or list items.
+ */
+ if (
+ schemaType.name === 'paragraph' &&
+ content.some((child) => child.type.name === 'paragraph')
+ ) {
+ this.topNode.content.push(...content);
+ }
+ return null;
}
if (!this.empty) {
- this.top.content.push(node);
+ this.topNode.content.push(node);
}
return node;
@@ -247,9 +286,27 @@ class HastToProseMirrorConverterState {
closeUntil(hastNode) {
while (hastNode !== this.top?.hastNode) {
- this.closeNode();
+ if (this.top.type === 'node') {
+ this.closeNode();
+ } else {
+ this.closeMark();
+ }
}
}
+
+ buildDoc() {
+ let doc;
+
+ do {
+ if (this.top.type === 'node') {
+ doc = this.closeNode();
+ } else {
+ this.closeMark();
+ }
+ } while (!this.empty);
+
+ return doc;
+ }
}
/**
@@ -276,7 +333,7 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
},
text: {
selector: 'text',
- handle: (state, hastNode) => {
+ handle: (state, hastNode, parent) => {
const found = state.findInStack((node) => isFunction(node.factorySpec.processText));
const { value: text } = hastNode;
@@ -284,6 +341,7 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
return;
}
+ state.closeUntil(parent);
state.addText(schema, found ? found.factorySpec.processText(text) : text);
},
},
@@ -320,11 +378,7 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
} else if (factory.type === 'mark') {
const markType = schema.marks[proseMirrorName];
factory.handle = (state, hastNode, parent) => {
- state.openMark(markType, getAttrs(factory, hastNode, parent, source));
-
- if (factory.inlineContent) {
- state.addText(schema, hastNode.value);
- }
+ state.openMark(markType, hastNode, getAttrs(factory, hastNode, parent, source), factory);
};
} else if (factory.type === 'ignore') {
factory.handle = noop;
@@ -357,7 +411,7 @@ const findParent = (ancestors, parent) => {
return ancestors[ancestors.length - 1];
};
-const calcTextNodePosition = (textNode) => {
+const resolveNodePosition = (textNode) => {
const { position, value, type } = textNode;
if (type !== 'text' || (!position.start && !position.end) || (position.start && position.end)) {
@@ -418,7 +472,7 @@ const wrapInlineElements = (nodes, wrappableTags) =>
const wrapper = {
type: 'element',
tagName: 'p',
- position: calcTextNodePosition(child),
+ position: resolveNodePosition(child),
children: [child],
properties: { wrapper: true },
};
@@ -580,11 +634,5 @@ export const createProseMirrorDocFromMdastTree = ({
return factory.skipChildren === true ? SKIP : true;
});
- let doc;
-
- do {
- doc = state.closeNode();
- } while (!state.empty);
-
- return doc;
+ return state.buildDoc();
};
diff --git a/app/assets/javascripts/main.js b/app/assets/javascripts/main.js
index a725ef972d4..21d5decb15b 100644
--- a/app/assets/javascripts/main.js
+++ b/app/assets/javascripts/main.js
@@ -115,34 +115,6 @@ function deferredInitialisation() {
);
}
- const searchInputBox = document.querySelector('#search');
- if (searchInputBox) {
- searchInputBox.addEventListener(
- 'focus',
- () => {
- if (gon.features?.newHeaderSearch) {
- import(/* webpackChunkName: 'globalSearch' */ '~/header_search')
- .then(async ({ initHeaderSearchApp }) => {
- // In case the user started searching before we bootstrapped, let's pass the search along.
- const initialSearchValue = searchInputBox.value;
- await initHeaderSearchApp(initialSearchValue);
- // this is new #search input element. We need to re-find it.
- document.querySelector('#search').focus();
- })
- .catch(() => {});
- } else {
- import(/* webpackChunkName: 'globalSearch' */ './search_autocomplete')
- .then(({ default: initSearchAutocomplete }) => {
- const searchDropdown = initSearchAutocomplete();
- searchDropdown.onSearchInputFocus();
- })
- .catch(() => {});
- }
- },
- { once: true },
- );
- }
-
addSelectOnFocusBehaviour('.js-select-on-focus');
const glTooltipDelay = localStorage.getItem('gl-tooltip-delay');
@@ -169,6 +141,36 @@ function deferredInitialisation() {
}
}
+// loading this inside requestIdleCallback is causing issues
+// see https://gitlab.com/gitlab-org/gitlab/-/issues/365746
+const searchInputBox = document.querySelector('#search');
+if (searchInputBox) {
+ searchInputBox.addEventListener(
+ 'focus',
+ () => {
+ if (gon.features?.newHeaderSearch) {
+ import(/* webpackChunkName: 'globalSearch' */ '~/header_search')
+ .then(async ({ initHeaderSearchApp }) => {
+ // In case the user started searching before we bootstrapped, let's pass the search along.
+ const initialSearchValue = searchInputBox.value;
+ await initHeaderSearchApp(initialSearchValue);
+ // this is the new #search input element. We need to re-find it.
+ document.querySelector('#search').focus();
+ })
+ .catch(() => {});
+ } else {
+ import(/* webpackChunkName: 'globalSearch' */ './search_autocomplete')
+ .then(({ default: initSearchAutocomplete }) => {
+ const searchDropdown = initSearchAutocomplete();
+ searchDropdown.onSearchInputFocus();
+ })
+ .catch(() => {});
+ }
+ },
+ { once: true },
+ );
+}
+
const $body = $('body');
const $document = $(document);
const bootstrapBreakpoint = bp.getBreakpointSize();
diff --git a/app/assets/stylesheets/highlight/themes/monokai.scss b/app/assets/stylesheets/highlight/themes/monokai.scss
index c19189ef31b..f83bad152c5 100644
--- a/app/assets/stylesheets/highlight/themes/monokai.scss
+++ b/app/assets/stylesheets/highlight/themes/monokai.scss
@@ -115,7 +115,7 @@ $monokai-gh: #75715e;
@include hljs-override('section', $monokai-gh);
@include hljs-override('bullet', $monokai-n);
@include hljs-override('subst', $monokai-p);
- @include hljs-override('symbol', $monokai-ni);
+ @include hljs-override('symbol', $monokai-ss);
// Line numbers
.file-line-num {
diff --git a/app/assets/stylesheets/highlight/themes/white.scss b/app/assets/stylesheets/highlight/themes/white.scss
index 8698e448c94..b6ca26e9b39 100644
--- a/app/assets/stylesheets/highlight/themes/white.scss
+++ b/app/assets/stylesheets/highlight/themes/white.scss
@@ -2,6 +2,7 @@
@import '../white_base';
@include conflict-colors('white');
+ @include hljs-override('symbol', $white-ss);
}
:root {
diff --git a/app/models/packages/cleanup/policy.rb b/app/models/packages/cleanup/policy.rb
index d7df90a4ce0..c0585a52e6e 100644
--- a/app/models/packages/cleanup/policy.rb
+++ b/app/models/packages/cleanup/policy.rb
@@ -27,6 +27,10 @@ module Packages
# fixed cadence of 12 hours
self.next_run_at = Time.zone.now + 12.hours
end
+
+ def keep_n_duplicated_package_files_disabled?
+ keep_n_duplicated_package_files == 'all'
+ end
end
end
end
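For context, a minimal usage sketch of the new predicate, assuming a Packages::Cleanup::Policy record whose `keep_n_duplicated_package_files` attribute holds the special `'all'` value; only the model and method names come from the diff above, the setup itself is hypothetical:

# Hypothetical setup illustrating keep_n_duplicated_package_files_disabled?
policy = Packages::Cleanup::Policy.new(keep_n_duplicated_package_files: 'all')
policy.keep_n_duplicated_package_files_disabled?  # => true, duplicate-file cleanup is skipped

policy.keep_n_duplicated_package_files = '10'
policy.keep_n_duplicated_package_files_disabled?  # => false, only the 10 most recent duplicates per file name are kept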
diff --git a/app/services/alert_management/alerts/update_service.rb b/app/services/alert_management/alerts/update_service.rb
index 6bdceb0f27b..f273e15b159 100644
--- a/app/services/alert_management/alerts/update_service.rb
+++ b/app/services/alert_management/alerts/update_service.rb
@@ -12,7 +12,6 @@ module AlertManagement
@alert = alert
@param_errors = []
@status = params.delete(:status)
- @status_change_reason = params.delete(:status_change_reason)
super(project: alert.project, current_user: current_user, params: params)
end
@@ -37,7 +36,7 @@ module AlertManagement
private
- attr_reader :alert, :param_errors, :status, :status_change_reason
+ attr_reader :alert, :param_errors, :status
def allowed?
current_user&.can?(:update_alert_management_alert, alert)
@@ -130,37 +129,16 @@ module AlertManagement
def handle_status_change
add_status_change_system_note
resolve_todos if alert.resolved?
- sync_to_incident if should_sync_to_incident?
end
def add_status_change_system_note
- SystemNoteService.change_alert_status(alert, current_user, status_change_reason)
+ SystemNoteService.change_alert_status(alert, current_user)
end
def resolve_todos
todo_service.resolve_todos_for_target(alert, current_user)
end
- def sync_to_incident
- ::Issues::UpdateService.new(
- project: project,
- current_user: current_user,
- params: {
- escalation_status: {
- status: status,
- status_change_reason: " by changing the status of #{alert.to_reference(project)}"
- }
- }
- ).execute(alert.issue)
- end
-
- def should_sync_to_incident?
- alert.issue &&
- alert.issue.supports_escalation? &&
- alert.issue.escalation_status &&
- alert.issue.escalation_status.status != alert.status
- end
-
def filter_duplicate
# Only need to check if changing to a not-resolved status
return if params[:status_event].blank? || params[:status_event] == :resolve
diff --git a/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb b/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb
index b5f105a6c89..d11492e062a 100644
--- a/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb
+++ b/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb
@@ -6,7 +6,6 @@ module IncidentManagement
def initialize(issuable, current_user, **params)
@issuable = issuable
@escalation_status = issuable.escalation_status
- @alert = issuable.alert_management_alert
super(project: issuable.project, current_user: current_user, params: params)
end
@@ -19,26 +18,13 @@ module IncidentManagement
private
- attr_reader :issuable, :escalation_status, :alert
+ attr_reader :issuable, :escalation_status
def after_update
- sync_status_to_alert
add_status_system_note
add_timeline_event
end
- def sync_status_to_alert
- return unless alert
- return if alert.status == escalation_status.status
-
- ::AlertManagement::Alerts::UpdateService.new(
- alert,
- current_user,
- status: escalation_status.status_name,
- status_change_reason: " by changing the incident status of #{issuable.to_reference(project)}"
- ).execute
- end
-
def add_status_system_note
return unless escalation_status.status_previously_changed?
diff --git a/app/services/incident_management/issuable_escalation_statuses/build_service.rb b/app/services/incident_management/issuable_escalation_statuses/build_service.rb
index 9ebcf72a0c9..b3c57da03cb 100644
--- a/app/services/incident_management/issuable_escalation_statuses/build_service.rb
+++ b/app/services/incident_management/issuable_escalation_statuses/build_service.rb
@@ -5,30 +5,17 @@ module IncidentManagement
class BuildService < ::BaseProjectService
def initialize(issue)
@issue = issue
- @alert = issue.alert_management_alert
super(project: issue.project)
end
def execute
- return issue.escalation_status if issue.escalation_status
-
- issue.build_incident_management_issuable_escalation_status(alert_params)
+ issue.escalation_status || issue.build_incident_management_issuable_escalation_status
end
private
- attr_reader :issue, :alert
-
- def alert_params
- return {} unless alert
-
- {
- status_event: alert.status_event_for(alert.status_name)
- }
- end
+ attr_reader :issue
end
end
end
-
-IncidentManagement::IssuableEscalationStatuses::BuildService.prepend_mod
diff --git a/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb b/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb
index 1d0504a6e80..58777848151 100644
--- a/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb
+++ b/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb
@@ -5,7 +5,7 @@ module IncidentManagement
class PrepareUpdateService < ::BaseProjectService
include Gitlab::Utils::StrongMemoize
- SUPPORTED_PARAMS = %i[status status_change_reason].freeze
+ SUPPORTED_PARAMS = %i[status].freeze
def initialize(issuable, current_user, params)
@issuable = issuable
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 544b2170a02..acd6d45af7a 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -162,8 +162,6 @@ class IssuableBaseService < ::BaseProjectService
return unless result.success? && result[:escalation_status].present?
- @escalation_status_change_reason = result[:escalation_status].delete(:status_change_reason)
-
params[:incident_management_issuable_escalation_status_attributes] = result[:escalation_status]
end
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index d9210169005..afc61eed287 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -199,8 +199,7 @@ module Issues
::IncidentManagement::IssuableEscalationStatuses::AfterUpdateService.new(
issue,
- current_user,
- status_change_reason: @escalation_status_change_reason # Defined in IssuableBaseService before save
+ current_user
).execute
end
diff --git a/app/services/packages/cleanup/execute_policy_service.rb b/app/services/packages/cleanup/execute_policy_service.rb
new file mode 100644
index 00000000000..b432f6d0acb
--- /dev/null
+++ b/app/services/packages/cleanup/execute_policy_service.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+module Packages
+ module Cleanup
+ class ExecutePolicyService
+ include Gitlab::Utils::StrongMemoize
+
+ MAX_EXECUTION_TIME = 250.seconds
+
+ DUPLICATED_FILES_BATCH_SIZE = 10_000
+ MARK_PACKAGE_FILES_FOR_DESTRUCTION_SERVICE_BATCH_SIZE = 200
+
+ def initialize(policy)
+ @policy = policy
+ @counts = {
+ marked_package_files_total_count: 0,
+ unique_package_id_and_file_name_total_count: 0
+ }
+ end
+
+ def execute
+ cleanup_duplicated_files
+ end
+
+ private
+
+ def cleanup_duplicated_files
+ return if @policy.keep_n_duplicated_package_files_disabled?
+
+ result = installable_package_files.each_batch(of: DUPLICATED_FILES_BATCH_SIZE) do |package_files|
+ break :timeout if cleanup_duplicated_files_on(package_files) == :timeout
+ end
+
+ response_success(timeout: result == :timeout)
+ end
+
+ def cleanup_duplicated_files_on(package_files)
+ unique_package_id_and_file_name_from(package_files).each do |package_id, file_name|
+ result = remove_duplicated_files_for(package_id: package_id, file_name: file_name)
+ @counts[:marked_package_files_total_count] += result.payload[:marked_package_files_count]
+ @counts[:unique_package_id_and_file_name_total_count] += 1
+
+ break :timeout unless result.success?
+ end
+ end
+
+ def unique_package_id_and_file_name_from(package_files)
+ # This is a highly custom query for this service, which is why it's not in the model.
+ # rubocop: disable CodeReuse/ActiveRecord
+ package_files.group(:package_id, :file_name)
+ .having("COUNT(*) > #{@policy.keep_n_duplicated_package_files}")
+ .pluck(:package_id, :file_name)
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+
+ def remove_duplicated_files_for(package_id:, file_name:)
+ base = ::Packages::PackageFile.for_package_ids(package_id)
+ .installable
+ .with_file_name(file_name)
+ ids_to_keep = base.recent
+ .limit(@policy.keep_n_duplicated_package_files)
+ .pluck_primary_key
+
+ duplicated_package_files = base.id_not_in(ids_to_keep)
+ ::Packages::MarkPackageFilesForDestructionService.new(duplicated_package_files)
+ .execute(batch_deadline: batch_deadline, batch_size: MARK_PACKAGE_FILES_FOR_DESTRUCTION_SERVICE_BATCH_SIZE)
+ end
+
+ def project
+ @policy.project
+ end
+
+ def installable_package_files
+ ::Packages::PackageFile.installable
+ .for_package_ids(
+ ::Packages::Package.installable
+ .for_projects(project.id)
+ )
+ end
+
+ def batch_deadline
+ strong_memoize(:batch_deadline) do
+ MAX_EXECUTION_TIME.from_now
+ end
+ end
+
+ def response_success(timeout:)
+ ServiceResponse.success(
+ message: "Packages cleanup policy executed for project #{project.id}",
+ payload: {
+ timeout: timeout,
+ counts: @counts
+ }
+ )
+ end
+ end
+ end
+end
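A hedged sketch of how the new service could be invoked and what its response carries, based only on the code above; obtaining `policy` is omitted and hypothetical:

# Hypothetical caller; `policy` is a Packages::Cleanup::Policy with keep_n_duplicated_package_files enabled.
response = Packages::Cleanup::ExecutePolicyService.new(policy).execute

response.success?            # => true when duplicate-file cleanup ran (a ServiceResponse.success)
response.payload[:timeout]   # => true if MAX_EXECUTION_TIME (250 seconds) elapsed before all batches finished
response.payload[:counts]    # => { marked_package_files_total_count: ..., unique_package_id_and_file_name_total_count: ... }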
diff --git a/app/services/packages/mark_package_files_for_destruction_service.rb b/app/services/packages/mark_package_files_for_destruction_service.rb
index 3672b44b409..e7fdd88843a 100644
--- a/app/services/packages/mark_package_files_for_destruction_service.rb
+++ b/app/services/packages/mark_package_files_for_destruction_service.rb
@@ -9,18 +9,41 @@ module Packages
@package_files = package_files
end
- def execute
- @package_files.each_batch(of: BATCH_SIZE) do |batched_package_files|
- batched_package_files.update_all(status: :pending_destruction)
+ def execute(batch_deadline: nil, batch_size: BATCH_SIZE)
+ timeout = false
+ updates_count = 0
+ min_batch_size = [batch_size, BATCH_SIZE].min
+
+ @package_files.each_batch(of: min_batch_size) do |batched_package_files|
+ if batch_deadline && Time.zone.now > batch_deadline
+ timeout = true
+ break
+ end
+
+ updates_count += batched_package_files.update_all(status: :pending_destruction)
end
- service_response_success('Package files are now pending destruction')
+ payload = { marked_package_files_count: updates_count }
+
+ return response_error(payload) if timeout
+
+ response_success(payload)
end
private
- def service_response_success(message)
- ServiceResponse.success(message: message)
+ def response_success(payload)
+ ServiceResponse.success(
+ message: 'Package files are now pending destruction',
+ payload: payload
+ )
+ end
+
+ def response_error(payload)
+ ServiceResponse.error(
+ message: 'Timeout while marking package files as pending destruction',
+ payload: payload
+ )
end
end
end
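Similarly, a sketch of the extended `execute` signature on MarkPackageFilesForDestructionService, assuming a Packages::PackageFile relation named `package_files`; the relation is hypothetical, while the keyword arguments and payload key come from the diff above:

# Hypothetical caller mirroring how ExecutePolicyService uses this service.
result = Packages::MarkPackageFilesForDestructionService
  .new(package_files)
  .execute(batch_deadline: 250.seconds.from_now, batch_size: 200)

if result.success?
  result.payload[:marked_package_files_count]  # number of files now marked as pending_destruction
else
  result.message  # "Timeout while marking package files as pending destruction"
end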
diff --git a/config/feature_flags/development/use_keyset_aware_user_search_query.yml b/config/feature_flags/development/use_keyset_aware_user_search_query.yml
index aee293c374b..8c2babfd1c7 100644
--- a/config/feature_flags/development/use_keyset_aware_user_search_query.yml
+++ b/config/feature_flags/development/use_keyset_aware_user_search_query.yml
@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/367025
milestone: '15.2'
type: development
group: group::optimize
-default_enabled: false
+default_enabled: true
diff --git a/db/migrate/20220708142744_add_composite_index_for_protected_environments.rb b/db/migrate/20220708142744_add_composite_index_for_protected_environments.rb
new file mode 100644
index 00000000000..ab93f5ca9ca
--- /dev/null
+++ b/db/migrate/20220708142744_add_composite_index_for_protected_environments.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class AddCompositeIndexForProtectedEnvironments < Gitlab::Database::Migration[2.0]
+ disable_ddl_transaction!
+
+ # skips the `required_` part because the index name limit is 63 characters
+ INDEX_NAME = 'index_protected_environments_on_approval_count_and_created_at'
+
+ def up
+ add_concurrent_index :protected_environments, %i[required_approval_count created_at], name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index :protected_environments, %i[required_approval_count created_at], name: INDEX_NAME
+ end
+end
diff --git a/db/migrate/20220708142803_add_composite_index_for_protected_environment_approval_rules.rb b/db/migrate/20220708142803_add_composite_index_for_protected_environment_approval_rules.rb
new file mode 100644
index 00000000000..6952489588d
--- /dev/null
+++ b/db/migrate/20220708142803_add_composite_index_for_protected_environment_approval_rules.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class AddCompositeIndexForProtectedEnvironmentApprovalRules < Gitlab::Database::Migration[2.0]
+ disable_ddl_transaction!
+
+ # uses `pe_` instead of `protected_environment_` because the index name limit is 63 characters
+ INDEX_NAME = 'index_pe_approval_rules_on_required_approvals_and_created_at'
+
+ def up
+ add_concurrent_index :protected_environment_approval_rules, %i[required_approvals created_at], name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index :protected_environment_approval_rules, %i[required_approvals created_at], name: INDEX_NAME
+ end
+end
diff --git a/db/post_migrate/20220617142124_add_index_on_installable_package_files.rb b/db/post_migrate/20220617142124_add_index_on_installable_package_files.rb
new file mode 100644
index 00000000000..e74c6c0935e
--- /dev/null
+++ b/db/post_migrate/20220617142124_add_index_on_installable_package_files.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+class AddIndexOnInstallablePackageFiles < Gitlab::Database::Migration[2.0]
+ disable_ddl_transaction!
+
+ INDEX_NAME = 'idx_pkgs_installable_package_files_on_package_id_id_file_name'
+ # See https://gitlab.com/gitlab-org/gitlab/-/blob/e3ed2c1f65df2e137fc714485d7d42264a137968/app/models/packages/package_file.rb#L16
+ DEFAULT_STATUS = 0
+
+ def up
+ add_concurrent_index :packages_package_files,
+ [:package_id, :id, :file_name],
+ where: "(status = #{DEFAULT_STATUS})",
+ name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :packages_package_files, INDEX_NAME
+ end
+end
diff --git a/db/post_migrate/20220617143228_replace_packages_index_on_project_id_and_status.rb b/db/post_migrate/20220617143228_replace_packages_index_on_project_id_and_status.rb
new file mode 100644
index 00000000000..d1e70f04468
--- /dev/null
+++ b/db/post_migrate/20220617143228_replace_packages_index_on_project_id_and_status.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class ReplacePackagesIndexOnProjectIdAndStatus < Gitlab::Database::Migration[2.0]
+ disable_ddl_transaction!
+
+ NEW_INDEX_NAME = 'index_packages_packages_on_project_id_and_status_and_id'
+ OLD_INDEX_NAME = 'index_packages_packages_on_project_id_and_status'
+
+ def up
+ add_concurrent_index :packages_packages,
+ [:project_id, :status, :id],
+ name: NEW_INDEX_NAME
+ remove_concurrent_index_by_name :packages_packages, OLD_INDEX_NAME
+ end
+
+ def down
+ add_concurrent_index :packages_packages,
+ [:project_id, :status],
+ name: OLD_INDEX_NAME
+ remove_concurrent_index_by_name :packages_packages, NEW_INDEX_NAME
+ end
+end
diff --git a/db/post_migrate/20220629184402_unset_escalation_policies_for_alert_incidents.rb b/db/post_migrate/20220629184402_unset_escalation_policies_for_alert_incidents.rb
new file mode 100644
index 00000000000..89adc4b2703
--- /dev/null
+++ b/db/post_migrate/20220629184402_unset_escalation_policies_for_alert_incidents.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+class UnsetEscalationPoliciesForAlertIncidents < Gitlab::Database::Migration[2.0]
+ disable_ddl_transaction!
+
+ restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ class EscalationStatus < MigrationRecord
+ include EachBatch
+
+ self.table_name = 'incident_management_issuable_escalation_statuses'
+
+ scope :having_alert_policy, -> do
+ joins(
+ 'INNER JOIN alert_management_alerts ' \
+ 'ON alert_management_alerts.issue_id ' \
+ '= incident_management_issuable_escalation_statuses.issue_id'
+ )
+ end
+ end
+
+ def up
+ EscalationStatus.each_batch do |escalation_statuses|
+ escalation_statuses
+ .where.not(policy_id: nil)
+ .having_alert_policy
+ .update_all(policy_id: nil, escalations_started_at: nil)
+ end
+ end
+
+ def down
+ # no-op
+ #
+ # We cannot retrieve the exact nullified values. We could
+ # approximately guess what the values are via the alert's
+ # escalation policy. However, that may not be accurate
+ # in all cases, as an alert's escalation policy is implicitly
+ # inferred from the project rather than explicitly set, as it is for an incident.
+ # So we won't backfill potentially incorrect data.
+ #
+ # This data is functionally safe to delete, as the relevant
+ # fields are read-only, and exclusively informational.
+ #
+ # Re-running the migration will have no effect.
+ end
+end
diff --git a/db/schema_migrations/20220617142124 b/db/schema_migrations/20220617142124
new file mode 100644
index 00000000000..c8fd06f2c10
--- /dev/null
+++ b/db/schema_migrations/20220617142124
@@ -0,0 +1 @@
+668404076e9cfc91817b8ae3ec995a69ec0db283153bbe497a81eb83c2188ceb
\ No newline at end of file
diff --git a/db/schema_migrations/20220617143228 b/db/schema_migrations/20220617143228
new file mode 100644
index 00000000000..cb4ac555bc3
--- /dev/null
+++ b/db/schema_migrations/20220617143228
@@ -0,0 +1 @@
+547fc0071177395133497cbcec9a9d9ed058fe74f632f5e84d9a6416047503f2
\ No newline at end of file
diff --git a/db/schema_migrations/20220629184402 b/db/schema_migrations/20220629184402
new file mode 100644
index 00000000000..7e8b0c47bd1
--- /dev/null
+++ b/db/schema_migrations/20220629184402
@@ -0,0 +1 @@
+9414b08c3eacadffd8759739da163eb378776d3ecdb06dab7c66e259ff1bed29
\ No newline at end of file
diff --git a/db/schema_migrations/20220708142744 b/db/schema_migrations/20220708142744
new file mode 100644
index 00000000000..980c0b43c52
--- /dev/null
+++ b/db/schema_migrations/20220708142744
@@ -0,0 +1 @@
+b93ab540270a4b743c12fe5d1d6963cfeb29ee3b0a1e4e012cd4b3d1b3a08cde
\ No newline at end of file
diff --git a/db/schema_migrations/20220708142803 b/db/schema_migrations/20220708142803
new file mode 100644
index 00000000000..4eb59905dd0
--- /dev/null
+++ b/db/schema_migrations/20220708142803
@@ -0,0 +1 @@
+7929540cf382f282f75f2f9c9dd6196d426ed1edb1f6744da1f0a627e7fb0cfc
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 9ad896d801c..14b1a1ba862 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -27116,6 +27116,8 @@ CREATE INDEX idx_pkgs_debian_project_distribution_keys_on_distribution_id ON pac
CREATE UNIQUE INDEX idx_pkgs_dep_links_on_pkg_id_dependency_id_dependency_type ON packages_dependency_links USING btree (package_id, dependency_id, dependency_type);
+CREATE INDEX idx_pkgs_installable_package_files_on_package_id_id_file_name ON packages_package_files USING btree (package_id, id, file_name) WHERE (status = 0);
+
CREATE INDEX idx_proj_feat_usg_on_jira_dvcs_cloud_last_sync_at_and_proj_id ON project_feature_usages USING btree (jira_dvcs_cloud_last_sync_at, project_id) WHERE (jira_dvcs_cloud_last_sync_at IS NOT NULL);
CREATE INDEX idx_proj_feat_usg_on_jira_dvcs_server_last_sync_at_and_proj_id ON project_feature_usages USING btree (jira_dvcs_server_last_sync_at, project_id) WHERE (jira_dvcs_server_last_sync_at IS NOT NULL);
@@ -29096,7 +29098,7 @@ CREATE INDEX index_packages_packages_on_project_id_and_created_at ON packages_pa
CREATE INDEX index_packages_packages_on_project_id_and_package_type ON packages_packages USING btree (project_id, package_type);
-CREATE INDEX index_packages_packages_on_project_id_and_status ON packages_packages USING btree (project_id, status);
+CREATE INDEX index_packages_packages_on_project_id_and_status_and_id ON packages_packages USING btree (project_id, status, id);
CREATE INDEX index_packages_packages_on_project_id_and_version ON packages_packages USING btree (project_id, version);
@@ -29162,6 +29164,8 @@ CREATE INDEX index_path_locks_on_project_id ON path_locks USING btree (project_i
CREATE INDEX index_path_locks_on_user_id ON path_locks USING btree (user_id);
+CREATE INDEX index_pe_approval_rules_on_required_approvals_and_created_at ON protected_environment_approval_rules USING btree (required_approvals, created_at);
+
CREATE UNIQUE INDEX index_personal_access_tokens_on_token_digest ON personal_access_tokens USING btree (token_digest);
CREATE INDEX index_personal_access_tokens_on_user_id ON personal_access_tokens USING btree (user_id);
@@ -29426,6 +29430,8 @@ CREATE INDEX index_protected_environment_deploy_access_levels_on_group_id ON pro
CREATE INDEX index_protected_environment_deploy_access_levels_on_user_id ON protected_environment_deploy_access_levels USING btree (user_id);
+CREATE INDEX index_protected_environments_on_approval_count_and_created_at ON protected_environments USING btree (required_approval_count, created_at);
+
CREATE UNIQUE INDEX index_protected_environments_on_group_id_and_name ON protected_environments USING btree (group_id, name) WHERE (group_id IS NOT NULL);
CREATE INDEX index_protected_environments_on_project_id ON protected_environments USING btree (project_id);
diff --git a/doc/ci/runners/saas/macos_saas_runner.md b/doc/ci/runners/saas/macos_saas_runner.md
index 2f61a74a06c..09a0cc975f2 100644
--- a/doc/ci/runners/saas/macos_saas_runner.md
+++ b/doc/ci/runners/saas/macos_saas_runner.md
@@ -4,9 +4,9 @@ group: Runner
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
-# SaaS runners on macOS (Limited Availability) **(PREMIUM SAAS)**
+# SaaS runners on macOS (Beta) **(PREMIUM SAAS)**
-SaaS runners on macOS are in [Limited Availability](../../../policy/alpha-beta-support.md#limited-availability-la) for approved open source programs and customers in Premium and Ultimate plans.
+SaaS runners on macOS are in [Beta](../../../policy/alpha-beta-support.md#beta-features) for approved open source programs and customers in Premium and Ultimate plans.
SaaS runners on macOS provide an on-demand macOS build environment integrated with
GitLab SaaS [CI/CD](../../../ci/index.md).
@@ -22,11 +22,11 @@ Jobs handled by macOS shared runners on GitLab.com **time out after 2 hours**, r
## Access request process
-While in limited availability, to run CI jobs on the macOS runners, GitLab SaaS customer namespaces must be explicitly added to the macOS `allow-list`. Customers who participated in the beta have already been added.
+While in beta, to run CI jobs on the macOS runners, GitLab SaaS customer namespaces must be explicitly added to the macOS `allow-list`.
After you have been added, you can use the macOS runners for any projects in your namespace.
-To request access, open a [limited availability access request](https://gitlab.com/gitlab-com/runner-saas-macos-limited-availability/-/issues/new).
+To request access, open an [access request](https://gitlab.com/gitlab-com/runner-saas-macos-limited-availability/-/issues/new).
The expected turnaround for activation is two business days.
## Quickstart
diff --git a/doc/development/i18n/proofreader.md b/doc/development/i18n/proofreader.md
index 9431fce4255..2fcf6e33613 100644
--- a/doc/development/i18n/proofreader.md
+++ b/doc/development/i18n/proofreader.md
@@ -62,6 +62,7 @@ are very appreciative of the work done by translators and proofreaders!
- Michael Hahnle - [GitLab](https://gitlab.com/mhah), [Crowdin](https://crowdin.com/profile/mhah)
- Katrin Leinweber - [GitLab](https://gitlab.com/katrinleinweber), [Crowdin](https://crowdin.com/profile/katrinleinweber)
- Justman10000 - [GitLab](https://gitlab.com/Justman10000), [Crowdin](https://crowdin.com/profile/Justman10000)
+ - Vladislav Wanner - [GitLab](https://gitlab.com/RumBugen), [Crowdin](https://crowdin.com/profile/RumBugen)
- Greek
- Proofreaders needed.
- Hebrew
diff --git a/doc/development/packages/debian_repository.md b/doc/development/packages/debian_repository.md
index 6f65e04878b..a417ced2e65 100644
--- a/doc/development/packages/debian_repository.md
+++ b/doc/development/packages/debian_repository.md
@@ -110,12 +110,12 @@ sequenceDiagram
DebianProjectPackages->>+FindOrCreateIncomingService: Create "incoming" package
DebianProjectPackages->>+CreatePackageFileService: Create "unknown" file
Note over DebianProjectPackages: If `.changes` file
- DebianProjectPackages->>+ProcessChangesWorker:
+ DebianProjectPackages->>+ProcessChangesWorker: Schedule worker to process the file
DebianProjectPackages->>+Client: 202 Created
- ProcessChangesWorker->>+ProcessChangesService:
- ProcessChangesService->>+ExtractChangesMetadataService:
- ExtractChangesMetadataService->>+ExtractMetadataService:
- ExtractMetadataService->>+ParseDebian822Service:
+ ProcessChangesWorker->>+ProcessChangesService: Start service
+ ProcessChangesService->>+ExtractChangesMetadataService: Extract changes metadata
+ ExtractChangesMetadataService->>+ExtractMetadataService: Extract file metadata
+ ExtractMetadataService->>+ParseDebian822Service: run `dpkg --field` to get control file
ExtractMetadataService->>+ExtractDebMetadataService: If .deb or .udeb
ExtractDebMetadataService->>+ParseDebian822Service: run `dpkg --field` to get control file
ParseDebian822Service-->>-ExtractDebMetadataService: Parse String as Debian RFC822 control data format
@@ -125,8 +125,8 @@ sequenceDiagram
ExtractMetadataService-->>-ExtractChangesMetadataService: Parse Metadata file
ExtractChangesMetadataService-->>-ProcessChangesService: Return list of files and hashes from the .changes file
loop process files listed in .changes
- ProcessChangesService->>+ExtractMetadataService:
- ExtractMetadataService->>+ParseDebian822Service:
+ ProcessChangesService->>+ExtractMetadataService: Process file
+ ExtractMetadataService->>+ParseDebian822Service: run `dpkg --field` to get control file
ExtractMetadataService->>+ExtractDebMetadataService: If .deb or .udeb
ExtractDebMetadataService->>+ParseDebian822Service: run `dpkg --field` to get control file
ParseDebian822Service-->>-ExtractDebMetadataService: Parse String as Debian RFC822 control data format
@@ -135,8 +135,8 @@ sequenceDiagram
ParseDebian822Service-->>-ExtractMetadataService: Parse String as Debian RFC822 control data format
ExtractMetadataService-->>-ProcessChangesService: Use parsed metadata to update "unknown" (or known) file
end
- ProcessChangesService->>+GenerateDistributionWorker:
- GenerateDistributionWorker->>+GenerateDistributionService:
+ ProcessChangesService->>+GenerateDistributionWorker: Find distribution and start service
+ GenerateDistributionWorker->>+GenerateDistributionService: Generate distribution
GenerateDistributionService->>+GenerateDistributionService: generate component files based on new archs and updates from .changes
GenerateDistributionService->>+GenerateDistributionKeyService: generate GPG key for distribution
GenerateDistributionKeyService-->>-GenerateDistributionService: GPG key
diff --git a/doc/operations/incident_management/alerts.md b/doc/operations/incident_management/alerts.md
index af42571f82f..a4b34807094 100644
--- a/doc/operations/incident_management/alerts.md
+++ b/doc/operations/incident_management/alerts.md
@@ -168,8 +168,9 @@ by changing the status. Setting the status to:
- **Acknowledged** limits on-call pages based on the project's [escalation policy](escalation_policies.md).
- **Triggered** from **Resolved** restarts the alert escalating from the beginning.
-For [alerts with an associated incident](alerts.md#create-an-incident-from-an-alert),
-updating the alert status also updates the incident status.
+In GitLab 15.1 and earlier, updating the status of an [alert with an associated incident](alerts.md#create-an-incident-from-an-alert)
+also updates the incident status. In [GitLab 15.2 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/356057),
+the incident status is independent and does not update when the alert status changes.
### Create an incident from an alert
diff --git a/doc/operations/incident_management/incidents.md b/doc/operations/incident_management/incidents.md
index bd7ef113a55..a6324595312 100644
--- a/doc/operations/incident_management/incidents.md
+++ b/doc/operations/incident_management/incidents.md
@@ -278,8 +278,9 @@ by changing the status. Setting the status to:
- **Acknowledged** limits on-call pages based on the selected [escalation policy](#change-escalation-policy).
- **Triggered** from **Resolved** restarts the incident escalating from the beginning.
-For [incidents created from alerts](alerts.md#create-an-incident-from-an-alert),
-updating the incident status also updates the alert status.
+In GitLab 15.1 and earlier, updating the status of an [incident created from an alert](alerts.md#create-an-incident-from-an-alert)
+also updates the alert status. In [GitLab 15.2 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/356057),
+the alert status is independent and does not update when the incident status changes.
### Change escalation policy **(PREMIUM)**
@@ -296,8 +297,9 @@ Selecting an escalation policy updates the incident status to **Triggered** and
Deselecting an escalation policy halts escalation. Refer to the [incident status](#change-incident-status)
to manage on-call paging once escalation has begun.
-For [incidents created from alerts](alerts.md#create-an-incident-from-an-alert),
-the incident's escalation policy reflects the alert's escalation policy and cannot be changed.
+In GitLab 15.1 and earlier, the escalation policy for [incidents created from alerts](alerts.md#create-an-incident-from-an-alert)
+reflects the alert's escalation policy and cannot be changed. In [GitLab 15.2 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/356057),
+the incident escalation policy is independent and can be changed.
### Manage incidents from Slack
diff --git a/doc/operations/incident_management/paging.md b/doc/operations/incident_management/paging.md
index 936c297e555..ef5f6c81383 100644
--- a/doc/operations/incident_management/paging.md
+++ b/doc/operations/incident_management/paging.md
@@ -62,4 +62,3 @@ the rule fires. You can respond to a page or stop incident escalations by
[unsetting the incident's escalation policy](incidents.md#change-escalation-policy).
To avoid duplicate pages, [incidents created from alerts](alerts.md#create-an-incident-from-an-alert) do not support independent escalation.
-Instead, the status and escalation policy fields are synced between the alert and the incident.
diff --git a/doc/user/project/merge_requests/csv_export.md b/doc/user/project/merge_requests/csv_export.md
index aaa9bec945f..2adcc4d4575 100644
--- a/doc/user/project/merge_requests/csv_export.md
+++ b/doc/user/project/merge_requests/csv_export.md
@@ -10,7 +10,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Exporting merge requests CSV enables you and your team to export all the data collected from merge requests into a comma-separated values (CSV) file, which stores tabular data in plain text.
-To export merge requests to CSV, navigate to your **Merge requests** from the sidebar of a project and select **Export to CSV**.
+To export merge requests to CSV, navigate to your **Merge requests** from the sidebar of a project and select **Export as CSV**.
## CSV Output
diff --git a/glfm_specification/example_snapshots/html.yml b/glfm_specification/example_snapshots/html.yml
index 6a9acdd03a4..8544d1bc172 100644
--- a/glfm_specification/example_snapshots/html.yml
+++ b/glfm_specification/example_snapshots/html.yml
@@ -1351,8 +1351,8 @@
<p data-sourcepos="1:1-2:3" dir="auto"><code> </code>
aaa</p>
wysiwyg: |-
- <p><code>
- aaa</code></p>
+ <p>
+ aaa</p>
04_05__leaf_blocks__fenced_code_blocks__021:
canonical: |
<pre><code>aaa
@@ -1711,8 +1711,7 @@
<p data-sourcepos="3:1-3:5"><em>foo</em></p>
</del>
wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ <p><em><s>foo</s></em></p>
04_06__leaf_blocks__html_blocks__021:
canonical: |
<p><del><em>foo</em></del></p>
@@ -5381,7 +5380,7 @@
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><em>(<strong>foo</strong>)</em></p>
wysiwyg: |-
- <p><em>(</em><strong>foo</strong>)</p>
+ <p><em>(</em><strong><em>foo</em></strong><em>)</em></p>
06_05__inlines__emphasis_and_strong_emphasis__044:
canonical: |
<p><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
@@ -5390,15 +5389,15 @@
<p data-sourcepos="1:1-2:25" dir="auto"><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
<em>Asclepias physocarpa</em>)</strong></p>
wysiwyg: |-
- <p><strong>Gomphocarpus (</strong><em>Gomphocarpus physocarpus</em>, syn.
- <em>Asclepias physocarpa</em>)</p>
+ <p><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
+ <em>Asclepias physocarpa</em>)</strong></p>
06_05__inlines__emphasis_and_strong_emphasis__045:
canonical: |
<p><strong>foo &quot;<em>bar</em>&quot; foo</strong></p>
static: |-
<p data-sourcepos="1:1-1:19" dir="auto"><strong>foo "<em>bar</em>" foo</strong></p>
wysiwyg: |-
- <p><strong>foo "</strong><em>bar</em>" foo</p>
+ <p><strong>foo "<em>bar</em>" foo</strong></p>
06_05__inlines__emphasis_and_strong_emphasis__046:
canonical: |
<p><strong>foo</strong>bar</p>
@@ -5426,7 +5425,7 @@
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><em>(<strong>foo</strong>)</em></p>
wysiwyg: |-
- <p><em>(</em><strong>foo</strong>)</p>
+ <p><em>(</em><strong><em>foo</em></strong><em>)</em></p>
06_05__inlines__emphasis_and_strong_emphasis__050:
canonical: |
<p>__foo__bar</p>
@@ -5461,7 +5460,7 @@
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><em>foo <a href="/url">bar</a></em></p>
wysiwyg: |-
- <p><em>foo </em><a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
+ <p><em>foo </em><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><em>bar</em></a></p>
06_05__inlines__emphasis_and_strong_emphasis__055:
canonical: |
<p><em>foo
@@ -5478,7 +5477,7 @@
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><em>foo <strong>bar</strong> baz</em></p>
wysiwyg: |-
- <p><em>foo </em><strong>bar</strong> baz</p>
+ <p><em>foo </em><strong><em>bar</em></strong><em> baz</em></p>
06_05__inlines__emphasis_and_strong_emphasis__057:
canonical: |
<p><em>foo <em>bar</em> baz</em></p>
@@ -5506,14 +5505,14 @@
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><em>foo <strong>bar</strong> baz</em></p>
wysiwyg: |-
- <p><em>foo </em><strong>bar</strong> baz</p>
+ <p><em>foo </em><strong><em>bar</em></strong><em> baz</em></p>
06_05__inlines__emphasis_and_strong_emphasis__061:
canonical: |
<p><em>foo<strong>bar</strong>baz</em></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><em>foo<strong>bar</strong>baz</em></p>
wysiwyg: |-
- <p><em>foo</em><strong>bar</strong>baz</p>
+ <p><em>foo</em><strong><em>bar</em></strong><em>baz</em></p>
06_05__inlines__emphasis_and_strong_emphasis__062:
canonical: |
<p><em>foo**bar</em></p>
@@ -5527,21 +5526,21 @@
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><em><strong>foo</strong> bar</em></p>
wysiwyg: |-
- <p><strong><em>foo</em></strong> bar</p>
+ <p><strong><em>foo</em></strong><em> bar</em></p>
06_05__inlines__emphasis_and_strong_emphasis__064:
canonical: |
<p><em>foo <strong>bar</strong></em></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><em>foo <strong>bar</strong></em></p>
wysiwyg: |-
- <p><em>foo </em><strong>bar</strong></p>
+ <p><em>foo </em><strong><em>bar</em></strong></p>
06_05__inlines__emphasis_and_strong_emphasis__065:
canonical: |
<p><em>foo<strong>bar</strong></em></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto"><em>foo<strong>bar</strong></em></p>
wysiwyg: |-
- <p><em>foo</em><strong>bar</strong></p>
+ <p><em>foo</em><strong><em>bar</em></strong></p>
06_05__inlines__emphasis_and_strong_emphasis__066:
canonical: |
<p>foo<em><strong>bar</strong></em>baz</p>
@@ -5562,7 +5561,7 @@
static: |-
<p data-sourcepos="1:1-1:27" dir="auto"><em>foo <strong>bar <em>baz</em> bim</strong> bop</em></p>
wysiwyg: |-
- <p><em>foo </em><strong>bar </strong><em>baz</em> bim bop</p>
+ <p><em>foo </em><strong><em>bar baz</em> bim</strong> bop</p>
06_05__inlines__emphasis_and_strong_emphasis__069:
canonical: |
<p><em>foo <a href="/url"><em>bar</em></a></em></p>
@@ -5590,7 +5589,7 @@
static: |-
<p data-sourcepos="1:1-1:19" dir="auto"><strong>foo <a href="/url">bar</a></strong></p>
wysiwyg: |-
- <p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
+ <p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><strong>bar</strong></a></p>
06_05__inlines__emphasis_and_strong_emphasis__073:
canonical: |
<p><strong>foo
@@ -5607,7 +5606,7 @@
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><strong>foo <em>bar</em> baz</strong></p>
wysiwyg: |-
- <p><strong>foo </strong><em>bar</em> baz</p>
+ <p><strong>foo <em>bar</em> baz</strong></p>
06_05__inlines__emphasis_and_strong_emphasis__075:
canonical: |
<p><strong>foo <strong>bar</strong> baz</strong></p>
@@ -5635,28 +5634,28 @@
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><strong>foo <em>bar</em> baz</strong></p>
wysiwyg: |-
- <p><strong>foo </strong><em>bar</em> baz</p>
+ <p><strong>foo <em>bar</em> baz</strong></p>
06_05__inlines__emphasis_and_strong_emphasis__079:
canonical: |
<p><strong>foo<em>bar</em>baz</strong></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><strong>foo<em>bar</em>baz</strong></p>
wysiwyg: |-
- <p><strong>foo</strong><em>bar</em>baz</p>
+ <p><strong>foo<em>bar</em>baz</strong></p>
06_05__inlines__emphasis_and_strong_emphasis__080:
canonical: |
<p><strong><em>foo</em> bar</strong></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><strong><em>foo</em> bar</strong></p>
wysiwyg: |-
- <p><strong><em>foo</em></strong> bar</p>
+ <p><strong><em>foo</em> bar</strong></p>
06_05__inlines__emphasis_and_strong_emphasis__081:
canonical: |
<p><strong>foo <em>bar</em></strong></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><strong>foo <em>bar</em></strong></p>
wysiwyg: |-
- <p><strong>foo </strong><em>bar</em></p>
+ <p><strong>foo <em>bar</em></strong></p>
06_05__inlines__emphasis_and_strong_emphasis__082:
canonical: |
<p><strong>foo <em>bar <strong>baz</strong>
@@ -5665,15 +5664,15 @@
<p data-sourcepos="1:1-2:10" dir="auto"><strong>foo <em>bar <strong>baz</strong>
bim</em> bop</strong></p>
wysiwyg: |-
- <p><strong>foo </strong><em>bar </em><strong>baz</strong>
- bim bop</p>
+ <p><strong>foo <em>bar baz</em></strong><em>
+ bim</em> bop</p>
06_05__inlines__emphasis_and_strong_emphasis__083:
canonical: |
<p><strong>foo <a href="/url"><em>bar</em></a></strong></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto"><strong>foo <a href="/url"><em>bar</em></a></strong></p>
wysiwyg: |-
- <p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><em>bar</em></a></p>
+ <p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><strong><em>bar</em></strong></a></p>
06_05__inlines__emphasis_and_strong_emphasis__084:
canonical: |
<p>__ is not an empty emphasis</p>
@@ -5932,7 +5931,7 @@
static: |-
<p data-sourcepos="1:1-1:26" dir="auto"><em>foo <strong>bar *baz bim</strong> bam</em></p>
wysiwyg: |-
- <p><em>foo </em><strong>bar *baz bim</strong> bam</p>
+ <p><em>foo </em><strong><em>bar *baz bim</em></strong><em> bam</em></p>
06_05__inlines__emphasis_and_strong_emphasis__121:
canonical: |
<p>**foo <strong>bar baz</strong></p>
@@ -5988,14 +5987,14 @@
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><em>a <code>*</code></em></p>
wysiwyg: |-
- <p><em>a </em><code>*</code></p>
+ <p><em>a <code>*</code></em></p>
06_05__inlines__emphasis_and_strong_emphasis__129:
canonical: |
<p><em>a <code>_</code></em></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><em>a <code>_</code></em></p>
wysiwyg: |-
- <p><em>a </em><code>_</code></p>
+ <p><em>a <code>_</code></em></p>
06_05__inlines__emphasis_and_strong_emphasis__130:
canonical: |
<p>**a<a href="http://foo.bar/?q=**">http://foo.bar/?q=**</a></p>
@@ -6267,15 +6266,14 @@
static: |-
<p data-sourcepos="1:1-1:30" dir="auto"><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link </a><em>foo </em><strong>bar</strong><code>#</code></p>
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link <em>foo </em><strong><em>bar<code>#</code></em></strong></a></p>
06_07__inlines__links__033:
canonical: |
<p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
static: |-
<p data-sourcepos="1:1-1:25" dir="auto"><a href="/uri"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="moon" decoding="async" class="lazy" data-src="moon.jpg"></a></p>
wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri"><img src="moon.jpg" alt="moon"></a></p>
06_07__inlines__links__034:
canonical: |
<p>[foo <a href="/uri">bar</a>](/uri)</p>
@@ -6289,7 +6287,7 @@
static: |-
<p data-sourcepos="1:1-1:37" dir="auto">[foo <em>[bar <a href="/uri">baz</a>](/uri)</em>](/uri)</p>
wysiwyg: |-
- <p>[foo <em>[bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">baz</a>](/uri)](/uri)</p>
+ <p>[foo <em>[bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri"><em>baz</em></a><em>](/uri)</em>](/uri)</p>
06_07__inlines__links__036:
canonical: |
<p><img src="uri3" alt="[foo](uri2)" /></p>
@@ -6366,15 +6364,14 @@
static: |-
<p data-sourcepos="1:1-1:29" dir="auto"><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link </a><em>foo </em><strong>bar</strong><code>#</code></p>
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link <em>foo </em><strong><em>bar<code>#</code></em></strong></a></p>
06_07__inlines__links__047:
canonical: |
<p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto"><a href="/uri"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="moon" decoding="async" class="lazy" data-src="moon.jpg"></a></p>
wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri"><img src="moon.jpg" alt="moon"></a></p>
06_07__inlines__links__048:
canonical: |
<p>[foo <a href="/uri">bar</a>]<a href="/uri">ref</a></p>
@@ -6388,7 +6385,7 @@
static: |-
<p data-sourcepos="1:1-1:27" dir="auto">[foo <em>bar <a href="/uri">baz</a></em>]<a href="/uri">ref</a></p>
wysiwyg: |-
- <p>[foo <em>bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">baz</a>]<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">ref</a></p>
+ <p>[foo <em>bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri"><em>baz</em></a>]<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">ref</a></p>
06_07__inlines__links__050:
canonical: |
<p>*<a href="/uri">foo*</a></p>
@@ -6553,7 +6550,7 @@
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><a href="/url" title="title"><em>foo</em> bar</a></p>
wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar</p>
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em> bar</a></p>
06_07__inlines__links__071:
canonical: |
<p><a href="/url" title="title">Foo</a></p>
@@ -6584,14 +6581,14 @@
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><a href="/url" title="title"><em>foo</em> bar</a></p>
wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar</p>
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em> bar</a></p>
06_07__inlines__links__075:
canonical: |
<p>[<a href="/url" title="title"><em>foo</em> bar</a>]</p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">[<a href="/url" title="title"><em>foo</em> bar</a>]</p>
wysiwyg: |-
- <p>[<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar]</p>
+ <p>[<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em> bar</a>]</p>
06_07__inlines__links__076:
canonical: |
<p>[[bar <a href="/url">foo</a></p>
@@ -7301,8 +7298,8 @@
<p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>
bar</em></p>
wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ <p><em>foo<br>
+ bar</em></p>
06_13__inlines__hard_line_breaks__007:
canonical: |
<p><em>foo<br />
@@ -7311,8 +7308,8 @@
<p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>
bar</em></p>
wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ <p><em>foo<br>
+ bar</em></p>
06_13__inlines__hard_line_breaks__008:
canonical: |
<p><code>code span</code></p>
@@ -7415,11 +7412,11 @@
07_01__gitlab_specific_markdown__footnotes__001:
canonical: ""
static: |-
- <p data-sourcepos="1:1-1:27" dir="auto">footnote reference tag <sup class="footnote-ref"><a href="#fn-1-794" id="fnref-1-794" data-footnote-ref>1</a></sup></p>
+ <p data-sourcepos="1:1-1:27" dir="auto">footnote reference tag <sup class="footnote-ref"><a href="#fn-1-4719" id="fnref-1-4719" data-footnote-ref>1</a></sup></p>
<section data-footnotes class="footnotes">
<ol>
- <li id="fn-1-794">
- <p data-sourcepos="3:7-3:19">footnote text <a href="#fnref-1-794" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <li id="fn-1-4719">
+ <p data-sourcepos="3:7-3:19">footnote text <a href="#fnref-1-4719" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
</ol>
</section>
diff --git a/glfm_specification/example_snapshots/prosemirror_json.yml b/glfm_specification/example_snapshots/prosemirror_json.yml
index 85d499ef552..f0bc20ca1ed 100644
--- a/glfm_specification/example_snapshots/prosemirror_json.yml
+++ b/glfm_specification/example_snapshots/prosemirror_json.yml
@@ -2667,11 +2667,6 @@
"content": [
{
"type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
"text": "\naaa"
}
]
@@ -3259,8 +3254,28 @@
]
}
04_06__leaf_blocks__html_blocks__020: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ },
+ {
+ "type": "strike"
+ }
+ ],
+ "text": "foo"
+ }
+ ]
+ }
+ ]
+ }
04_06__leaf_blocks__html_blocks__021: |-
{
"type": "doc",
@@ -11691,12 +11706,20 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "foo"
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
"text": ")"
}
]
@@ -11723,6 +11746,9 @@
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -11730,12 +11756,20 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
"text": ", syn.\n"
},
{
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -11743,6 +11777,11 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
"text": ")"
}
]
@@ -11769,6 +11808,9 @@
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -11776,6 +11818,11 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
"text": "\" foo"
}
]
@@ -11857,12 +11904,20 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "foo"
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
"text": ")"
}
]
@@ -11971,6 +12026,9 @@
"title": null,
"canonicalSrc": null
}
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
@@ -12020,12 +12078,20 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
"text": " baz"
}
]
@@ -12121,12 +12187,20 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
"text": " baz"
}
]
@@ -12154,12 +12228,20 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
"text": "baz"
}
]
@@ -12207,6 +12289,11 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
"text": " bar"
}
]
@@ -12234,6 +12321,9 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
@@ -12263,6 +12353,9 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
@@ -12351,22 +12444,25 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
- "text": "bar "
+ "text": "bar baz"
},
{
"type": "text",
"marks": [
{
- "type": "italic"
+ "type": "bold"
}
],
- "text": "baz"
+ "text": " bim"
},
{
"type": "text",
- "text": " bim bop"
+ "text": " bop"
}
]
}
@@ -12469,6 +12565,9 @@
"title": null,
"canonicalSrc": null
}
+ },
+ {
+ "type": "bold"
}
],
"text": "bar"
@@ -12517,6 +12616,9 @@
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -12524,6 +12626,11 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
"text": " baz"
}
]
@@ -12618,6 +12725,9 @@
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -12625,6 +12735,11 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
"text": " baz"
}
]
@@ -12651,6 +12766,9 @@
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -12658,6 +12776,11 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
"text": "baz"
}
]
@@ -12685,6 +12808,11 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
"text": " bar"
}
]
@@ -12711,6 +12839,9 @@
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -12740,23 +12871,26 @@
"type": "text",
"marks": [
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
- "text": "bar "
+ "text": "bar baz"
},
{
"type": "text",
"marks": [
{
- "type": "bold"
+ "type": "italic"
}
],
- "text": "baz"
+ "text": "\nbim"
},
{
"type": "text",
- "text": "\nbim bop"
+ "text": " bop"
}
]
}
@@ -12792,6 +12926,9 @@
}
},
{
+ "type": "bold"
+ },
+ {
"type": "italic"
}
],
@@ -13602,12 +13739,20 @@
"marks": [
{
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar *baz bim"
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
"text": " bam"
}
]
@@ -13799,6 +13944,9 @@
"type": "text",
"marks": [
{
+ "type": "italic"
+ },
+ {
"type": "code"
}
],
@@ -13828,6 +13976,9 @@
"type": "text",
"marks": [
{
+ "type": "italic"
+ },
+ {
"type": "code"
}
],
@@ -14770,6 +14921,16 @@
"type": "text",
"marks": [
{
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ },
+ {
"type": "italic"
}
],
@@ -14779,7 +14940,20 @@
"type": "text",
"marks": [
{
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ },
+ {
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
@@ -14788,6 +14962,22 @@
"type": "text",
"marks": [
{
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ },
+ {
+ "type": "bold"
+ },
+ {
+ "type": "italic"
+ },
+ {
"type": "code"
}
],
@@ -14798,8 +14988,38 @@
]
}
06_07__inlines__links__033: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "image",
+ "attrs": {
+ "src": "moon.jpg",
+ "alt": "moon",
+ "title": null,
+ "uploading": false,
+ "canonicalSrc": null
+ },
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
06_07__inlines__links__034: |-
{
"type": "doc",
@@ -14867,13 +15087,25 @@
"title": null,
"canonicalSrc": null
}
+ },
+ {
+ "type": "italic"
}
],
"text": "baz"
},
{
"type": "text",
- "text": "](/uri)](/uri)"
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
+ "text": "](/uri)"
+ },
+ {
+ "type": "text",
+ "text": "](/uri)"
}
]
}
@@ -15160,6 +15392,16 @@
"type": "text",
"marks": [
{
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ },
+ {
"type": "italic"
}
],
@@ -15169,7 +15411,20 @@
"type": "text",
"marks": [
{
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ },
+ {
"type": "bold"
+ },
+ {
+ "type": "italic"
}
],
"text": "bar"
@@ -15178,6 +15433,22 @@
"type": "text",
"marks": [
{
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ },
+ {
+ "type": "bold"
+ },
+ {
+ "type": "italic"
+ },
+ {
"type": "code"
}
],
@@ -15188,8 +15459,38 @@
]
}
06_07__inlines__links__047: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "image",
+ "attrs": {
+ "src": "moon.jpg",
+ "alt": "moon",
+ "title": null,
+ "uploading": false,
+ "canonicalSrc": null
+ },
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "/uri",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
06_07__inlines__links__048: |-
{
"type": "doc",
@@ -15273,6 +15574,9 @@
"title": null,
"canonicalSrc": null
}
+ },
+ {
+ "type": "italic"
}
],
"text": "baz"
@@ -15847,6 +16151,18 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "/url",
+ "target": "_blank",
+ "class": null,
+ "title": "title",
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": " bar"
}
]
@@ -15966,6 +16282,18 @@
},
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "/url",
+ "target": "_blank",
+ "class": null,
+ "title": "title",
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": " bar"
}
]
@@ -16004,7 +16332,23 @@
},
{
"type": "text",
- "text": " bar]"
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "/url",
+ "target": "_blank",
+ "class": null,
+ "title": "title",
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": " bar"
+ },
+ {
+ "type": "text",
+ "text": "]"
}
]
}
@@ -18256,11 +18600,79 @@
]
}
06_13__inlines__hard_line_breaks__006: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
+ "text": "foo"
+ },
+ {
+ "type": "hardBreak",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ]
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
+ "text": "\nbar"
+ }
+ ]
+ }
+ ]
+ }
06_13__inlines__hard_line_breaks__007: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
+ "text": "foo"
+ },
+ {
+ "type": "hardBreak",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ]
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
+ "text": "\nbar"
+ }
+ ]
+ }
+ ]
+ }
06_13__inlines__hard_line_breaks__008: |-
{
"type": "doc",
diff --git a/qa/qa/specs/features/browser_ui/5_package/container_registry/container_registry_spec.rb b/qa/qa/specs/features/browser_ui/5_package/container_registry/container_registry_spec.rb
index 2b644528309..6918c087a4e 100644
--- a/qa/qa/specs/features/browser_ui/5_package/container_registry/container_registry_spec.rb
+++ b/qa/qa/specs/features/browser_ui/5_package/container_registry/container_registry_spec.rb
@@ -79,7 +79,7 @@ module QA
Page::Project::Registry::Show.perform do |registry|
expect(registry).to have_registry_repository(project.name)
- registry.click_on_image(registry_repository.name)
+ registry.click_on_image(project.name)
expect(registry).to have_tag('master')
registry.click_delete
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index def7b1f5420..ddfed73e2ca 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -141,6 +141,8 @@ RSpec.describe 'Projects > Settings > Repository settings' do
end
context 'remote mirror settings' do
+ let(:ssh_url) { 'ssh://user@localhost/project.git' }
+
before do
visit project_settings_repository_path(project)
end
@@ -150,11 +152,12 @@ RSpec.describe 'Projects > Settings > Repository settings' do
end
it 'creates a push mirror that mirrors all branches', :js do
- expect(find('.js-mirror-protected-hidden', visible: false).value).to eq('0')
+ expect(page).to have_css('.js-mirror-protected-hidden[value="0"]', visible: false)
- fill_in 'url', with: 'ssh://user@localhost/project.git'
- select 'SSH public key', from: 'Authentication method'
+ fill_in 'url', with: ssh_url
+ expect(page).to have_css(".js-mirror-url-hidden[value=\"#{ssh_url}\"]", visible: false)
+ select 'SSH public key', from: 'Authentication method'
select_direction
Sidekiq::Testing.fake! do
@@ -167,13 +170,14 @@ RSpec.describe 'Projects > Settings > Repository settings' do
expect(project.remote_mirrors.first.only_protected_branches).to eq(false)
end
- it 'creates a push mirror that only mirrors protected branches', :js,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/337394' do
+ it 'creates a push mirror that only mirrors protected branches', :js do
find('#only_protected_branches').click
- expect(find('.js-mirror-protected-hidden', visible: false).value).to eq('1')
+ expect(page).to have_css('.js-mirror-protected-hidden[value="1"]', visible: false)
+
+ fill_in 'url', with: ssh_url
+ expect(page).to have_css(".js-mirror-url-hidden[value=\"#{ssh_url}\"]", visible: false)
- fill_in 'url', with: 'ssh://user@localhost/project.git'
select 'SSH public key', from: 'Authentication method'
select_direction
@@ -190,7 +194,9 @@ RSpec.describe 'Projects > Settings > Repository settings' do
it 'creates a push mirror that keeps divergent refs', :js do
select_direction
- fill_in 'url', with: 'ssh://user@localhost/project.git'
+ fill_in 'url', with: ssh_url
+ expect(page).to have_css(".js-mirror-url-hidden[value=\"#{ssh_url}\"]", visible: false)
+
fill_in 'Password', with: 'password'
check 'Keep divergent refs'
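
The mirror-settings assertions above switch from reading the hidden input's value synchronously to a waiting `have_css` matcher, so Capybara retries until the Vue form has written the value into the hidden field. A minimal sketch of the two styles (selector and value simply mirror the spec):

    # Waiting assertion: retried until the attribute matches or the Capybara timeout hits.
    expect(page).to have_css('.js-mirror-protected-hidden[value="0"]', visible: false)

    # Older style it replaces: reads the value once, so it can race the JS that sets it.
    expect(find('.js-mirror-protected-hidden', visible: false).value).to eq('0')
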
diff --git a/spec/frontend/content_editor/remark_markdown_processing_spec.js b/spec/frontend/content_editor/remark_markdown_processing_spec.js
index d1690c4f6c1..cd63720a433 100644
--- a/spec/frontend/content_editor/remark_markdown_processing_spec.js
+++ b/spec/frontend/content_editor/remark_markdown_processing_spec.js
@@ -931,19 +931,101 @@ Paragraph
`,
expectedDoc: doc(div(source('<div>div</div>'), paragraph(source('div'), 'div'))),
},
+ {
+ markdown: `
+[![moon](moon.jpg)](/uri)
+`,
+ expectedDoc: doc(
+ paragraph(
+ source('[![moon](moon.jpg)](/uri)'),
+ link(
+ { ...source('[![moon](moon.jpg)](/uri)'), href: '/uri' },
+ image({ ...source('![moon](moon.jpg)'), src: 'moon.jpg', alt: 'moon' }),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+<del>
+
+*foo*
+
+</del>
+`,
+ expectedDoc: doc(
+ paragraph(
+ source('*foo*'),
+ strike(source('<del>\n\n*foo*\n\n</del>'), italic(source('*foo*'), 'foo')),
+ ),
+ ),
+ expectedMarkdown: '*foo*',
+ },
+ {
+ markdown: `
+~[moon](moon.jpg) and [sun](sun.jpg)~
+`,
+ expectedDoc: doc(
+ paragraph(
+ source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
+ strike(
+ source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
+ link({ ...source('[moon](moon.jpg)'), href: 'moon.jpg' }, 'moon'),
+ ),
+ strike(source('~[moon](moon.jpg) and [sun](sun.jpg)~'), ' and '),
+ strike(
+ source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
+ link({ ...source('[sun](sun.jpg)'), href: 'sun.jpg' }, 'sun'),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+<del>
+
+**Paragraph 1**
+
+_Paragraph 2_
+
+</del>
+ `,
+ expectedDoc: doc(
+ paragraph(
+ source('**Paragraph 1**'),
+ strike(
+ source('<del>\n\n**Paragraph 1**\n\n_Paragraph 2_\n\n</del>'),
+ bold(source('**Paragraph 1**'), 'Paragraph 1'),
+ ),
+ ),
+ paragraph(
+ source('_Paragraph 2_'),
+ strike(
+ source('<del>\n\n**Paragraph 1**\n\n_Paragraph 2_\n\n</del>'),
+ italic(source('_Paragraph 2_'), 'Paragraph 2'),
+ ),
+ ),
+ ),
+ expectedMarkdown: `**Paragraph 1**
+
+_Paragraph 2_`,
+ },
];
const runOnly = examples.find((example) => example.only === true);
const runExamples = runOnly ? [runOnly] : examples;
- it.each(runExamples)('processes %s correctly', async ({ markdown, expectedDoc }) => {
- const trimmed = markdown.trim();
- const document = await deserialize(trimmed);
+ it.each(runExamples)(
+ 'processes %s correctly',
+ async ({ markdown, expectedDoc, expectedMarkdown }) => {
+ const trimmed = markdown.trim();
+ const document = await deserialize(trimmed);
- expect(expectedDoc).not.toBeFalsy();
- expect(document.toJSON()).toEqual(expectedDoc.toJSON());
- expect(serialize(document)).toEqual(trimmed);
- });
+ expect(expectedDoc).not.toBeFalsy();
+ expect(document.toJSON()).toEqual(expectedDoc.toJSON());
+ expect(serialize(document)).toEqual(expectedMarkdown || trimmed);
+ },
+ );
/**
* DISCLAIMER: THIS IS A SECURITY ORIENTED TEST THAT ENSURES
diff --git a/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb b/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb
new file mode 100644
index 00000000000..bd821714605
--- /dev/null
+++ b/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe UnsetEscalationPoliciesForAlertIncidents do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:alerts) { table(:alert_management_alerts) }
+ let(:escalation_policies) { table(:incident_management_escalation_policies) }
+ let(:escalation_statuses) { table(:incident_management_issuable_escalation_statuses) }
+ let(:current_time) { Time.current.change(usec: 0) }
+
+ let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
+ let!(:project_namespace) { namespaces.create!(name: 'project', path: 'project', type: 'project') }
+ let!(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: project_namespace.id) }
+ let!(:policy) { escalation_policies.create!(project_id: project.id, name: 'escalation policy') }
+
+ # Escalation status with policy from alert; Policy & escalation start time should be nullified
+ let!(:issue_1) { create_issue }
+ let!(:escalation_status_1) { create_status(issue_1, policy, current_time) }
+ let!(:alert_1) { create_alert(1, issue_1) }
+
+ # Escalation status without policy, but with alert; Should be ignored
+ let!(:issue_2) { create_issue }
+ let!(:escalation_status_2) { create_status(issue_2, nil, current_time) }
+ let!(:alert_2) { create_alert(2, issue_2) }
+
+ # Escalation status without alert, but with policy; Should be ignored
+ let!(:issue_3) { create_issue }
+ let!(:escalation_status_3) { create_status(issue_3, policy, current_time) }
+
+ # Alert without issue; Should be ignored
+ let!(:alert_3) { create_alert(3) }
+
+ it 'removes the escalation policy if the incident corresponds to an alert' do
+ expect { migrate! }
+ .to change { escalation_status_1.reload.policy_id }.from(policy.id).to(nil)
+ .and change { escalation_status_1.escalations_started_at }.from(current_time).to(nil)
+ .and not_change { policy_attrs(escalation_status_2) }
+ .and not_change { policy_attrs(escalation_status_3) }
+ end
+
+ private
+
+ def create_issue
+ issues.create!(project_id: project.id)
+ end
+
+ def create_status(issue, policy = nil, escalations_started_at = nil)
+ escalation_statuses.create!(
+ issue_id: issue.id,
+ policy_id: policy&.id,
+ escalations_started_at: escalations_started_at
+ )
+ end
+
+ def create_alert(iid, issue = nil)
+ alerts.create!(
+ project_id: project.id,
+ started_at: current_time,
+ title: "alert #{iid}",
+ iid: iid.to_s,
+ issue_id: issue&.id
+ )
+ end
+
+ def policy_attrs(escalation_status)
+ escalation_status.reload.slice(:policy_id, :escalations_started_at)
+ end
+end
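
The migration spec above pins down the behaviour of the 20220629184402 post-migrate step: escalation statuses whose incident has an associated alert lose their policy and escalation start time, while every other record is untouched. A hedged sketch of an `up` that would satisfy these expectations (the class shape and SQL are assumptions inferred from the spec, not the shipped code):

    # frozen_string_literal: true

    # Sketch only: inferred from the spec above, not the actual migration.
    class UnsetEscalationPoliciesForAlertIncidents < Gitlab::Database::Migration[2.0]
      def up
        # Null out policy_id/escalations_started_at where the incident comes from an alert.
        execute <<~SQL
          UPDATE incident_management_issuable_escalation_statuses
          SET policy_id = NULL, escalations_started_at = NULL
          WHERE policy_id IS NOT NULL
            AND issue_id IN (
              SELECT issue_id FROM alert_management_alerts WHERE issue_id IS NOT NULL
            )
        SQL
      end

      def down
        # Irreversible: the cleared associations cannot be reconstructed.
      end
    end
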
diff --git a/spec/models/packages/cleanup/policy_spec.rb b/spec/models/packages/cleanup/policy_spec.rb
index c08ae4aa7e7..9f4568932d2 100644
--- a/spec/models/packages/cleanup/policy_spec.rb
+++ b/spec/models/packages/cleanup/policy_spec.rb
@@ -25,4 +25,16 @@ RSpec.describe Packages::Cleanup::Policy, type: :model do
it { is_expected.to contain_exactly(active_policy) }
end
+
+ describe '#keep_n_duplicated_package_files_disabled?' do
+ subject { policy.keep_n_duplicated_package_files_disabled? }
+
+ %w[all 1].each do |value|
+ context "with value set to #{value}" do
+ let(:policy) { build(:packages_cleanup_policy, keep_n_duplicated_package_files: value) }
+
+ it { is_expected.to eq(value == 'all') }
+ end
+ end
+ end
end
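
The new policy examples only assert that the predicate is true for `'all'` and false for a numeric value, which suggests a one-line model method; a sketch under that assumption:

    # Sketch of what the spec above implies; the actual model code may differ.
    def keep_n_duplicated_package_files_disabled?
      keep_n_duplicated_package_files == 'all'
    end
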
diff --git a/spec/services/alert_management/alerts/update_service_spec.rb b/spec/services/alert_management/alerts/update_service_spec.rb
index 9bdc9970807..8375c8cdf7d 100644
--- a/spec/services/alert_management/alerts/update_service_spec.rb
+++ b/spec/services/alert_management/alerts/update_service_spec.rb
@@ -249,57 +249,6 @@ RSpec.describe AlertManagement::Alerts::UpdateService do
it_behaves_like 'adds a system note'
end
-
- context 'with an associated issue' do
- let_it_be(:issue, reload: true) { create(:issue, project: project) }
-
- before do
- alert.update!(issue: issue)
- end
-
- shared_examples 'does not sync with the incident status' do
- specify do
- expect(::Issues::UpdateService).not_to receive(:new)
- expect { response }.to change { alert.acknowledged? }.to(true)
- end
- end
-
- it_behaves_like 'does not sync with the incident status'
-
- context 'when the issue is an incident' do
- before do
- issue.update!(issue_type: Issue.issue_types[:incident])
- end
-
- it_behaves_like 'does not sync with the incident status'
-
- context 'when the incident has an escalation status' do
- let_it_be(:escalation_status, reload: true) { create(:incident_management_issuable_escalation_status, issue: issue) }
-
- it 'updates the incident escalation status with the new alert status' do
- expect(::Issues::UpdateService).to receive(:new).once.and_call_original
- expect(described_class).to receive(:new).once.and_call_original
-
- expect { response }.to change { escalation_status.reload.acknowledged? }.to(true)
- .and change { alert.reload.acknowledged? }.to(true)
- end
-
- context 'when the statuses match' do
- before do
- escalation_status.update!(status_event: :acknowledge)
- end
-
- it_behaves_like 'does not sync with the incident status'
- end
- end
- end
- end
-
- context 'when a status change reason is included' do
- let(:params) { { status: new_status, status_change_reason: ' by changing the incident status' } }
-
- it_behaves_like 'adds a system note', /changed the status to \*\*Acknowledged\*\* by changing the incident status/
- end
end
end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb
index 88c5f6125b9..4b0c8d9113c 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb
@@ -7,14 +7,11 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::AfterUpdateServic
let_it_be(:escalation_status, reload: true) { create(:incident_management_issuable_escalation_status, :triggered) }
let_it_be(:issue, reload: true) { escalation_status.issue }
let_it_be(:project) { issue.project }
- let_it_be(:alert) { create(:alert_management_alert, issue: issue, project: project) }
- let(:status_event) { :acknowledge }
- let(:update_params) { { incident_management_issuable_escalation_status_attributes: { status_event: status_event } } }
let(:service) { IncidentManagement::IssuableEscalationStatuses::AfterUpdateService.new(issue, current_user) }
subject(:result) do
- issue.update!(update_params)
+ issue.update!(incident_management_issuable_escalation_status_attributes: update_params)
service.execute
end
@@ -22,22 +19,15 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::AfterUpdateServic
issue.project.add_developer(current_user)
end
- shared_examples 'does not attempt to update the alert' do
- specify do
- expect(::AlertManagement::Alerts::UpdateService).not_to receive(:new)
-
- expect(result).to be_success
- end
- end
+ context 'with status attributes' do
+ let(:status_event) { :acknowledge }
+ let(:update_params) { { status_event: status_event } }
- shared_examples 'adds a status change system note' do
- specify do
+ it 'adds a status change system note' do
expect { result }.to change { issue.reload.notes.count }.by(1)
end
- end
- shared_examples 'adds a status change timeline event' do
- specify do
+ it 'adds a status change timeline event' do
expect(IncidentManagement::TimelineEvents::CreateService)
.to receive(:change_incident_status)
.with(issue, current_user, escalation_status)
@@ -47,35 +37,13 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::AfterUpdateServic
end
end
- context 'with status attributes' do
- it_behaves_like 'adds a status change system note'
- it_behaves_like 'adds a status change timeline event'
-
- it 'updates the alert with the new alert status' do
- expect(::AlertManagement::Alerts::UpdateService).to receive(:new).once.and_call_original
- expect(described_class).to receive(:new).once.and_call_original
-
- expect { result }.to change { escalation_status.reload.acknowledged? }.to(true)
- .and change { alert.reload.acknowledged? }.to(true)
- end
-
- context 'when incident is not associated with an alert' do
- before do
- alert.destroy!
- end
-
- it_behaves_like 'does not attempt to update the alert'
- it_behaves_like 'adds a status change system note'
- it_behaves_like 'adds a status change timeline event'
- end
-
- context 'when new status matches the current status' do
- let(:status_event) { :trigger }
-
- it_behaves_like 'does not attempt to update the alert'
+ context 'with non-status attributes' do
+ let(:update_params) { { updated_at: Time.current } }
- specify { expect { result }.not_to change { issue.reload.notes.count } }
- specify { expect { result }.not_to change { issue.reload.incident_management_timeline_events.count } }
+ it 'does not add a status change system note or timeline event' do
+ expect { result }
+ .to not_change { issue.reload.notes.count }
+ .and not_change { issue.reload.incident_management_timeline_events.count }
end
end
end
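
The rewritten expectations chain a negated change matcher. `not_change` is not built into RSpec; it is typically declared once in spec support with `RSpec::Matchers.define_negated_matcher` (GitLab ships such an alias; its exact location is not asserted here):

    # Define the negated alias once, then compose it like any other matcher.
    RSpec::Matchers.define_negated_matcher :not_change, :change

    expect { result }
      .to not_change { issue.reload.notes.count }
      .and not_change { issue.reload.incident_management_timeline_events.count }
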
diff --git a/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb
index c20a0688ac2..b5c5238d483 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb
@@ -11,10 +11,4 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::BuildService do
subject(:execute) { service.execute }
it_behaves_like 'initializes new escalation status with expected attributes'
-
- context 'with associated alert' do
- let_it_be(:alert) { create(:alert_management_alert, :acknowledged, project: project, issue: incident) }
-
- it_behaves_like 'initializes new escalation status with expected attributes', { status_event: :acknowledge }
- end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
index 2c7d330766c..b6ae03a19fe 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
@@ -27,19 +27,4 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::CreateService do
expect { execute }.not_to change { incident.reload.escalation_status }
end
end
-
- context 'with associated alert' do
- before do
- create(:alert_management_alert, :acknowledged, project: project, issue: incident)
- end
-
- it 'creates an escalation status matching the alert attributes' do
- expect { execute }.to change { incident.reload.escalation_status }.from(nil)
- expect(incident.escalation_status).to have_attributes(
- status_name: :acknowledged,
- policy_id: nil,
- escalations_started_at: nil
- )
- end
- end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
index 6c99631fcb0..761cc5c92ea 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
@@ -102,10 +102,4 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateServ
it_behaves_like 'successful response', { status_event: :acknowledge }
end
end
-
- context 'with status_change_reason param' do
- let(:params) { { status_change_reason: ' by changing the incident status' } }
-
- it_behaves_like 'successful response', { status_change_reason: ' by changing the incident status' }
- end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index d11fe772023..e2e8828ae89 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -1146,11 +1146,11 @@ RSpec.describe Issues::UpdateService, :mailer do
let(:opts) { { escalation_status: { status: 'acknowledged' } } }
let(:escalation_update_class) { ::IncidentManagement::IssuableEscalationStatuses::AfterUpdateService }
- shared_examples 'updates the escalation status record' do |expected_status, expected_reason = nil|
+ shared_examples 'updates the escalation status record' do |expected_status|
let(:service_double) { instance_double(escalation_update_class) }
it 'has correct value' do
- expect(escalation_update_class).to receive(:new).with(issue, user, status_change_reason: expected_reason).and_return(service_double)
+ expect(escalation_update_class).to receive(:new).with(issue, user).and_return(service_double)
expect(service_double).to receive(:execute)
update_issue(opts)
@@ -1193,23 +1193,6 @@ RSpec.describe Issues::UpdateService, :mailer do
it_behaves_like 'updates the escalation status record', :acknowledged
- context 'with associated alert' do
- let!(:alert) { create(:alert_management_alert, issue: issue, project: project) }
-
- it 'syncs the update back to the alert' do
- update_issue(opts)
-
- expect(issue.escalation_status.status_name).to eq(:acknowledged)
- expect(alert.reload.status_name).to eq(:acknowledged)
- end
- end
-
- context 'with a status change reason provided' do
- let(:opts) { { escalation_status: { status: 'acknowledged', status_change_reason: ' by changing the alert status' } } }
-
- it_behaves_like 'updates the escalation status record', :acknowledged, ' by changing the alert status'
- end
-
context 'with unsupported status value' do
let(:opts) { { escalation_status: { status: 'unsupported-status' } } }
diff --git a/spec/services/packages/cleanup/execute_policy_service_spec.rb b/spec/services/packages/cleanup/execute_policy_service_spec.rb
new file mode 100644
index 00000000000..93335c4a821
--- /dev/null
+++ b/spec/services/packages/cleanup/execute_policy_service_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Cleanup::ExecutePolicyService do
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:policy) { create(:packages_cleanup_policy, project: project) }
+
+ let(:service) { described_class.new(policy) }
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ context 'with the keep_n_duplicated_files parameter' do
+ let_it_be(:package1) { create(:package, project: project) }
+ let_it_be(:package2) { create(:package, project: project) }
+ let_it_be(:package3) { create(:package, project: project) }
+ let_it_be(:package4) { create(:package, :pending_destruction, project: project) }
+
+ let_it_be(:package_file1_1) { create(:package_file, package: package1, file_name: 'file_name1') }
+ let_it_be(:package_file1_2) { create(:package_file, package: package1, file_name: 'file_name1') }
+ let_it_be(:package_file1_3) { create(:package_file, package: package1, file_name: 'file_name1') }
+
+ let_it_be(:package_file1_4) { create(:package_file, package: package1, file_name: 'file_name2') }
+ let_it_be(:package_file1_5) { create(:package_file, package: package1, file_name: 'file_name2') }
+ let_it_be(:package_file1_6) { create(:package_file, package: package1, file_name: 'file_name2') }
+ let_it_be(:package_file1_7) do
+ create(:package_file, :pending_destruction, package: package1, file_name: 'file_name2')
+ end
+
+ let_it_be(:package_file2_1) { create(:package_file, package: package2, file_name: 'file_name1') }
+ let_it_be(:package_file2_2) { create(:package_file, package: package2, file_name: 'file_name1') }
+ let_it_be(:package_file2_3) { create(:package_file, package: package2, file_name: 'file_name1') }
+ let_it_be(:package_file2_4) { create(:package_file, package: package2, file_name: 'file_name1') }
+
+ let_it_be(:package_file3_1) { create(:package_file, package: package3, file_name: 'file_name_test') }
+
+ let_it_be(:package_file4_1) { create(:package_file, package: package4, file_name: 'file_name1') }
+ let_it_be(:package_file4_2) { create(:package_file, package: package4, file_name: 'file_name1') }
+
+ let(:package_files_1) { package1.package_files.installable }
+ let(:package_files_2) { package2.package_files.installable }
+ let(:package_files_3) { package3.package_files.installable }
+
+ context 'set to less than the total number of duplicated files' do
+ before do
+ # for each package file duplicate, we keep only the most recent one
+ policy.update!(keep_n_duplicated_package_files: '1')
+ end
+
+ shared_examples 'keeping the most recent package files' do
+ let(:response_payload) do
+ {
+ counts: {
+ marked_package_files_total_count: 7,
+ unique_package_id_and_file_name_total_count: 3
+ },
+ timeout: false
+ }
+ end
+
+ it 'only keeps the most recent package files' do
+ expect { execute }.to change { ::Packages::PackageFile.installable.count }.by(-7)
+
+ expect(package_files_1).to contain_exactly(package_file1_3, package_file1_6)
+ expect(package_files_2).to contain_exactly(package_file2_4)
+ expect(package_files_3).to contain_exactly(package_file3_1)
+
+ expect(execute).to be_success
+ expect(execute.message).to eq("Packages cleanup policy executed for project #{project.id}")
+ expect(execute.payload).to eq(response_payload)
+ end
+ end
+
+ it_behaves_like 'keeping the most recent package files'
+
+ context 'when the service needs to loop' do
+ before do
+ stub_const("#{described_class.name}::DUPLICATED_FILES_BATCH_SIZE", 2)
+ end
+
+ it_behaves_like 'keeping the most recent package files' do
+ before do
+ expect(::Packages::MarkPackageFilesForDestructionService)
+ .to receive(:new).exactly(3).times.and_call_original
+ end
+ end
+
+ context 'when a timeout is hit' do
+ let(:response_payload) do
+ {
+ counts: {
+ marked_package_files_total_count: 4,
+ unique_package_id_and_file_name_total_count: 3
+ },
+ timeout: true
+ }
+ end
+
+ let(:service_timeout_response) do
+ ServiceResponse.error(
+ message: 'Timeout while marking package files as pending destruction',
+ payload: { marked_package_files_count: 0 }
+ )
+ end
+
+ before do
+ mock_service_timeout(on_iteration: 3)
+ end
+
+ it 'keeps part of the most recent package files' do
+ expect { execute }
+ .to change { ::Packages::PackageFile.installable.count }.by(-4)
+ .and not_change { package_files_2.count } # untouched because of the timeout
+ .and not_change { package_files_3.count } # untouched because of the timeout
+
+ expect(package_files_1).to contain_exactly(package_file1_3, package_file1_6)
+ expect(execute).to be_success
+ expect(execute.message).to eq("Packages cleanup policy executed for project #{project.id}")
+ expect(execute.payload).to eq(response_payload)
+ end
+
+ def mock_service_timeout(on_iteration:)
+ execute_call_count = 1
+ expect_next_instances_of(::Packages::MarkPackageFilesForDestructionService, 3) do |service|
+ expect(service).to receive(:execute).and_wrap_original do |m, *args|
+ # timeout if we are on the right iteration
+ if execute_call_count == on_iteration
+ service_timeout_response
+ else
+ execute_call_count += 1
+ m.call(*args)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'set to more than the total number of duplicated files' do
+ before do
+ # using the biggest value for keep_n_duplicated_package_files
+ policy.update!(keep_n_duplicated_package_files: '50')
+ end
+
+ it 'keeps all package files' do
+ expect { execute }.not_to change { ::Packages::PackageFile.installable.count }
+ end
+ end
+
+ context 'set to all' do
+ before do
+ policy.update!(keep_n_duplicated_package_files: 'all')
+ end
+
+ it 'skips the policy' do
+ expect(::Packages::MarkPackageFilesForDestructionService).not_to receive(:new)
+ expect { execute }.not_to change { ::Packages::PackageFile.installable.count }
+ end
+ end
+ end
+ end
+end
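
Taken together, the expectations above outline the cleanup loop: skip the policy when it keeps all duplicates, otherwise walk duplicated (package_id, file_name) groups in batches, keep the most recent N installable files per group, and stop early once the marking service reports a timeout. A rough sketch of that flow, with helper names marked as assumptions:

    # Sketch only: duplicated_file_groups, timeout_response and success are assumed helpers;
    # the batch size constant and the marking service calls come from the spec above.
    def mark_duplicates(policy, batch_deadline)
      return success(0) if policy.keep_n_duplicated_package_files_disabled? # 'all'

      marked = 0
      duplicated_file_groups(policy.project).each_slice(DUPLICATED_FILES_BATCH_SIZE) do |batch|
        batch.each do |package_id, file_name|
          # Keep the N most recent installable files for this duplicate group.
          files = ::Packages::PackageFile.installable
                    .where(package_id: package_id, file_name: file_name)
                    .order(id: :desc)
                    .offset(policy.keep_n_duplicated_package_files.to_i)

          result = ::Packages::MarkPackageFilesForDestructionService
                     .new(files).execute(batch_deadline: batch_deadline)

          return timeout_response(marked) if result.error?

          marked += result.payload[:marked_package_files_count]
        end
      end

      success(marked)
    end
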
diff --git a/spec/services/packages/mark_package_files_for_destruction_service_spec.rb b/spec/services/packages/mark_package_files_for_destruction_service_spec.rb
index a836de1f7f6..66534338003 100644
--- a/spec/services/packages/mark_package_files_for_destruction_service_spec.rb
+++ b/spec/services/packages/mark_package_files_for_destruction_service_spec.rb
@@ -6,9 +6,11 @@ RSpec.describe Packages::MarkPackageFilesForDestructionService, :aggregate_failu
let(:service) { described_class.new(package_files) }
describe '#execute', :aggregate_failures do
- subject { service.execute }
+ let(:batch_deadline) { nil }
- shared_examples 'executing successfully' do
+ subject { service.execute(batch_deadline: batch_deadline) }
+
+ shared_examples 'executing successfully' do |marked_package_files_count: 0|
it 'marks package files for destruction' do
expect { subject }
.to change { ::Packages::PackageFile.pending_destruction.count }.by(package_files.size)
@@ -17,6 +19,7 @@ RSpec.describe Packages::MarkPackageFilesForDestructionService, :aggregate_failu
it 'executes successfully' do
expect(subject).to be_success
expect(subject.message).to eq('Package files are now pending destruction')
+ expect(subject.payload).to eq(marked_package_files_count: marked_package_files_count)
end
end
@@ -30,13 +33,49 @@ RSpec.describe Packages::MarkPackageFilesForDestructionService, :aggregate_failu
let_it_be(:package_file) { create(:package_file) }
let_it_be(:package_files) { ::Packages::PackageFile.id_in(package_file.id) }
- it_behaves_like 'executing successfully'
+ it_behaves_like 'executing successfully', marked_package_files_count: 1
end
context 'with many package files' do
let_it_be(:package_files) { ::Packages::PackageFile.id_in(create_list(:package_file, 3).map(&:id)) }
- it_behaves_like 'executing successfully'
+ it_behaves_like 'executing successfully', marked_package_files_count: 3
+
+ context 'with a batch deadline' do
+ let_it_be(:batch_deadline) { 250.seconds.from_now }
+
+ context 'when the deadline is not hit' do
+ before do
+ expect(Time.zone).to receive(:now).and_return(batch_deadline - 10.seconds)
+ end
+
+ it_behaves_like 'executing successfully', marked_package_files_count: 3
+ end
+
+ context 'when the deadline is hit' do
+ it 'does not execute the batch loop' do
+ expect(Time.zone).to receive(:now).and_return(batch_deadline + 10.seconds)
+ expect { subject }.to not_change { ::Packages::PackageFile.pending_destruction.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq('Timeout while marking package files as pending destruction')
+ expect(subject.payload).to eq(marked_package_files_count: 0)
+ end
+ end
+ end
+
+ context 'when a batch size is defined' do
+ let_it_be(:batch_deadline) { 250.seconds.from_now }
+
+ let(:batch_size) { 2 }
+
+ subject { service.execute(batch_deadline: batch_deadline, batch_size: batch_size) }
+
+ before do
+ expect(Time.zone).to receive(:now).twice.and_call_original
+ end
+
+ it_behaves_like 'executing successfully', marked_package_files_count: 3
+ end
end
context 'with an error during the update' do