summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorOleh Prypin <oleh@pryp.in>2021-01-18 20:39:02 +0100
committerGitHub <noreply@github.com>2021-01-18 20:39:02 +0100
commitf0445be718da83541ea3401aad882f3937147263 (patch)
tree26bd361a410d8bea33ce259321fad63e7f3c61af
parent423c44a451db7e5f63147b1c1519661d745fc43a (diff)
downloadpygments-git-f0445be718da83541ea3401aad882f3937147263.tar.gz
Replace tests that assert on token output with auto-updatable samples (#1649)
-rw-r--r--Contributing.md20
-rw-r--r--tests/conftest.py5
-rw-r--r--tests/lexers/apacheconf/test_directive_no_args.txt14
-rw-r--r--tests/lexers/apacheconf/test_fix_lock_absolute_path.txt9
-rw-r--r--tests/lexers/apacheconf/test_include_globs.txt9
-rw-r--r--tests/lexers/apacheconf/test_malformed_scoped_directive_closing_tag.txt20
-rw-r--r--tests/lexers/apacheconf/test_multi_include_globs.txt9
-rw-r--r--tests/lexers/apacheconf/test_multi_include_globs_root.txt9
-rw-r--r--tests/lexers/apacheconf/test_multiline_argument.txt21
-rw-r--r--tests/lexers/apacheconf/test_multiline_comment.txt12
-rw-r--r--tests/lexers/apacheconf/test_normal_scoped_directive.txt14
-rw-r--r--tests/lexers/asm/test_cpuid.txt9
-rw-r--r--tests/lexers/bibtex/test_basic_bst.txt54
-rw-r--r--tests/lexers/bibtex/test_comment.txt7
-rw-r--r--tests/lexers/bibtex/test_entry.txt63
-rw-r--r--tests/lexers/bibtex/test_mismatched_brace.txt10
-rw-r--r--tests/lexers/bibtex/test_missing_body.txt10
-rw-r--r--tests/lexers/bibtex/test_preamble.txt11
-rw-r--r--tests/lexers/bibtex/test_string.txt15
-rw-r--r--tests/lexers/c/test_label.txt31
-rw-r--r--tests/lexers/c/test_label_followed_by_statement.txt35
-rw-r--r--tests/lexers/c/test_label_space_before_colon.txt32
-rw-r--r--tests/lexers/c/test_numbers.txt20
-rw-r--r--tests/lexers/c/test_preproc_file.txt9
-rw-r--r--tests/lexers/c/test_preproc_file2.txt9
-rw-r--r--tests/lexers/c/test_switch.txt56
-rw-r--r--tests/lexers/c/test_switch_space_before_colon.txt58
-rw-r--r--tests/lexers/cfm/test_basic_comment.txt8
-rw-r--r--tests/lexers/cfm/test_nested_comment.txt12
-rw-r--r--tests/lexers/coffeescript/test_beware_infinite_loop.txt14
-rw-r--r--tests/lexers/coffeescript/test_mixed_slashes.txt13
-rw-r--r--tests/lexers/conftest.py89
-rw-r--r--tests/lexers/console/test_comment_after_prompt.txt6
-rw-r--r--tests/lexers/console/test_newline_in_echo.txt15
-rw-r--r--tests/lexers/console/test_newline_in_ls.txt15
-rw-r--r--tests/lexers/console/test_virtualenv.txt11
-rw-r--r--tests/lexers/coq/test_unicode.txt15
-rw-r--r--tests/lexers/cpp/test_good_comment.txt6
-rw-r--r--tests/lexers/cpp/test_open_comment.txt5
-rw-r--r--tests/lexers/crystal/test_annotation.txt16
-rw-r--r--tests/lexers/crystal/test_array_access.txt11
-rw-r--r--tests/lexers/crystal/test_chars.txt25
-rw-r--r--tests/lexers/crystal/test_constant_and_module.txt14
-rw-r--r--tests/lexers/crystal/test_escaped_bracestring.txt19
-rw-r--r--tests/lexers/crystal/test_escaped_interpolation.txt9
-rw-r--r--tests/lexers/crystal/test_interpolation_nested_curly.txt56
-rw-r--r--tests/lexers/crystal/test_lib.txt58
-rw-r--r--tests/lexers/crystal/test_macro.txt76
-rw-r--r--tests/lexers/crystal/test_operator_methods.txt18
-rw-r--r--tests/lexers/crystal/test_percent_strings.txt41
-rw-r--r--tests/lexers/crystal/test_percent_strings_special.txt31
-rw-r--r--tests/lexers/crystal/test_pseudo_builtins.txt20
-rw-r--r--tests/lexers/crystal/test_pseudo_keywords.txt50
-rw-r--r--tests/lexers/crystal/test_range_syntax1.txt8
-rw-r--r--tests/lexers/crystal/test_range_syntax2.txt10
-rw-r--r--tests/lexers/csound/test_braced_strings.txt11
-rw-r--r--tests/lexers/csound/test_comments.txt16
-rw-r--r--tests/lexers/csound/test_escape_sequences.txt122
-rw-r--r--tests/lexers/csound/test_function_like_macro_definitions.txt44
-rw-r--r--tests/lexers/csound/test_function_like_macros.txt40
-rw-r--r--tests/lexers/csound/test_global_value_identifiers.txt30
-rw-r--r--tests/lexers/csound/test_goto_statements.txt176
-rw-r--r--tests/lexers/csound/test_include_directives.txt14
-rw-r--r--tests/lexers/csound/test_includestr_directives.txt11
-rw-r--r--tests/lexers/csound/test_instrument_blocks.txt42
-rw-r--r--tests/lexers/csound/test_keywords.txt62
-rw-r--r--tests/lexers/csound/test_labels.txt13
-rw-r--r--tests/lexers/csound/test_macro_preprocessor_directives.txt20
-rw-r--r--tests/lexers/csound/test_name.txt9
-rw-r--r--tests/lexers/csound/test_numbers.txt52
-rw-r--r--tests/lexers/csound/test_object_like_macro_definitions.txt30
-rw-r--r--tests/lexers/csound/test_operators.txt114
-rw-r--r--tests/lexers/csound/test_other_preprocessor_directives.txt26
-rw-r--r--tests/lexers/csound/test_printks_and_prints_escape_sequences.txt290
-rw-r--r--tests/lexers/csound/test_quoted_strings.txt9
-rw-r--r--tests/lexers/csound/test_user_defined_opcodes.txt24
-rw-r--r--tests/lexers/doscon/test_gt_only.txt11
-rw-r--r--tests/lexers/ezhil/test_function.txt99
-rw-r--r--tests/lexers/ezhil/test_gcd_expr.txt21
-rw-r--r--tests/lexers/ezhil/test_if_statement.txt28
-rw-r--r--tests/lexers/ezhil/test_sum.txt8
-rw-r--r--tests/lexers/gas/test_comments.txt29
-rw-r--r--tests/lexers/gdscript/test_comment.txt6
-rw-r--r--tests/lexers/gdscript/test_export_array.txt17
-rw-r--r--tests/lexers/gdscript/test_function_with_types.txt33
-rw-r--r--tests/lexers/gdscript/test_inner_class.txt20
-rw-r--r--tests/lexers/gdscript/test_multiline_string.txt8
-rw-r--r--tests/lexers/gdscript/test_signal.txt15
-rw-r--r--tests/lexers/gdscript/test_simple_function.txt22
-rw-r--r--tests/lexers/gdscript/test_variable_declaration_and_assigment.txt12
-rw-r--r--tests/lexers/haskell/test_promoted_names.txt10
-rw-r--r--tests/lexers/http/test_application_calendar_xml.txt28
-rw-r--r--tests/lexers/http/test_application_xml.txt28
-rw-r--r--tests/lexers/http/test_http_status_line.txt12
-rw-r--r--tests/lexers/http/test_http_status_line_without_reason_phrase.txt10
-rw-r--r--tests/lexers/http/test_http_status_line_without_reason_phrase_rfc_7230.txt11
-rw-r--r--tests/lexers/idris/test_compiler_directive.txt20
-rw-r--r--tests/lexers/idris/test_reserved_word.txt29
-rw-r--r--tests/lexers/ini/test_indented_entries_1.txt16
-rw-r--r--tests/lexers/ini/test_indented_entries_2.txt20
-rw-r--r--tests/lexers/ini/test_indented_entries_3.txt20
-rw-r--r--tests/lexers/java/test_enhanced_for.txt21
-rw-r--r--tests/lexers/java/test_numeric_literals.txt34
-rw-r--r--tests/lexers/json/test_basic.txt30
-rw-r--r--tests/lexers/json/test_basic_bare.txt23
-rw-r--r--tests/lexers/julia/test_unicode.txt37
-rw-r--r--tests/lexers/kotlin/test_can_cope_generics_in_destructuring.txt27
-rw-r--r--tests/lexers/kotlin/test_can_cope_with_backtick_names_in_functions.txt8
-rw-r--r--tests/lexers/kotlin/test_can_cope_with_commas_and_dashes_in_backtick_Names.txt8
-rw-r--r--tests/lexers/kotlin/test_can_cope_with_destructuring.txt16
-rw-r--r--tests/lexers/kotlin/test_can_cope_with_generics.txt32
-rw-r--r--tests/lexers/kotlin/test_should_cope_with_multiline_comments.txt10
-rw-r--r--tests/lexers/mason/test_handles_tags_correctly.txt69
-rw-r--r--tests/lexers/matlab/test_classes_with_properties.txt105
-rw-r--r--tests/lexers/matlab/test_command_mode.txt12
-rw-r--r--tests/lexers/matlab/test_comment_after_continuation.txt25
-rw-r--r--tests/lexers/matlab/test_keywords_ended_by_newline.txt36
-rw-r--r--tests/lexers/matlab/test_line_continuation.txt25
-rw-r--r--tests/lexers/matlab/test_multiple_spaces_variable_assignment.txt13
-rw-r--r--tests/lexers/matlab/test_one_space_assignment.txt13
-rw-r--r--tests/lexers/matlab/test_operator_multiple_space.txt13
-rw-r--r--tests/lexers/matlab/test_single_line.txt18
-rw-r--r--tests/lexers/md/test_bold_fenced_by_asterisk.txt15
-rw-r--r--tests/lexers/md/test_bold_fenced_by_underscore.txt15
-rw-r--r--tests/lexers/md/test_bulleted_list_1.txt14
-rw-r--r--tests/lexers/md/test_bulleted_list_2.txt14
-rw-r--r--tests/lexers/md/test_bulleted_list_3.txt14
-rw-r--r--tests/lexers/md/test_bulleted_list_4.txt19
-rw-r--r--tests/lexers/md/test_code_block_fenced_by_backticks.txt15
-rw-r--r--tests/lexers/md/test_code_block_with_language.txt16
-rw-r--r--tests/lexers/md/test_escape_italics.txt23
-rw-r--r--tests/lexers/md/test_inline_code.txt36
-rw-r--r--tests/lexers/md/test_inline_code_after_block.txt19
-rw-r--r--tests/lexers/md/test_inline_code_in_list.txt26
-rw-r--r--tests/lexers/md/test_invalid_bold.txt31
-rw-r--r--tests/lexers/md/test_invalid_italics.txt31
-rw-r--r--tests/lexers/md/test_italics_and_bold.txt21
-rw-r--r--tests/lexers/md/test_italics_fenced_by_asterisk.txt15
-rw-r--r--tests/lexers/md/test_italics_fenced_by_underscore.txt15
-rw-r--r--tests/lexers/md/test_italics_no_multiline.txt10
-rw-r--r--tests/lexers/md/test_links.txt23
-rw-r--r--tests/lexers/md/test_mentions.txt10
-rw-r--r--tests/lexers/md/test_numbered_list.txt14
-rw-r--r--tests/lexers/md/test_quote.txt10
-rw-r--r--tests/lexers/md/test_reference_style_links.txt18
-rw-r--r--tests/lexers/md/test_strikethrough.txt9
-rw-r--r--tests/lexers/md/test_task_list.txt34
-rw-r--r--tests/lexers/md/test_topics.txt10
-rw-r--r--tests/lexers/objectivec/test_literal_number_bool.txt7
-rw-r--r--tests/lexers/objectivec/test_literal_number_bool_expression.txt9
-rw-r--r--tests/lexers/objectivec/test_literal_number_expression.txt11
-rw-r--r--tests/lexers/objectivec/test_literal_number_int.txt9
-rw-r--r--tests/lexers/objectivec/test_literal_number_nested_expression.txt15
-rw-r--r--tests/lexers/objectivec/test_module_import.txt9
-rw-r--r--tests/lexers/peg/test_basic.txt17
-rw-r--r--tests/lexers/peg/test_modified_strings.txt21
-rw-r--r--tests/lexers/peg/test_operators.txt29
-rw-r--r--tests/lexers/php/test_string_escaping_run.txt16
-rw-r--r--tests/lexers/powershell/test_remoting_session.txt19
-rw-r--r--tests/lexers/powershell/test_session.txt28
-rw-r--r--tests/lexers/praat/test_broken_unquoted_string.txt16
-rw-r--r--tests/lexers/praat/test_function_call.txt20
-rw-r--r--tests/lexers/praat/test_inline_if.txt27
-rw-r--r--tests/lexers/praat/test_interpolated_indexed_numeric_with_precision.txt6
-rw-r--r--tests/lexers/praat/test_interpolated_local_numeric_with_precision.txt6
-rw-r--r--tests/lexers/praat/test_interpolated_numeric_hash.txt6
-rw-r--r--tests/lexers/praat/test_interpolated_numeric_indexed.txt6
-rw-r--r--tests/lexers/praat/test_interpolated_numeric_with_precision.txt6
-rw-r--r--tests/lexers/praat/test_interpolated_string_hash.txt6
-rw-r--r--tests/lexers/praat/test_interpolated_string_indexed.txt6
-rw-r--r--tests/lexers/praat/test_interpolation_boundary.txt14
-rw-r--r--tests/lexers/praat/test_numeric_assignment.txt11
-rw-r--r--tests/lexers/praat/test_string_assignment.txt12
-rw-r--r--tests/lexers/praat/test_string_escaped_quotes.txt13
-rw-r--r--tests/lexers/promql/test_expression_and_comment.txt15
-rw-r--r--tests/lexers/promql/test_function_delta.txt19
-rw-r--r--tests/lexers/promql/test_function_multi_line.txt80
-rw-r--r--tests/lexers/promql/test_function_multi_line_with_offset.txt87
-rw-r--r--tests/lexers/promql/test_function_sum_with_args.txt19
-rw-r--r--tests/lexers/promql/test_metric.txt6
-rw-r--r--tests/lexers/promql/test_metric_multiple_labels.txt19
-rw-r--r--tests/lexers/promql/test_metric_multiple_labels_with_spaces.txt22
-rw-r--r--tests/lexers/promql/test_metric_one_label.txt13
-rw-r--r--tests/lexers/properties/test_comments.txt12
-rw-r--r--tests/lexers/properties/test_escaped_space_in_key.txt10
-rw-r--r--tests/lexers/properties/test_escaped_space_in_value.txt10
-rw-r--r--tests/lexers/properties/test_just_key.txt6
-rw-r--r--tests/lexers/properties/test_just_key_with_space.txt6
-rw-r--r--tests/lexers/properties/test_leading_whitespace_comments.txt6
-rw-r--r--tests/lexers/properties/test_space_delimited_kv_pair.txt7
-rw-r--r--tests/lexers/python/test_fstring_01a.txt25
-rw-r--r--tests/lexers/python/test_fstring_01b.txt25
-rw-r--r--tests/lexers/python/test_fstring_02a.txt13
-rw-r--r--tests/lexers/python/test_fstring_02b.txt13
-rw-r--r--tests/lexers/python/test_fstring_03a.txt14
-rw-r--r--tests/lexers/python/test_fstring_03b.txt14
-rw-r--r--tests/lexers/python/test_fstring_04a.txt13
-rw-r--r--tests/lexers/python/test_fstring_04b.txt13
-rw-r--r--tests/lexers/python/test_fstring_05a.txt16
-rw-r--r--tests/lexers/python/test_fstring_05b.txt16
-rw-r--r--tests/lexers/python/test_fstring_06a.txt16
-rw-r--r--tests/lexers/python/test_fstring_06b.txt16
-rw-r--r--tests/lexers/python/test_fstring_07a.txt17
-rw-r--r--tests/lexers/python/test_fstring_07b.txt17
-rw-r--r--tests/lexers/python/test_fstring_08a.txt15
-rw-r--r--tests/lexers/python/test_fstring_08b.txt15
-rw-r--r--tests/lexers/python/test_fstring_09a.txt14
-rw-r--r--tests/lexers/python/test_fstring_09b.txt14
-rw-r--r--tests/lexers/python/test_fstring_10a.txt18
-rw-r--r--tests/lexers/python/test_fstring_10b.txt18
-rw-r--r--tests/lexers/python/test_fstring_11a.txt18
-rw-r--r--tests/lexers/python/test_fstring_11b.txt18
-rw-r--r--tests/lexers/python/test_fstring_12a.txt16
-rw-r--r--tests/lexers/python/test_fstring_12b.txt16
-rw-r--r--tests/lexers/python/test_fstring_13a.txt17
-rw-r--r--tests/lexers/python/test_fstring_13b.txt17
-rw-r--r--tests/lexers/python/test_fstring_14a.txt20
-rw-r--r--tests/lexers/python/test_fstring_14b.txt20
-rw-r--r--tests/lexers/python/test_fstring_15a.txt42
-rw-r--r--tests/lexers/python/test_fstring_15b.txt42
-rw-r--r--tests/lexers/python/test_fstring_16a.txt18
-rw-r--r--tests/lexers/python/test_fstring_16b.txt18
-rw-r--r--tests/lexers/python/test_fstring_17a.txt14
-rw-r--r--tests/lexers/python/test_fstring_17b.txt14
-rw-r--r--tests/lexers/python/test_fstring_18a.txt25
-rw-r--r--tests/lexers/python/test_fstring_18b.txt25
-rw-r--r--tests/lexers/python/test_fstring_19a.txt46
-rw-r--r--tests/lexers/python/test_fstring_19b.txt46
-rw-r--r--tests/lexers/python/test_fstring_20a.txt17
-rw-r--r--tests/lexers/python/test_fstring_20b.txt17
-rw-r--r--tests/lexers/python/test_fstring_21a.txt15
-rw-r--r--tests/lexers/python/test_fstring_21b.txt15
-rw-r--r--tests/lexers/python/test_fstring_22a.txt14
-rw-r--r--tests/lexers/python/test_fstring_22b.txt14
-rw-r--r--tests/lexers/python/test_fstring_23a.txt11
-rw-r--r--tests/lexers/python/test_fstring_23b.txt11
-rw-r--r--tests/lexers/python/test_fstring_24a.txt23
-rw-r--r--tests/lexers/python/test_fstring_24b.txt23
-rw-r--r--tests/lexers/python/test_fstring_25a.txt24
-rw-r--r--tests/lexers/python/test_fstring_25b.txt24
-rw-r--r--tests/lexers/python/test_fstring_26a.txt20
-rw-r--r--tests/lexers/python/test_fstring_26b.txt20
-rw-r--r--tests/lexers/python/test_fstring_27a.txt11
-rw-r--r--tests/lexers/python/test_fstring_27b.txt11
-rw-r--r--tests/lexers/python/test_fstring_28a.txt11
-rw-r--r--tests/lexers/python/test_fstring_28b.txt11
-rw-r--r--tests/lexers/python/test_fstring_29a.txt15
-rw-r--r--tests/lexers/python/test_fstring_29b.txt15
-rw-r--r--tests/lexers/python/test_fstring_30a.txt16
-rw-r--r--tests/lexers/python/test_fstring_30b.txt16
-rw-r--r--tests/lexers/python/test_fstring_31a.txt15
-rw-r--r--tests/lexers/python/test_fstring_31b.txt15
-rw-r--r--tests/lexers/python/test_fstring_32a.txt15
-rw-r--r--tests/lexers/python/test_fstring_32b.txt15
-rw-r--r--tests/lexers/python/test_fstring_33a.txt15
-rw-r--r--tests/lexers/python/test_fstring_33b.txt15
-rw-r--r--tests/lexers/python/test_fstring_34a.txt20
-rw-r--r--tests/lexers/python/test_fstring_34b.txt20
-rw-r--r--tests/lexers/python/test_fstring_35a.txt15
-rw-r--r--tests/lexers/python/test_fstring_35b.txt15
-rw-r--r--tests/lexers/python/test_fstring_36a.txt16
-rw-r--r--tests/lexers/python/test_fstring_36b.txt16
-rw-r--r--tests/lexers/python/test_needs_name.txt55
-rw-r--r--tests/lexers/python/test_pep_515.txt28
-rw-r--r--tests/lexers/python/test_raw_fstring.txt46
-rw-r--r--tests/lexers/python/test_walrus_operator.txt21
-rw-r--r--tests/lexers/python2/test_cls_builtin.txt34
-rw-r--r--tests/lexers/qbasic/test_keywords_with_dollar.txt22
-rw-r--r--tests/lexers/r/test_call.txt12
-rw-r--r--tests/lexers/r/test_custom_operator.txt10
-rw-r--r--tests/lexers/r/test_dot_indexing.txt9
-rw-r--r--tests/lexers/r/test_dot_name.txt10
-rw-r--r--tests/lexers/r/test_indexing.txt9
-rw-r--r--tests/lexers/r/test_name1.txt6
-rw-r--r--tests/lexers/r/test_name2.txt8
-rw-r--r--tests/lexers/r/test_name3.txt8
-rw-r--r--tests/lexers/ruby/test_escaped_bracestring.txt19
-rw-r--r--tests/lexers/ruby/test_interpolation_nested_curly.txt56
-rw-r--r--tests/lexers/ruby/test_operator_methods.txt9
-rw-r--r--tests/lexers/ruby/test_range_syntax1.txt8
-rw-r--r--tests/lexers/ruby/test_range_syntax2.txt8
-rw-r--r--tests/lexers/ruby/test_range_syntax3.txt10
-rw-r--r--tests/lexers/shell/test_array_nums.txt14
-rw-r--r--tests/lexers/shell/test_curly_no_escape_and_quotes.txt15
-rw-r--r--tests/lexers/shell/test_curly_with_escape.txt13
-rw-r--r--tests/lexers/shell/test_end_of_line_nums.txt15
-rw-r--r--tests/lexers/shell/test_parsed_single.txt8
-rw-r--r--tests/lexers/shell/test_short_variable_names.txt26
-rw-r--r--tests/lexers/shexc/test_prefixed_name_starting_with_number.txt8
-rw-r--r--tests/lexers/smarty/test_nested_curly.txt18
-rw-r--r--tests/lexers/systemverilog/test_basic.txt181
-rw-r--r--tests/lexers/systemverilog/test_classes.txt95
-rw-r--r--tests/lexers/systemverilog/test_numbers.txt168
-rw-r--r--tests/lexers/systemverilog/test_operators.txt213
-rw-r--r--tests/lexers/turtle/test_prefixed_name_starting_with_number.txt8
-rw-r--r--tests/lexers/typescript/test_function_definition.txt18
-rw-r--r--tests/lexers/usd/test_attribute.txt174
-rw-r--r--tests/lexers/usd/test_composition_arcs.txt101
-rw-r--r--tests/lexers/usd/test_metadata.txt36
-rw-r--r--tests/lexers/usd/test_numbers.txt21
-rw-r--r--tests/lexers/usd/test_outer_match_at_sign.txt14
-rw-r--r--tests/lexers/usd/test_outer_match_double.txt12
-rw-r--r--tests/lexers/usd/test_outer_match_single.txt12
-rw-r--r--tests/lexers/usd/test_string_multiple_line.txt20
-rw-r--r--tests/lexers/usd/test_string_priority.txt10
-rw-r--r--tests/lexers/usd/test_string_single_line.txt6
-rw-r--r--tests/lexers/vbscript/test_floats.txt34
-rw-r--r--tests/lexers/vbscript/test_floats_multiple.txt7
-rw-r--r--tests/lexers/vbscript/test_integers.txt14
-rw-r--r--tests/lexers/vbscript/test_invalid_character.txt10
-rw-r--r--tests/lexers/vbscript/test_names.txt18
-rw-r--r--tests/lexers/vbscript/test_reject_almost_float.txt7
-rw-r--r--tests/lexers/vbscript/test_unterminated_string.txt7
-rw-r--r--tests/lexers/whiley/test_whiley_operator.txt10
-rw-r--r--tests/lexers/yaml/test_yaml.txt13
-rw-r--r--tests/lexers/yang/test_float_value.txt11
-rw-r--r--tests/lexers/yang/test_integer_value.txt11
-rw-r--r--tests/lexers/yang/test_namespace_1.txt11
-rw-r--r--tests/lexers/yang/test_namespace_2.txt13
-rw-r--r--tests/lexers/yang/test_revision_date.txt11
-rw-r--r--tests/lexers/yang/test_string_value.txt11
-rw-r--r--tests/test_apache_conf.py133
-rw-r--r--tests/test_asm.py67
-rw-r--r--tests/test_basic.py72
-rw-r--r--tests/test_bibtex.py239
-rw-r--r--tests/test_cfm.py43
-rw-r--r--tests/test_clexer.py264
-rw-r--r--tests/test_coffeescript.py32
-rw-r--r--tests/test_coq.py33
-rw-r--r--tests/test_cpp.py24
-rw-r--r--tests/test_crystal.py459
-rw-r--r--tests/test_csound.py523
-rw-r--r--tests/test_data.py81
-rw-r--r--tests/test_ezhil.py176
-rw-r--r--tests/test_gdscript.py166
-rw-r--r--tests/test_grammar_notation.py93
-rw-r--r--tests/test_haskell.py30
-rw-r--r--tests/test_hdl.py686
-rw-r--r--tests/test_idris.py64
-rw-r--r--tests/test_ini_lexer.py81
-rw-r--r--tests/test_java.py60
-rw-r--r--tests/test_javascript.py18
-rw-r--r--tests/test_julia.py58
-rw-r--r--tests/test_kotlin.py132
-rw-r--r--tests/test_markdown_lexer.py458
-rw-r--r--tests/test_matlab.py305
-rw-r--r--tests/test_objectiveclexer.py95
-rw-r--r--tests/test_php.py36
-rw-r--r--tests/test_praat.py212
-rw-r--r--tests/test_promql.py309
-rw-r--r--tests/test_properties.py95
-rw-r--r--tests/test_python.py889
-rw-r--r--tests/test_qbasiclexer.py40
-rw-r--r--tests/test_r.py111
-rw-r--r--tests/test_rdf.py41
-rw-r--r--tests/test_ruby.py148
-rw-r--r--tests/test_shell.py238
-rw-r--r--tests/test_smarty.py38
-rw-r--r--tests/test_templates.py63
-rw-r--r--tests/test_textfmts.py79
-rwxr-xr-xtests/test_usd.py506
-rw-r--r--tests/test_whiley.py30
-rw-r--r--tests/test_yang.py102
363 files changed, 7742 insertions, 7333 deletions
diff --git a/Contributing.md b/Contributing.md
index b66ba717..dbc8752e 100644
--- a/Contributing.md
+++ b/Contributing.md
@@ -38,7 +38,19 @@ Contribution checklist
curious about ``compiled.py`` -- this file exists for backwards compatibility
reasons.)
* Use the standard importing convention: ``from token import Punctuation``
-* If you have a tricky case, you can use the ``testcase`` formatter to produce
- an unit test quickly. Run
- ``python -m pygments -l lua -f testcase <<< "local a = 5"``. This will
- produce a test case function skeleton. \ No newline at end of file
+* For large test cases that assert on the tokens produced by a lexer, use tools:
+ * You can use the ``testcase`` formatter to produce a piece of code that
+ can be pasted into a unittest file:
+ ``python -m pygments -l lua -f testcase <<< "local a = 5"``
+ * But most large snippets should instead be put as a sample file under
+ ``tests/lexers/<lexer_alias>/*.txt``. These files are automatically
+ picked up as individual tests, asserting that the input produces the
+ expected tokens.
+
+ To add a new test, create a file with just your code snippet under a
+ subdirectory based on your lexer's main alias. Then run
+ ``pytest --update-goldens tests/lexers`` to auto-populate the currently
+ expected tokens. Check that they look good and check in the file.
+
+ Also run the same command whenever you need to update the test if the
+ actual produced tokens change (assuming the change is expected).
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 00000000..def1b770
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,5 @@
+import pytest
+
+
+def pytest_addoption(parser):
+ parser.addoption('--update-goldens', action='store_true', help='reset golden master benchmarks')
diff --git a/tests/lexers/apacheconf/test_directive_no_args.txt b/tests/lexers/apacheconf/test_directive_no_args.txt
new file mode 100644
index 00000000..3c8df4cb
--- /dev/null
+++ b/tests/lexers/apacheconf/test_directive_no_args.txt
@@ -0,0 +1,14 @@
+---input---
+Example
+ServerName localhost
+
+---tokens---
+'Example' Name.Builtin
+'' Text
+'\n' Text
+
+'ServerName' Name.Builtin
+' ' Text
+'localhost' Text
+'' Text
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_fix_lock_absolute_path.txt b/tests/lexers/apacheconf/test_fix_lock_absolute_path.txt
new file mode 100644
index 00000000..25a19b6d
--- /dev/null
+++ b/tests/lexers/apacheconf/test_fix_lock_absolute_path.txt
@@ -0,0 +1,9 @@
+---input---
+LockFile /var/lock/apache2/accept.lock
+
+---tokens---
+'LockFile' Name.Builtin
+' ' Text
+'/var/lock/apache2/accept.lock' Literal.String.Other
+'' Text
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_include_globs.txt b/tests/lexers/apacheconf/test_include_globs.txt
new file mode 100644
index 00000000..4430d9ef
--- /dev/null
+++ b/tests/lexers/apacheconf/test_include_globs.txt
@@ -0,0 +1,9 @@
+---input---
+Include /etc/httpd/conf.d/*.conf
+
+---tokens---
+'Include' Name.Builtin
+' ' Text
+'/etc/httpd/conf.d/*.conf' Literal.String.Other
+'' Text
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_malformed_scoped_directive_closing_tag.txt b/tests/lexers/apacheconf/test_malformed_scoped_directive_closing_tag.txt
new file mode 100644
index 00000000..a6f82466
--- /dev/null
+++ b/tests/lexers/apacheconf/test_malformed_scoped_directive_closing_tag.txt
@@ -0,0 +1,20 @@
+---input---
+<VirtualHost "test">
+</VirtualHost
+>
+
+---tokens---
+'<VirtualHost' Name.Tag
+' ' Text
+'"test"' Literal.String
+'>' Name.Tag
+'\n' Text
+
+'<' Error
+'/' Error
+'VirtualHost' Name.Builtin
+'' Text
+'\n' Text
+
+'>' Error
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_multi_include_globs.txt b/tests/lexers/apacheconf/test_multi_include_globs.txt
new file mode 100644
index 00000000..316bc1e3
--- /dev/null
+++ b/tests/lexers/apacheconf/test_multi_include_globs.txt
@@ -0,0 +1,9 @@
+---input---
+Include /etc/httpd/conf.d/*/*.conf
+
+---tokens---
+'Include' Name.Builtin
+' ' Text
+'/etc/httpd/conf.d/*/*.conf' Literal.String.Other
+'' Text
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_multi_include_globs_root.txt b/tests/lexers/apacheconf/test_multi_include_globs_root.txt
new file mode 100644
index 00000000..34b20cd6
--- /dev/null
+++ b/tests/lexers/apacheconf/test_multi_include_globs_root.txt
@@ -0,0 +1,9 @@
+---input---
+Include /*conf/*.conf
+
+---tokens---
+'Include' Name.Builtin
+' ' Text
+'/*conf/*.conf' Literal.String.Other
+'' Text
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_multiline_argument.txt b/tests/lexers/apacheconf/test_multiline_argument.txt
new file mode 100644
index 00000000..fa2e0691
--- /dev/null
+++ b/tests/lexers/apacheconf/test_multiline_argument.txt
@@ -0,0 +1,21 @@
+---input---
+SecAction \
+ "id:'900001', \
+ phase:1, \
+ t:none, \
+ setvar:tx.critical_anomaly_score=5, \
+ setvar:tx.error_anomaly_score=4, \
+ setvar:tx.warning_anomaly_score=3, \
+ setvar:tx.notice_anomaly_score=2, \
+ nolog, \
+ pass"
+
+---tokens---
+'SecAction' Name.Builtin
+' ' Text
+'\\\n' Text
+
+' ' Text
+'"id:\'900001\', \\\n phase:1, \\\n t:none, \\\n setvar:tx.critical_anomaly_score=5, \\\n setvar:tx.error_anomaly_score=4, \\\n setvar:tx.warning_anomaly_score=3, \\\n setvar:tx.notice_anomaly_score=2, \\\n nolog, \\\n pass"' Literal.String.Double
+'' Text
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_multiline_comment.txt b/tests/lexers/apacheconf/test_multiline_comment.txt
new file mode 100644
index 00000000..87a4e893
--- /dev/null
+++ b/tests/lexers/apacheconf/test_multiline_comment.txt
@@ -0,0 +1,12 @@
+---input---
+#SecAction \
+ "id:'900004', \
+ phase:1, \
+ t:none, \
+ setvar:tx.anomaly_score_blocking=on, \
+ nolog, \
+ pass"
+
+---tokens---
+'#SecAction \\\n "id:\'900004\', \\\n phase:1, \\\n t:none, \\\n setvar:tx.anomaly_score_blocking=on, \\\n nolog, \\\n pass"' Comment
+'\n' Text
diff --git a/tests/lexers/apacheconf/test_normal_scoped_directive.txt b/tests/lexers/apacheconf/test_normal_scoped_directive.txt
new file mode 100644
index 00000000..30881139
--- /dev/null
+++ b/tests/lexers/apacheconf/test_normal_scoped_directive.txt
@@ -0,0 +1,14 @@
+---input---
+<VirtualHost "test">
+</VirtualHost>
+
+---tokens---
+'<VirtualHost' Name.Tag
+' ' Text
+'"test"' Literal.String
+'>' Name.Tag
+'\n' Text
+
+'</VirtualHost' Name.Tag
+'>' Name.Tag
+'\n' Text
diff --git a/tests/lexers/asm/test_cpuid.txt b/tests/lexers/asm/test_cpuid.txt
new file mode 100644
index 00000000..cf73a2c3
--- /dev/null
+++ b/tests/lexers/asm/test_cpuid.txt
@@ -0,0 +1,9 @@
+# CPU is a valid directive, and we don't want to parse this as
+# cpu id, but as a single token. See bug #1517
+
+---input---
+cpuid
+
+---tokens---
+'cpuid' Name.Function
+'\n' Text
diff --git a/tests/lexers/bibtex/test_basic_bst.txt b/tests/lexers/bibtex/test_basic_bst.txt
new file mode 100644
index 00000000..7e8129a4
--- /dev/null
+++ b/tests/lexers/bibtex/test_basic_bst.txt
@@ -0,0 +1,54 @@
+---input---
+% BibTeX standard bibliography style `plain'
+
+INTEGERS { output.state before.all }
+
+FUNCTION {sort.format.title}
+{ 't :=
+"A " #2
+ "An " #3
+ "The " #4 t chop.word
+ chop.word
+chop.word
+sortify
+#1 global.max$ substring$
+}
+
+ITERATE {call.type$}
+
+---tokens---
+"% BibTeX standard bibliography style `plain'" Comment
+'\n\n' Text
+
+'INTEGERS { output.state before.all }' Comment
+'\n\n' Text
+
+'FUNCTION {sort.format.title}' Comment
+'\n' Text
+
+"{ 't :=" Comment
+'\n' Text
+
+'"A " #2' Comment
+'\n ' Text
+'"An " #3' Comment
+'\n ' Text
+'"The " #4 t chop.word' Comment
+'\n ' Text
+'chop.word' Comment
+'\n' Text
+
+'chop.word' Comment
+'\n' Text
+
+'sortify' Comment
+'\n' Text
+
+'#1 global.max$ substring$' Comment
+'\n' Text
+
+'}' Comment
+'\n\n' Text
+
+'ITERATE {call.type$}' Comment
+'\n' Text
diff --git a/tests/lexers/bibtex/test_comment.txt b/tests/lexers/bibtex/test_comment.txt
new file mode 100644
index 00000000..abb594ad
--- /dev/null
+++ b/tests/lexers/bibtex/test_comment.txt
@@ -0,0 +1,7 @@
+---input---
+@COMMENT{test}
+
+---tokens---
+'@COMMENT' Comment
+'{test}' Comment
+'\n' Text
diff --git a/tests/lexers/bibtex/test_entry.txt b/tests/lexers/bibtex/test_entry.txt
new file mode 100644
index 00000000..3d1ee837
--- /dev/null
+++ b/tests/lexers/bibtex/test_entry.txt
@@ -0,0 +1,63 @@
+---input---
+This is a comment.
+
+@ARTICLE{ruckenstein-diffusion,
+ author = "Liu, Hongquin" # and # "Ruckenstein, Eli",
+ year = 1997,
+ month = JAN,
+ pages = "888-895"
+}
+
+---tokens---
+'This is a comment.' Comment
+'\n\n' Text
+
+'@ARTICLE' Name.Class
+'{' Punctuation
+'ruckenstein-diffusion' Name.Label
+',' Punctuation
+'\n ' Text
+'author' Name.Attribute
+' ' Text
+'=' Punctuation
+' ' Text
+'"' Literal.String
+'Liu, Hongquin' Literal.String
+'"' Literal.String
+' ' Text
+'#' Punctuation
+' ' Text
+'and' Name.Variable
+' ' Text
+'#' Punctuation
+' ' Text
+'"' Literal.String
+'Ruckenstein, Eli' Literal.String
+'"' Literal.String
+',' Punctuation
+'\n ' Text
+'year' Name.Attribute
+' ' Text
+'=' Punctuation
+' ' Text
+'1997' Literal.Number
+',' Punctuation
+'\n ' Text
+'month' Name.Attribute
+' ' Text
+'=' Punctuation
+' ' Text
+'JAN' Name.Variable
+',' Punctuation
+'\n ' Text
+'pages' Name.Attribute
+' ' Text
+'=' Punctuation
+' ' Text
+'"' Literal.String
+'888-895' Literal.String
+'"' Literal.String
+'\n' Text
+
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/bibtex/test_mismatched_brace.txt b/tests/lexers/bibtex/test_mismatched_brace.txt
new file mode 100644
index 00000000..9f7f74f0
--- /dev/null
+++ b/tests/lexers/bibtex/test_mismatched_brace.txt
@@ -0,0 +1,10 @@
+---input---
+@PREAMBLE(""}
+
+---tokens---
+'@PREAMBLE' Name.Class
+'(' Punctuation
+'"' Literal.String
+'"' Literal.String
+'}' Error
+'\n' Text
diff --git a/tests/lexers/bibtex/test_missing_body.txt b/tests/lexers/bibtex/test_missing_body.txt
new file mode 100644
index 00000000..882fa700
--- /dev/null
+++ b/tests/lexers/bibtex/test_missing_body.txt
@@ -0,0 +1,10 @@
+---input---
+@ARTICLE xxx
+
+---tokens---
+'@ARTICLE' Name.Class
+' ' Text
+'x' Error
+'x' Error
+'x' Error
+'\n' Text
diff --git a/tests/lexers/bibtex/test_preamble.txt b/tests/lexers/bibtex/test_preamble.txt
new file mode 100644
index 00000000..9b4d4760
--- /dev/null
+++ b/tests/lexers/bibtex/test_preamble.txt
@@ -0,0 +1,11 @@
+---input---
+@PREAMBLE{"% some LaTeX code here"}
+
+---tokens---
+'@PREAMBLE' Name.Class
+'{' Punctuation
+'"' Literal.String
+'% some LaTeX code here' Literal.String
+'"' Literal.String
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/bibtex/test_string.txt b/tests/lexers/bibtex/test_string.txt
new file mode 100644
index 00000000..0a521bdf
--- /dev/null
+++ b/tests/lexers/bibtex/test_string.txt
@@ -0,0 +1,15 @@
+---input---
+@STRING(SCI = "Science")
+
+---tokens---
+'@STRING' Name.Class
+'(' Punctuation
+'SCI' Name.Attribute
+' ' Text
+'=' Punctuation
+' ' Text
+'"' Literal.String
+'Science' Literal.String
+'"' Literal.String
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/c/test_label.txt b/tests/lexers/c/test_label.txt
new file mode 100644
index 00000000..722961ba
--- /dev/null
+++ b/tests/lexers/c/test_label.txt
@@ -0,0 +1,31 @@
+---input---
+int main()
+{
+foo:
+ goto foo;
+}
+
+---tokens---
+'int' Keyword.Type
+' ' Text
+'main' Name.Function
+'(' Punctuation
+')' Punctuation
+'\n' Text
+
+'{' Punctuation
+'\n' Text
+
+'foo' Name.Label
+':' Punctuation
+'\n' Text
+
+' ' Text
+'goto' Keyword
+' ' Text
+'foo' Name
+';' Punctuation
+'\n' Text
+
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/c/test_label_followed_by_statement.txt b/tests/lexers/c/test_label_followed_by_statement.txt
new file mode 100644
index 00000000..4119a731
--- /dev/null
+++ b/tests/lexers/c/test_label_followed_by_statement.txt
@@ -0,0 +1,35 @@
+---input---
+int main()
+{
+foo:return 0;
+ goto foo;
+}
+
+---tokens---
+'int' Keyword.Type
+' ' Text
+'main' Name.Function
+'(' Punctuation
+')' Punctuation
+'\n' Text
+
+'{' Punctuation
+'\n' Text
+
+'foo' Name.Label
+':' Punctuation
+'return' Keyword
+' ' Text
+'0' Literal.Number.Integer
+';' Punctuation
+'\n' Text
+
+' ' Text
+'goto' Keyword
+' ' Text
+'foo' Name
+';' Punctuation
+'\n' Text
+
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/c/test_label_space_before_colon.txt b/tests/lexers/c/test_label_space_before_colon.txt
new file mode 100644
index 00000000..53548e86
--- /dev/null
+++ b/tests/lexers/c/test_label_space_before_colon.txt
@@ -0,0 +1,32 @@
+---input---
+int main()
+{
+foo :
+ goto foo;
+}
+
+---tokens---
+'int' Keyword.Type
+' ' Text
+'main' Name.Function
+'(' Punctuation
+')' Punctuation
+'\n' Text
+
+'{' Punctuation
+'\n' Text
+
+'foo' Name.Label
+' ' Text
+':' Punctuation
+'\n' Text
+
+' ' Text
+'goto' Keyword
+' ' Text
+'foo' Name
+';' Punctuation
+'\n' Text
+
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/c/test_numbers.txt b/tests/lexers/c/test_numbers.txt
new file mode 100644
index 00000000..64735121
--- /dev/null
+++ b/tests/lexers/c/test_numbers.txt
@@ -0,0 +1,20 @@
+---input---
+42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23
+
+---tokens---
+'42' Literal.Number.Integer
+' ' Text
+'23.42' Literal.Number.Float
+' ' Text
+'23.' Literal.Number.Float
+' ' Text
+'.42' Literal.Number.Float
+' ' Text
+'023' Literal.Number.Oct
+' ' Text
+'0xdeadbeef' Literal.Number.Hex
+' ' Text
+'23e+42' Literal.Number.Float
+' ' Text
+'42e-23' Literal.Number.Float
+'\n' Text
diff --git a/tests/lexers/c/test_preproc_file.txt b/tests/lexers/c/test_preproc_file.txt
new file mode 100644
index 00000000..c00af1da
--- /dev/null
+++ b/tests/lexers/c/test_preproc_file.txt
@@ -0,0 +1,9 @@
+---input---
+#include <foo>
+
+---tokens---
+'#' Comment.Preproc
+'include' Comment.Preproc
+' ' Text
+'<foo>' Comment.PreprocFile
+'\n' Comment.Preproc
diff --git a/tests/lexers/c/test_preproc_file2.txt b/tests/lexers/c/test_preproc_file2.txt
new file mode 100644
index 00000000..a0ce5a69
--- /dev/null
+++ b/tests/lexers/c/test_preproc_file2.txt
@@ -0,0 +1,9 @@
+---input---
+#include "foo.h"
+
+---tokens---
+'#' Comment.Preproc
+'include' Comment.Preproc
+' ' Text
+'"foo.h"' Comment.PreprocFile
+'\n' Comment.Preproc
diff --git a/tests/lexers/c/test_switch.txt b/tests/lexers/c/test_switch.txt
new file mode 100644
index 00000000..9f882f7f
--- /dev/null
+++ b/tests/lexers/c/test_switch.txt
@@ -0,0 +1,56 @@
+---input---
+int main()
+{
+ switch (0)
+ {
+ case 0:
+ default:
+ ;
+ }
+}
+
+---tokens---
+'int' Keyword.Type
+' ' Text
+'main' Name.Function
+'(' Punctuation
+')' Punctuation
+'\n' Text
+
+'{' Punctuation
+'\n' Text
+
+' ' Text
+'switch' Keyword
+' ' Text
+'(' Punctuation
+'0' Literal.Number.Integer
+')' Punctuation
+'\n' Text
+
+' ' Text
+'{' Punctuation
+'\n' Text
+
+' ' Text
+'case' Keyword
+' ' Text
+'0' Literal.Number.Integer
+':' Operator
+'\n' Text
+
+' ' Text
+'default' Keyword
+':' Operator
+'\n' Text
+
+' ' Text
+';' Punctuation
+'\n' Text
+
+' ' Text
+'}' Punctuation
+'\n' Text
+
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/c/test_switch_space_before_colon.txt b/tests/lexers/c/test_switch_space_before_colon.txt
new file mode 100644
index 00000000..c2de099c
--- /dev/null
+++ b/tests/lexers/c/test_switch_space_before_colon.txt
@@ -0,0 +1,58 @@
+---input---
+int main()
+{
+ switch (0)
+ {
+ case 0 :
+ default :
+ ;
+ }
+}
+
+---tokens---
+'int' Keyword.Type
+' ' Text
+'main' Name.Function
+'(' Punctuation
+')' Punctuation
+'\n' Text
+
+'{' Punctuation
+'\n' Text
+
+' ' Text
+'switch' Keyword
+' ' Text
+'(' Punctuation
+'0' Literal.Number.Integer
+')' Punctuation
+'\n' Text
+
+' ' Text
+'{' Punctuation
+'\n' Text
+
+' ' Text
+'case' Keyword
+' ' Text
+'0' Literal.Number.Integer
+' ' Text
+':' Operator
+'\n' Text
+
+' ' Text
+'default' Keyword
+' ' Text
+':' Operator
+'\n' Text
+
+' ' Text
+';' Punctuation
+'\n' Text
+
+' ' Text
+'}' Punctuation
+'\n' Text
+
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/cfm/test_basic_comment.txt b/tests/lexers/cfm/test_basic_comment.txt
new file mode 100644
index 00000000..c07a72af
--- /dev/null
+++ b/tests/lexers/cfm/test_basic_comment.txt
@@ -0,0 +1,8 @@
+---input---
+<!--- cfcomment --->
+
+---tokens---
+'<!---' Comment.Multiline
+' cfcomment ' Comment.Multiline
+'--->' Comment.Multiline
+'\n' Text
diff --git a/tests/lexers/cfm/test_nested_comment.txt b/tests/lexers/cfm/test_nested_comment.txt
new file mode 100644
index 00000000..f8aaf4c7
--- /dev/null
+++ b/tests/lexers/cfm/test_nested_comment.txt
@@ -0,0 +1,12 @@
+---input---
+<!--- nested <!--- cfcomment ---> --->
+
+---tokens---
+'<!---' Comment.Multiline
+' nested ' Comment.Multiline
+'<!---' Comment.Multiline
+' cfcomment ' Comment.Multiline
+'--->' Comment.Multiline
+' ' Comment.Multiline
+'--->' Comment.Multiline
+'\n' Text
diff --git a/tests/lexers/coffeescript/test_beware_infinite_loop.txt b/tests/lexers/coffeescript/test_beware_infinite_loop.txt
new file mode 100644
index 00000000..6ae9b753
--- /dev/null
+++ b/tests/lexers/coffeescript/test_beware_infinite_loop.txt
@@ -0,0 +1,14 @@
+# This demonstrates the case that the "This isn't really guarding"
+# comment refers to.
+
+---input---
+/a/x;
+
+---tokens---
+'' Text
+'/' Operator
+'a' Name.Other
+'/' Operator
+'x' Name.Other
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/coffeescript/test_mixed_slashes.txt b/tests/lexers/coffeescript/test_mixed_slashes.txt
new file mode 100644
index 00000000..7577b665
--- /dev/null
+++ b/tests/lexers/coffeescript/test_mixed_slashes.txt
@@ -0,0 +1,13 @@
+---input---
+a?/foo/:1/2;
+
+---tokens---
+'a' Name.Other
+'?' Operator
+'/foo/' Literal.String.Regex
+':' Operator
+'1' Literal.Number.Integer
+'/' Operator
+'2' Literal.Number.Integer
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/conftest.py b/tests/lexers/conftest.py
new file mode 100644
index 00000000..46bf5f1d
--- /dev/null
+++ b/tests/lexers/conftest.py
@@ -0,0 +1,89 @@
+"""
+ Generated lexer tests
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Checks that lexers output the expected tokens for each sample
+ under lexers/*/test_*.txt.
+
+ After making a change, rather than updating the samples manually,
+ run `pytest --update-goldens tests/lexers`.
+
+ To add a new sample, create a new file matching this pattern.
+ The directory must match the alias of the lexer to be used.
+ Populate only the input, then just `--update-goldens`.
+
+ :copyright: Copyright 2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+from pathlib import Path
+
+import pytest
+
+import pygments.lexers
+
+
+def pytest_collect_file(parent, path):
+ if path.ext == '.txt':
+ return LexerTestFile.from_parent(parent, fspath=path)
+
+
+class LexerTestFile(pytest.File):
+ def collect(self):
+ yield LexerTestItem.from_parent(self, name='test')
+
+
+class LexerTestItem(pytest.Item):
+ def __init__(self, name, parent):
+ super().__init__(name, parent)
+ self.lexer = Path(str(self.fspath)).parent.name
+
+ content = self.fspath.read_text('utf-8')
+ content, _, self.expected = content.partition('\n---tokens---\n')
+ if content.startswith('---input---\n'):
+ content = '\n' + content
+ self.comment, _, self.input = content.rpartition('\n---input---\n')
+ if not self.input.endswith('\n'):
+ self.input += '\n'
+ self.comment = self.comment.strip()
+
+ @classmethod
+ def _prettyprint_tokens(cls, tokens):
+ for tok, val in tokens:
+ yield '{!r:<13} {}'.format(val, str(tok)[6:])
+ if val.endswith('\n'):
+ yield ''
+
+ def runtest(self):
+ lexer = pygments.lexers.get_lexer_by_name(self.lexer)
+ tokens = lexer.get_tokens(self.input)
+ self.actual = '\n'.join(self._prettyprint_tokens(tokens)).rstrip('\n') + '\n'
+ if not self.config.getoption('--update-goldens'):
+ assert self.actual == self.expected
+
+ def _test_file_rel_path(self):
+ return Path(str(self.fspath)).relative_to(Path(__file__).parent.parent.parent)
+
+ def repr_failure(self, excinfo):
+ if isinstance(excinfo.value, AssertionError):
+ message = (
+ 'The tokens produced by the "{}" lexer differ from the '
+ 'expected ones in the file "{}".\n'
+ 'Run `pytest tests/lexers --update-goldens` to update it.'
+ ).format(self.lexer, self._test_file_rel_path())
+ diff = str(excinfo.value).split('\n', 1)[-1]
+ return message + '\n\n' + diff
+
+ def reportinfo(self):
+ return self.fspath, None, str(self._test_file_rel_path())
+
+
+def pytest_runtest_teardown(item, nextitem):
+ if item.config.getoption('--update-goldens') and isinstance(item, LexerTestItem):
+ with item.fspath.open('w', encoding='utf-8') as f:
+ f.write(item.comment)
+ if item.comment:
+ f.write('\n\n')
+ f.write('---input---\n')
+ f.write(item.input)
+ f.write('\n---tokens---\n')
+ f.write(item.actual)
diff --git a/tests/lexers/console/test_comment_after_prompt.txt b/tests/lexers/console/test_comment_after_prompt.txt
new file mode 100644
index 00000000..f1157152
--- /dev/null
+++ b/tests/lexers/console/test_comment_after_prompt.txt
@@ -0,0 +1,6 @@
+---input---
+$# comment
+
+---tokens---
+'$' Generic.Prompt
+'# comment\n' Comment.Single
diff --git a/tests/lexers/console/test_newline_in_echo.txt b/tests/lexers/console/test_newline_in_echo.txt
new file mode 100644
index 00000000..73c39ae7
--- /dev/null
+++ b/tests/lexers/console/test_newline_in_echo.txt
@@ -0,0 +1,15 @@
+---input---
+$ echo \
+hi
+hi
+
+---tokens---
+'$ ' Generic.Prompt
+'echo' Name.Builtin
+' ' Text
+'\\\n' Literal.String.Escape
+
+'hi' Text
+'\n' Text
+
+'hi\n' Generic.Output
diff --git a/tests/lexers/console/test_newline_in_ls.txt b/tests/lexers/console/test_newline_in_ls.txt
new file mode 100644
index 00000000..b99a7695
--- /dev/null
+++ b/tests/lexers/console/test_newline_in_ls.txt
@@ -0,0 +1,15 @@
+---input---
+$ ls \
+hi
+hi
+
+---tokens---
+'$ ' Generic.Prompt
+'ls' Text
+' ' Text
+'\\\n' Literal.String.Escape
+
+'hi' Text
+'\n' Text
+
+'hi\n' Generic.Output
diff --git a/tests/lexers/console/test_virtualenv.txt b/tests/lexers/console/test_virtualenv.txt
new file mode 100644
index 00000000..2b0182f8
--- /dev/null
+++ b/tests/lexers/console/test_virtualenv.txt
@@ -0,0 +1,11 @@
+---input---
+(env) [~/project]$ foo -h
+
+---tokens---
+'(env)' Generic.Prompt.VirtualEnv
+' ' Text
+'[~/project]$ ' Generic.Prompt
+'foo' Text
+' ' Text
+'-h' Text
+'\n' Text
diff --git a/tests/lexers/coq/test_unicode.txt b/tests/lexers/coq/test_unicode.txt
new file mode 100644
index 00000000..2007f7d6
--- /dev/null
+++ b/tests/lexers/coq/test_unicode.txt
@@ -0,0 +1,15 @@
+---input---
+Check (α ≻ β).
+
+---tokens---
+'Check' Keyword.Namespace
+' ' Text
+'(' Operator
+'α' Name
+' ' Text
+'≻' Name.Builtin.Pseudo
+' ' Text
+'β' Name
+')' Operator
+'.' Operator
+'\n' Text
diff --git a/tests/lexers/cpp/test_good_comment.txt b/tests/lexers/cpp/test_good_comment.txt
new file mode 100644
index 00000000..a6998e7b
--- /dev/null
+++ b/tests/lexers/cpp/test_good_comment.txt
@@ -0,0 +1,6 @@
+---input---
+/* foo */
+
+---tokens---
+'/* foo */' Comment.Multiline
+'\n' Text
diff --git a/tests/lexers/cpp/test_open_comment.txt b/tests/lexers/cpp/test_open_comment.txt
new file mode 100644
index 00000000..3799214b
--- /dev/null
+++ b/tests/lexers/cpp/test_open_comment.txt
@@ -0,0 +1,5 @@
+---input---
+/* foo
+
+---tokens---
+'/* foo\n' Comment.Multiline
diff --git a/tests/lexers/crystal/test_annotation.txt b/tests/lexers/crystal/test_annotation.txt
new file mode 100644
index 00000000..8962e172
--- /dev/null
+++ b/tests/lexers/crystal/test_annotation.txt
@@ -0,0 +1,16 @@
+---input---
+@[FOO::Bar::Baz(opt: "xx")]
+
+---tokens---
+'@[' Operator
+'FOO::Bar::Baz' Name.Decorator
+'(' Punctuation
+'opt' Literal.String.Symbol
+':' Punctuation
+' ' Text
+'"' Literal.String.Double
+'xx' Literal.String.Double
+'"' Literal.String.Double
+')' Punctuation
+']' Operator
+'\n' Text
diff --git a/tests/lexers/crystal/test_array_access.txt b/tests/lexers/crystal/test_array_access.txt
new file mode 100644
index 00000000..99c27c75
--- /dev/null
+++ b/tests/lexers/crystal/test_array_access.txt
@@ -0,0 +1,11 @@
+---input---
+[5][5]?
+
+---tokens---
+'[' Operator
+'5' Literal.Number.Integer
+']' Operator
+'[' Operator
+'5' Literal.Number.Integer
+']?' Operator
+'\n' Text
diff --git a/tests/lexers/crystal/test_chars.txt b/tests/lexers/crystal/test_chars.txt
new file mode 100644
index 00000000..0a0c5b0a
--- /dev/null
+++ b/tests/lexers/crystal/test_chars.txt
@@ -0,0 +1,25 @@
+---input---
+'a'
+'я'
+'\u{1234}'
+'
+'
+'abc'
+
+---tokens---
+"'a'" Literal.String.Char
+'\n' Text
+
+"'я'" Literal.String.Char
+'\n' Text
+
+"'\\u{1234}'" Literal.String.Char
+'\n' Text
+
+"'\n'" Literal.String.Char
+'\n' Text
+
+"'" Error
+'abc' Name
+"'" Error
+'\n' Text
diff --git a/tests/lexers/crystal/test_constant_and_module.txt b/tests/lexers/crystal/test_constant_and_module.txt
new file mode 100644
index 00000000..547ec79c
--- /dev/null
+++ b/tests/lexers/crystal/test_constant_and_module.txt
@@ -0,0 +1,14 @@
+---input---
+HTTP
+HTTP::Server.new
+
+---tokens---
+'HTTP' Name.Constant
+'\n' Text
+
+'HTTP' Name
+'::' Operator
+'Server' Name
+'.' Operator
+'new' Name
+'\n' Text
diff --git a/tests/lexers/crystal/test_escaped_bracestring.txt b/tests/lexers/crystal/test_escaped_bracestring.txt
new file mode 100644
index 00000000..7bcf845e
--- /dev/null
+++ b/tests/lexers/crystal/test_escaped_bracestring.txt
@@ -0,0 +1,19 @@
+---input---
+str.gsub(%r{\\\\}, "/")
+
+---tokens---
+'str' Name
+'.' Operator
+'gsub' Name
+'(' Punctuation
+'%r{' Literal.String.Regex
+'\\\\' Literal.String.Regex
+'\\\\' Literal.String.Regex
+'}' Literal.String.Regex
+',' Punctuation
+' ' Text
+'"' Literal.String.Double
+'/' Literal.String.Double
+'"' Literal.String.Double
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/crystal/test_escaped_interpolation.txt b/tests/lexers/crystal/test_escaped_interpolation.txt
new file mode 100644
index 00000000..31ff9004
--- /dev/null
+++ b/tests/lexers/crystal/test_escaped_interpolation.txt
@@ -0,0 +1,9 @@
+---input---
+"\#{a + b}"
+
+---tokens---
+'"' Literal.String.Double
+'\\#' Literal.String.Escape
+'{a + b}' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/crystal/test_interpolation_nested_curly.txt b/tests/lexers/crystal/test_interpolation_nested_curly.txt
new file mode 100644
index 00000000..eceb04af
--- /dev/null
+++ b/tests/lexers/crystal/test_interpolation_nested_curly.txt
@@ -0,0 +1,56 @@
+---input---
+"A#{ (3..5).group_by { |x| x/2}.map do |k,v| "#{k}" end.join }" + "Z"
+
+---tokens---
+'"' Literal.String.Double
+'A' Literal.String.Double
+'#{' Literal.String.Interpol
+' ' Text
+'(' Punctuation
+'3' Literal.Number.Integer
+'..' Operator
+'5' Literal.Number.Integer
+')' Punctuation
+'.' Operator
+'group_by' Name
+' ' Text
+'{' Literal.String.Interpol
+' ' Text
+'|' Operator
+'x' Name
+'|' Operator
+' ' Text
+'x' Name
+'/' Operator
+'2' Literal.Number.Integer
+'}' Literal.String.Interpol
+'.' Operator
+'map' Name
+' ' Text
+'do' Keyword
+' ' Text
+'|' Operator
+'k' Name
+',' Punctuation
+'v' Name
+'|' Operator
+' ' Text
+'"' Literal.String.Double
+'#{' Literal.String.Interpol
+'k' Name
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+' ' Text
+'end' Keyword
+'.' Operator
+'join' Name
+' ' Text
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+' ' Text
+'+' Operator
+' ' Text
+'"' Literal.String.Double
+'Z' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/crystal/test_lib.txt b/tests/lexers/crystal/test_lib.txt
new file mode 100644
index 00000000..41c0bf07
--- /dev/null
+++ b/tests/lexers/crystal/test_lib.txt
@@ -0,0 +1,58 @@
+---input---
+@[Link("some")]
+lib LibSome
+@[CallConvention("X86_StdCall")]
+fun foo="some.foo"(thing : Void*) : LibC::Int
+end
+
+---tokens---
+'@[' Operator
+'Link' Name.Decorator
+'(' Punctuation
+'"' Literal.String.Double
+'some' Literal.String.Double
+'"' Literal.String.Double
+')' Punctuation
+']' Operator
+'\n' Text
+
+'lib' Keyword
+' ' Text
+'LibSome' Name.Namespace
+'\n' Text
+
+'@[' Operator
+'CallConvention' Name.Decorator
+'(' Punctuation
+'"' Literal.String.Double
+'X86_StdCall' Literal.String.Double
+'"' Literal.String.Double
+')' Punctuation
+']' Operator
+'\n' Text
+
+'fun' Keyword
+' ' Text
+'foo' Name.Function
+'=' Operator
+'"' Literal.String.Double
+'some.foo' Literal.String.Double
+'"' Literal.String.Double
+'(' Punctuation
+'thing' Name
+' ' Text
+':' Punctuation
+' ' Text
+'Void' Name
+'*' Operator
+')' Punctuation
+' ' Text
+':' Punctuation
+' ' Text
+'LibC' Name
+'::' Operator
+'Int' Name
+'\n' Text
+
+'end' Keyword
+'\n' Text
diff --git a/tests/lexers/crystal/test_macro.txt b/tests/lexers/crystal/test_macro.txt
new file mode 100644
index 00000000..a66c2883
--- /dev/null
+++ b/tests/lexers/crystal/test_macro.txt
@@ -0,0 +1,76 @@
+---input---
+def<=>(other : self) : Int
+{%for field in %w(first_name middle_name last_name)%}
+cmp={{field.id}}<=>other.{{field.id}}
+return cmp if cmp!=0
+{%end%}
+0
+end
+
+---tokens---
+'def' Keyword
+'<=>' Name.Function
+'(' Punctuation
+'other' Name
+' ' Text
+':' Punctuation
+' ' Text
+'self' Keyword
+')' Punctuation
+' ' Text
+':' Punctuation
+' ' Text
+'Int' Name
+'\n' Text
+
+'{%' Literal.String.Interpol
+'for' Keyword
+' ' Text
+'field' Name
+' ' Text
+'in' Keyword
+' ' Text
+'%w(' Literal.String.Other
+'first_name middle_name last_name' Literal.String.Other
+')' Literal.String.Other
+'%}' Literal.String.Interpol
+'\n' Text
+
+'cmp' Name
+'=' Operator
+'{{' Literal.String.Interpol
+'field' Name
+'.' Operator
+'id' Name
+'}}' Literal.String.Interpol
+'<=>' Operator
+'other' Name
+'.' Operator
+'{{' Literal.String.Interpol
+'field' Name
+'.' Operator
+'id' Name
+'}}' Literal.String.Interpol
+'\n' Text
+
+'return' Keyword
+' ' Text
+'cmp' Name
+' ' Text
+'if' Keyword
+' ' Text
+'cmp' Name
+'!=' Operator
+'0' Literal.Number.Integer
+'\n' Text
+
+'{%' Literal.String.Interpol
+'end' Keyword
+'%}' Literal.String.Interpol
+'\n' Text
+
+'0' Literal.Number.Integer
+'\n' Text
+
+'end' Keyword
+'\n' Text
diff --git a/tests/lexers/crystal/test_operator_methods.txt b/tests/lexers/crystal/test_operator_methods.txt
new file mode 100644
index 00000000..e4942ef0
--- /dev/null
+++ b/tests/lexers/crystal/test_operator_methods.txt
@@ -0,0 +1,18 @@
+---input---
+([] of Int32).[]?(5)
+
+---tokens---
+'(' Punctuation
+'[' Operator
+']' Operator
+' ' Text
+'of' Keyword
+' ' Text
+'Int32' Name
+')' Punctuation
+'.' Operator
+'[]?' Name.Operator
+'(' Punctuation
+'5' Literal.Number.Integer
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/crystal/test_percent_strings.txt b/tests/lexers/crystal/test_percent_strings.txt
new file mode 100644
index 00000000..dd53ec31
--- /dev/null
+++ b/tests/lexers/crystal/test_percent_strings.txt
@@ -0,0 +1,41 @@
+---input---
+%(hello ("world"))
+%[hello ["world"]]
+%{hello "world"}
+%<hello <"world">>
+%|hello "world"|
+
+---tokens---
+'%(' Literal.String.Other
+'hello ' Literal.String.Other
+'(' Literal.String.Other
+'"world"' Literal.String.Other
+')' Literal.String.Other
+')' Literal.String.Other
+'\n' Text
+
+'%[' Literal.String.Other
+'hello ' Literal.String.Other
+'[' Literal.String.Other
+'"world"' Literal.String.Other
+']' Literal.String.Other
+']' Literal.String.Other
+'\n' Text
+
+'%{' Literal.String.Other
+'hello "world"' Literal.String.Other
+'}' Literal.String.Other
+'\n' Text
+
+'%<' Literal.String.Other
+'hello ' Literal.String.Other
+'<' Literal.String.Other
+'"world"' Literal.String.Other
+'>' Literal.String.Other
+'>' Literal.String.Other
+'\n' Text
+
+'%|' Literal.String.Other
+'hello "world"' Literal.String.Other
+'|' Literal.String.Other
+'\n' Text
diff --git a/tests/lexers/crystal/test_percent_strings_special.txt b/tests/lexers/crystal/test_percent_strings_special.txt
new file mode 100644
index 00000000..96999af6
--- /dev/null
+++ b/tests/lexers/crystal/test_percent_strings_special.txt
@@ -0,0 +1,31 @@
+---input---
+%Q(hello \n #{name})
+%q(hello \n #{name})
+%w(foo\nbar baz)
+
+---tokens---
+'%Q(' Literal.String.Other
+'hello ' Literal.String.Other
+'\\n' Literal.String.Escape
+' ' Literal.String.Other
+'#{' Literal.String.Interpol
+'name' Name
+'}' Literal.String.Interpol
+')' Literal.String.Other
+'\n' Text
+
+'%q(' Literal.String.Other
+'hello ' Literal.String.Other
+'\\' Literal.String.Other
+'n ' Literal.String.Other
+'#' Literal.String.Other
+'{name}' Literal.String.Other
+')' Literal.String.Other
+'\n' Text
+
+'%w(' Literal.String.Other
+'foo' Literal.String.Other
+'\\' Literal.String.Other
+'nbar baz' Literal.String.Other
+')' Literal.String.Other
+'\n' Text
diff --git a/tests/lexers/crystal/test_pseudo_builtins.txt b/tests/lexers/crystal/test_pseudo_builtins.txt
new file mode 100644
index 00000000..3b1b471f
--- /dev/null
+++ b/tests/lexers/crystal/test_pseudo_builtins.txt
@@ -0,0 +1,20 @@
+---input---
+record Cls do
+def_equals s
+end
+
+---tokens---
+'record' Name.Builtin.Pseudo
+' ' Text
+'Cls' Name
+' ' Text
+'do' Keyword
+'\n' Text
+
+'def_equals' Name.Builtin.Pseudo
+' ' Text
+'s' Name
+'\n' Text
+
+'end' Keyword
+'\n' Text
diff --git a/tests/lexers/crystal/test_pseudo_keywords.txt b/tests/lexers/crystal/test_pseudo_keywords.txt
new file mode 100644
index 00000000..497200d3
--- /dev/null
+++ b/tests/lexers/crystal/test_pseudo_keywords.txt
@@ -0,0 +1,50 @@
+---input---
+def f(x : T, line = __LINE__) forall T
+if x.is_a?(String)
+pp! x
+end
+end
+
+---tokens---
+'def' Keyword
+' ' Text
+'f' Name.Function
+'(' Punctuation
+'x' Name
+' ' Text
+':' Punctuation
+' ' Text
+'T' Name
+',' Punctuation
+' ' Text
+'line' Name
+' ' Text
+'=' Operator
+' ' Text
+'__LINE__' Keyword.Pseudo
+')' Punctuation
+' ' Text
+'forall' Keyword.Pseudo
+' ' Text
+'T' Name
+'\n' Text
+
+'if' Keyword
+' ' Text
+'x' Name
+'.is_a?' Keyword.Pseudo
+'(' Punctuation
+'String' Name
+')' Punctuation
+'\n' Text
+
+'pp!' Name.Builtin.Pseudo
+' ' Text
+'x' Name
+'\n' Text
+
+'end' Keyword
+'\n' Text
+
+'end' Keyword
+'\n' Text
diff --git a/tests/lexers/crystal/test_range_syntax1.txt b/tests/lexers/crystal/test_range_syntax1.txt
new file mode 100644
index 00000000..72d47e85
--- /dev/null
+++ b/tests/lexers/crystal/test_range_syntax1.txt
@@ -0,0 +1,8 @@
+---input---
+1...3
+
+---tokens---
+'1' Literal.Number.Integer
+'...' Operator
+'3' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/crystal/test_range_syntax2.txt b/tests/lexers/crystal/test_range_syntax2.txt
new file mode 100644
index 00000000..a72f8380
--- /dev/null
+++ b/tests/lexers/crystal/test_range_syntax2.txt
@@ -0,0 +1,10 @@
+---input---
+1 .. 3
+
+---tokens---
+'1' Literal.Number.Integer
+' ' Text
+'..' Operator
+' ' Text
+'3' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/csound/test_braced_strings.txt b/tests/lexers/csound/test_braced_strings.txt
new file mode 100644
index 00000000..4ffa520d
--- /dev/null
+++ b/tests/lexers/csound/test_braced_strings.txt
@@ -0,0 +1,11 @@
+---input---
+{{
+characters$MACRO.
+}}
+
+---tokens---
+'{{' Literal.String
+'\ncharacters$MACRO.\n' Literal.String
+
+'}}' Literal.String
+'\n' Text
diff --git a/tests/lexers/csound/test_comments.txt b/tests/lexers/csound/test_comments.txt
new file mode 100644
index 00000000..3cb98c32
--- /dev/null
+++ b/tests/lexers/csound/test_comments.txt
@@ -0,0 +1,16 @@
+---input---
+/*
+ * comment
+ */
+; comment
+// comment
+
+---tokens---
+'/*\n * comment\n */' Comment.Multiline
+'\n' Text
+
+'; comment' Comment.Single
+'\n' Text
+
+'// comment' Comment.Single
+'\n' Text
diff --git a/tests/lexers/csound/test_escape_sequences.txt b/tests/lexers/csound/test_escape_sequences.txt
new file mode 100644
index 00000000..3a093275
--- /dev/null
+++ b/tests/lexers/csound/test_escape_sequences.txt
@@ -0,0 +1,122 @@
+---input---
+"\\"
+{{\\}}
+"\a"
+{{\a}}
+"\b"
+{{\b}}
+"\n"
+{{\n}}
+"\r"
+{{\r}}
+"\t"
+{{\t}}
+"\""
+{{\"}}
+"\012"
+{{\012}}
+"\345"
+{{\345}}
+"\67"
+{{\67}}
+
+---tokens---
+'"' Literal.String
+'\\\\' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\\\' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\a' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\a' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\b' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\b' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\n' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\n' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\r' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\r' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\t' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\t' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\"' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\"' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\012' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\012' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\345' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\345' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
+
+'"' Literal.String
+'\\67' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'{{' Literal.String
+'\\67' Literal.String.Escape
+'}}' Literal.String
+'\n' Text
diff --git a/tests/lexers/csound/test_function_like_macro_definitions.txt b/tests/lexers/csound/test_function_like_macro_definitions.txt
new file mode 100644
index 00000000..4ba93887
--- /dev/null
+++ b/tests/lexers/csound/test_function_like_macro_definitions.txt
@@ -0,0 +1,44 @@
+---input---
+#define MACRO(ARG1#ARG2) #macro_body#
+#define/**/
+MACRO(ARG1'ARG2' ARG3)/**/
+#\#macro
+body\##
+
+---tokens---
+'#define' Comment.Preproc
+' ' Text
+'MACRO' Comment.Preproc
+'(' Punctuation
+'ARG1' Comment.Preproc
+'#' Punctuation
+'ARG2' Comment.Preproc
+')' Punctuation
+' ' Text
+'#' Punctuation
+'macro_body' Comment.Preproc
+'#' Punctuation
+'\n' Text
+
+'#define' Comment.Preproc
+'/**/' Comment.Multiline
+'\n' Text
+
+'MACRO' Comment.Preproc
+'(' Punctuation
+'ARG1' Comment.Preproc
+"'" Punctuation
+'ARG2' Comment.Preproc
+"'" Punctuation
+' ' Text
+'ARG3' Comment.Preproc
+')' Punctuation
+'/**/' Comment.Multiline
+'\n' Text
+
+'#' Punctuation
+'\\#' Comment.Preproc
+'macro\nbody' Comment.Preproc
+'\\#' Comment.Preproc
+'#' Punctuation
+'\n' Text
diff --git a/tests/lexers/csound/test_function_like_macros.txt b/tests/lexers/csound/test_function_like_macros.txt
new file mode 100644
index 00000000..776e705d
--- /dev/null
+++ b/tests/lexers/csound/test_function_like_macros.txt
@@ -0,0 +1,40 @@
+---input---
+$MACRO.(((x#y\)))' "(#'x)\)x\))"# {{x\))x)\)(#'}});
+
+---tokens---
+'$MACRO.' Comment.Preproc
+'(' Punctuation
+'(' Comment.Preproc
+'(' Comment.Preproc
+'x#y\\)' Comment.Preproc
+')' Comment.Preproc
+')' Comment.Preproc
+"'" Punctuation
+' ' Comment.Preproc
+'"' Literal.String
+'(' Error
+'#' Error
+"'" Error
+'x' Literal.String
+')' Error
+'\\)' Comment.Preproc
+'x' Literal.String
+'\\)' Comment.Preproc
+')' Error
+'"' Literal.String
+'#' Punctuation
+' ' Comment.Preproc
+'{{' Literal.String
+'x' Literal.String
+'\\)' Comment.Preproc
+')' Error
+'x' Literal.String
+')' Error
+'\\)' Comment.Preproc
+'(' Error
+'#' Error
+"'" Error
+'}}' Literal.String
+')' Punctuation
+';' Comment.Single
+'\n' Text
diff --git a/tests/lexers/csound/test_global_value_identifiers.txt b/tests/lexers/csound/test_global_value_identifiers.txt
new file mode 100644
index 00000000..7dfa7607
--- /dev/null
+++ b/tests/lexers/csound/test_global_value_identifiers.txt
@@ -0,0 +1,30 @@
+---input---
+0dbfs
+A4
+kr
+ksmps
+nchnls
+nchnls_i
+sr
+
+---tokens---
+'0dbfs' Name.Variable.Global
+'\n' Text
+
+'A4' Name.Variable.Global
+'\n' Text
+
+'kr' Name.Variable.Global
+'\n' Text
+
+'ksmps' Name.Variable.Global
+'\n' Text
+
+'nchnls' Name.Variable.Global
+'\n' Text
+
+'nchnls_i' Name.Variable.Global
+'\n' Text
+
+'sr' Name.Variable.Global
+'\n' Text
diff --git a/tests/lexers/csound/test_goto_statements.txt b/tests/lexers/csound/test_goto_statements.txt
new file mode 100644
index 00000000..22253289
--- /dev/null
+++ b/tests/lexers/csound/test_goto_statements.txt
@@ -0,0 +1,176 @@
+---input---
+goto aLabel
+igoto aLabel
+kgoto aLabel
+reinit aLabel
+rigoto aLabel
+tigoto aLabel
+cggoto 1==0, aLabel
+cigoto 1==0, aLabel
+cingoto 1==0, aLabel
+ckgoto 1==0, aLabel
+cngoto 1==0, aLabel
+cnkgoto 1==0, aLabel
+timout 0, 0, aLabel
+loop_ge 0, 0, 0, aLabel
+loop_gt 0, 0, 0, aLabel
+loop_le 0, 0, 0, aLabel
+loop_lt 0, 0, 0, aLabel
+
+---tokens---
+'goto' Keyword
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'igoto' Keyword
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'kgoto' Keyword
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'reinit' Keyword.Pseudo
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'rigoto' Keyword.Pseudo
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'tigoto' Keyword.Pseudo
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'cggoto' Keyword.Pseudo
+' ' Text
+'1' Literal.Number.Integer
+'==' Operator
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'cigoto' Keyword.Pseudo
+' ' Text
+'1' Literal.Number.Integer
+'==' Operator
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'cingoto' Keyword.Pseudo
+' ' Text
+'1' Literal.Number.Integer
+'==' Operator
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'ckgoto' Keyword.Pseudo
+' ' Text
+'1' Literal.Number.Integer
+'==' Operator
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'cngoto' Keyword.Pseudo
+' ' Text
+'1' Literal.Number.Integer
+'==' Operator
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'cnkgoto' Keyword.Pseudo
+' ' Text
+'1' Literal.Number.Integer
+'==' Operator
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'timout' Keyword.Pseudo
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'loop_ge' Keyword.Pseudo
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'loop_gt' Keyword.Pseudo
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'loop_le' Keyword.Pseudo
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
+
+'loop_lt' Keyword.Pseudo
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'aLabel' Name.Label
+'\n' Text
diff --git a/tests/lexers/csound/test_include_directives.txt b/tests/lexers/csound/test_include_directives.txt
new file mode 100644
index 00000000..ea1c368d
--- /dev/null
+++ b/tests/lexers/csound/test_include_directives.txt
@@ -0,0 +1,14 @@
+---input---
+#include/**/"file.udo"
+#include/**/|file.udo|
+
+---tokens---
+'#include' Comment.Preproc
+'/**/' Comment.Multiline
+'"file.udo"' Literal.String
+'\n' Text
+
+'#include' Comment.Preproc
+'/**/' Comment.Multiline
+'|file.udo|' Literal.String
+'\n' Text
diff --git a/tests/lexers/csound/test_includestr_directives.txt b/tests/lexers/csound/test_includestr_directives.txt
new file mode 100644
index 00000000..bdd9cda5
--- /dev/null
+++ b/tests/lexers/csound/test_includestr_directives.txt
@@ -0,0 +1,11 @@
+---input---
+#includestr/**/"$MACRO..udo"
+
+---tokens---
+'#includestr' Comment.Preproc
+'/**/' Comment.Multiline
+'"' Literal.String
+'$MACRO.' Comment.Preproc
+'.udo' Literal.String
+'"' Literal.String
+'\n' Text
diff --git a/tests/lexers/csound/test_instrument_blocks.txt b/tests/lexers/csound/test_instrument_blocks.txt
new file mode 100644
index 00000000..1fa6941c
--- /dev/null
+++ b/tests/lexers/csound/test_instrument_blocks.txt
@@ -0,0 +1,42 @@
+---input---
+instr/**/1,/**/N_a_M_e_,/**/+Name/**///
+ iDuration = p3
+ outc:a(aSignal)
+endin
+
+---tokens---
+'instr' Keyword.Declaration
+'/**/' Comment.Multiline
+'1' Name.Function
+',' Punctuation
+'/**/' Comment.Multiline
+'N_a_M_e_' Name.Function
+',' Punctuation
+'/**/' Comment.Multiline
+'+' Punctuation
+'Name' Name.Function
+'/**/' Comment.Multiline
+'//' Comment.Single
+'\n' Text
+
+' ' Text
+'i' Keyword.Type
+'Duration' Name
+' ' Text
+'=' Operator
+' ' Text
+'p3' Name.Variable.Instance
+'\n' Text
+
+' ' Text
+'outc' Name.Builtin
+':' Punctuation
+'a' Keyword.Type
+'(' Punctuation
+'a' Keyword.Type
+'Signal' Name
+')' Punctuation
+'\n' Text
+
+'endin' Keyword.Declaration
+'\n' Text
diff --git a/tests/lexers/csound/test_keywords.txt b/tests/lexers/csound/test_keywords.txt
new file mode 100644
index 00000000..ccd8158c
--- /dev/null
+++ b/tests/lexers/csound/test_keywords.txt
@@ -0,0 +1,62 @@
+---input---
+do
+else
+elseif
+endif
+enduntil
+fi
+if
+ithen
+kthen
+od
+then
+until
+while
+return
+rireturn
+
+---tokens---
+'do' Keyword
+'\n' Text
+
+'else' Keyword
+'\n' Text
+
+'elseif' Keyword
+'\n' Text
+
+'endif' Keyword
+'\n' Text
+
+'enduntil' Keyword
+'\n' Text
+
+'fi' Keyword
+'\n' Text
+
+'if' Keyword
+'\n' Text
+
+'ithen' Keyword
+'\n' Text
+
+'kthen' Keyword
+'\n' Text
+
+'od' Keyword
+'\n' Text
+
+'then' Keyword
+'\n' Text
+
+'until' Keyword
+'\n' Text
+
+'while' Keyword
+'\n' Text
+
+'return' Keyword.Pseudo
+'\n' Text
+
+'rireturn' Keyword.Pseudo
+'\n' Text
diff --git a/tests/lexers/csound/test_labels.txt b/tests/lexers/csound/test_labels.txt
new file mode 100644
index 00000000..2f5de7f6
--- /dev/null
+++ b/tests/lexers/csound/test_labels.txt
@@ -0,0 +1,13 @@
+---input---
+aLabel:
+ label2:
+
+---tokens---
+'aLabel' Name.Label
+':' Punctuation
+'\n' Text
+
+' ' Text
+'label2' Name.Label
+':' Punctuation
+'\n' Text
diff --git a/tests/lexers/csound/test_macro_preprocessor_directives.txt b/tests/lexers/csound/test_macro_preprocessor_directives.txt
new file mode 100644
index 00000000..310cce25
--- /dev/null
+++ b/tests/lexers/csound/test_macro_preprocessor_directives.txt
@@ -0,0 +1,20 @@
+---input---
+#ifdef MACRO
+#ifndef MACRO
+#undef MACRO
+
+---tokens---
+'#ifdef' Comment.Preproc
+' ' Text
+'MACRO' Comment.Preproc
+'\n' Text
+
+'#ifndef' Comment.Preproc
+' ' Text
+'MACRO' Comment.Preproc
+'\n' Text
+
+'#undef' Comment.Preproc
+' ' Text
+'MACRO' Comment.Preproc
+'\n' Text
diff --git a/tests/lexers/csound/test_name.txt b/tests/lexers/csound/test_name.txt
new file mode 100644
index 00000000..2bb3f51a
--- /dev/null
+++ b/tests/lexers/csound/test_name.txt
@@ -0,0 +1,9 @@
+---input---
+kG:V
+
+---tokens---
+'k' Keyword.Type
+'G' Name
+':' Punctuation
+'V' Name
+'\n' Text
diff --git a/tests/lexers/csound/test_numbers.txt b/tests/lexers/csound/test_numbers.txt
new file mode 100644
index 00000000..87dc46fc
--- /dev/null
+++ b/tests/lexers/csound/test_numbers.txt
@@ -0,0 +1,52 @@
+---input---
+123 0123456789
+0xabcdef0123456789 0XABCDEF
+1e2
+3e+4
+5e-6
+7E8
+9E+0
+1E-2
+3.
+4.56
+.789
+
+---tokens---
+'123' Literal.Number.Integer
+' ' Text
+'0123456789' Literal.Number.Integer
+'\n' Text
+
+'0x' Keyword.Type
+'abcdef0123456789' Literal.Number.Hex
+' ' Text
+'0X' Keyword.Type
+'ABCDEF' Literal.Number.Hex
+'\n' Text
+
+'1e2' Literal.Number.Float
+'\n' Text
+
+'3e+4' Literal.Number.Float
+'\n' Text
+
+'5e-6' Literal.Number.Float
+'\n' Text
+
+'7E8' Literal.Number.Float
+'\n' Text
+
+'9E+0' Literal.Number.Float
+'\n' Text
+
+'1E-2' Literal.Number.Float
+'\n' Text
+
+'3.' Literal.Number.Float
+'\n' Text
+
+'4.56' Literal.Number.Float
+'\n' Text
+
+'.789' Literal.Number.Float
+'\n' Text
diff --git a/tests/lexers/csound/test_object_like_macro_definitions.txt b/tests/lexers/csound/test_object_like_macro_definitions.txt
new file mode 100644
index 00000000..fb2b7ab3
--- /dev/null
+++ b/tests/lexers/csound/test_object_like_macro_definitions.txt
@@ -0,0 +1,30 @@
+---input---
+# define MACRO#macro_body#
+#define/**/
+MACRO/**/
+#\#macro
+body\##
+
+---tokens---
+'# \tdefine' Comment.Preproc
+' ' Text
+'MACRO' Comment.Preproc
+'#' Punctuation
+'macro_body' Comment.Preproc
+'#' Punctuation
+'\n' Text
+
+'#define' Comment.Preproc
+'/**/' Comment.Multiline
+'\n' Text
+
+'MACRO' Comment.Preproc
+'/**/' Comment.Multiline
+'\n' Text
+
+'#' Punctuation
+'\\#' Comment.Preproc
+'macro\nbody' Comment.Preproc
+'\\#' Comment.Preproc
+'#' Punctuation
+'\n' Text
diff --git a/tests/lexers/csound/test_operators.txt b/tests/lexers/csound/test_operators.txt
new file mode 100644
index 00000000..107aa669
--- /dev/null
+++ b/tests/lexers/csound/test_operators.txt
@@ -0,0 +1,114 @@
+---input---
++
+-
+~
+¬
+!
+*
+/
+^
+%
+<<
+>>
+<
+>
+<=
+>=
+==
+!=
+&
+#
+|
+&&
+||
+?
+:
++=
+-=
+*=
+/=
+
+---tokens---
+'+' Operator
+'\n' Text
+
+'-' Operator
+'\n' Text
+
+'~' Operator
+'\n' Text
+
+'¬' Operator
+'\n' Text
+
+'!' Operator
+'\n' Text
+
+'*' Operator
+'\n' Text
+
+'/' Operator
+'\n' Text
+
+'^' Operator
+'\n' Text
+
+'%' Operator
+'\n' Text
+
+'<<' Operator
+'\n' Text
+
+'>>' Operator
+'\n' Text
+
+'<' Operator
+'\n' Text
+
+'>' Operator
+'\n' Text
+
+'<=' Operator
+'\n' Text
+
+'>=' Operator
+'\n' Text
+
+'==' Operator
+'\n' Text
+
+'!=' Operator
+'\n' Text
+
+'&' Operator
+'\n' Text
+
+'#' Operator
+'\n' Text
+
+'|' Operator
+'\n' Text
+
+'&&' Operator
+'\n' Text
+
+'||' Operator
+'\n' Text
+
+'?' Operator
+'\n' Text
+
+':' Operator
+'\n' Text
+
+'+=' Operator
+'\n' Text
+
+'-=' Operator
+'\n' Text
+
+'*=' Operator
+'\n' Text
+
+'/=' Operator
+'\n' Text
diff --git a/tests/lexers/csound/test_other_preprocessor_directives.txt b/tests/lexers/csound/test_other_preprocessor_directives.txt
new file mode 100644
index 00000000..ce3a4a0e
--- /dev/null
+++ b/tests/lexers/csound/test_other_preprocessor_directives.txt
@@ -0,0 +1,26 @@
+---input---
+#else
+#end
+#endif
+###
+@ 12345
+@@ 67890
+
+---tokens---
+'#else' Comment.Preproc
+'\n' Text
+
+'#end' Comment.Preproc
+'\n' Text
+
+'#endif' Comment.Preproc
+'\n' Text
+
+'###' Comment.Preproc
+'\n' Text
+
+'@ \t12345' Comment.Preproc
+'\n' Text
+
+'@@ \t67890' Comment.Preproc
+'\n' Text
diff --git a/tests/lexers/csound/test_printks_and_prints_escape_sequences.txt b/tests/lexers/csound/test_printks_and_prints_escape_sequences.txt
new file mode 100644
index 00000000..230598f6
--- /dev/null
+++ b/tests/lexers/csound/test_printks_and_prints_escape_sequences.txt
@@ -0,0 +1,290 @@
+---input---
+printks "%!"
+printks "%%"
+printks "%n"
+printks "%N"
+printks "%r"
+printks "%R"
+printks "%t"
+printks "%T"
+printks "\\a"
+printks "\\A"
+printks "\\b"
+printks "\\B"
+printks "\\n"
+printks "\\N"
+printks "\\r"
+printks "\\R"
+printks "\\t"
+printks "\\T"
+prints "%!"
+prints "%%"
+prints "%n"
+prints "%N"
+prints "%r"
+prints "%R"
+prints "%t"
+prints "%T"
+prints "\\a"
+prints "\\A"
+prints "\\b"
+prints "\\B"
+prints "\\n"
+prints "\\N"
+prints "\\r"
+prints "\\R"
+prints "\\t"
+prints "\\T"
+
+---tokens---
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%!' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%%' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%n' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%N' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%r' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%R' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%t' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'%T' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\a' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\A' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\b' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\B' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\n' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\N' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\r' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\R' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\t' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'printks' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\T' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%!' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%%' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%n' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%N' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%r' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%R' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%t' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'%T' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\a' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\A' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\b' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\B' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\n' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\N' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\r' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\R' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\t' Literal.String.Escape
+'"' Literal.String
+'\n' Text
+
+'prints' Name.Builtin
+' ' Text
+'"' Literal.String
+'\\\\T' Literal.String.Escape
+'"' Literal.String
+'\n' Text
diff --git a/tests/lexers/csound/test_quoted_strings.txt b/tests/lexers/csound/test_quoted_strings.txt
new file mode 100644
index 00000000..46aec4f3
--- /dev/null
+++ b/tests/lexers/csound/test_quoted_strings.txt
@@ -0,0 +1,9 @@
+---input---
+"characters$MACRO."
+
+---tokens---
+'"' Literal.String
+'characters' Literal.String
+'$MACRO.' Comment.Preproc
+'"' Literal.String
+'\n' Text
diff --git a/tests/lexers/csound/test_user_defined_opcodes.txt b/tests/lexers/csound/test_user_defined_opcodes.txt
new file mode 100644
index 00000000..0bf747f8
--- /dev/null
+++ b/tests/lexers/csound/test_user_defined_opcodes.txt
@@ -0,0 +1,24 @@
+---input---
+opcode/**/aUDO,/**/i[],/**/aik//
+ aUDO
+endop
+
+---tokens---
+'opcode' Keyword.Declaration
+'/**/' Comment.Multiline
+'aUDO' Name.Function
+',' Punctuation
+'/**/' Comment.Multiline
+'i[]' Keyword.Type
+',' Punctuation
+'/**/' Comment.Multiline
+'aik' Keyword.Type
+'//' Comment.Single
+'\n' Text
+
+' ' Text
+'aUDO' Name.Function
+'\n' Text
+
+'endop' Keyword.Declaration
+'\n' Text
diff --git a/tests/lexers/doscon/test_gt_only.txt b/tests/lexers/doscon/test_gt_only.txt
new file mode 100644
index 00000000..b37b8fae
--- /dev/null
+++ b/tests/lexers/doscon/test_gt_only.txt
@@ -0,0 +1,11 @@
+---input---
+> py
+hi
+
+---tokens---
+'>' Generic.Prompt
+' ' Text
+'py' Text
+'\n' Text
+
+'hi\n' Generic.Output
diff --git a/tests/lexers/ezhil/test_function.txt b/tests/lexers/ezhil/test_function.txt
new file mode 100644
index 00000000..6f88394e
--- /dev/null
+++ b/tests/lexers/ezhil/test_function.txt
@@ -0,0 +1,99 @@
+---input---
+# (C) முத்தையா அண்ணாமலை 2013, 2015
+நிரல்பாகம் gcd ( x, y )
+மு = max(x,y)
+ q = min(x,y)
+
+@( q == 0 ) ஆனால்
+ பின்கொடு மு
+முடி
+பின்கொடு gcd( மு - q , q )
+முடி
+
+---tokens---
+'# (C) முத்தையா அண்ணாமலை 2013, 2015\n' Comment.Single
+
+'நிரல்பாகம்' Keyword
+' ' Text
+'gcd' Name
+' ' Text
+'(' Punctuation
+' ' Text
+'x' Name
+',' Operator
+' ' Text
+'y' Name
+' ' Text
+')' Punctuation
+'\n' Text
+
+'மு' Name
+' ' Text
+'=' Operator
+' ' Text
+'max' Name.Builtin
+'(' Punctuation
+'x' Name
+',' Operator
+'y' Name
+')' Punctuation
+'\n' Text
+
+' ' Text
+'q' Name
+' ' Text
+'=' Operator
+' ' Text
+'min' Name.Builtin
+'(' Punctuation
+'x' Name
+',' Operator
+'y' Name
+')' Punctuation
+'\n' Text
+
+'\n' Text
+
+'@' Operator
+'(' Punctuation
+' ' Text
+'q' Name
+' ' Text
+'==' Operator
+' ' Text
+'0' Literal.Number.Integer
+' ' Text
+')' Punctuation
+' ' Text
+'ஆனால்' Keyword
+'\n' Text
+
+' ' Text
+'பின்கொடு' Keyword
+' ' Text
+'மு' Name
+'\n' Text
+
+'முடி' Keyword
+'\n' Text
+
+'பின்கொடு' Keyword
+' ' Text
+'gcd' Name
+'(' Punctuation
+' ' Text
+'மு' Name
+' ' Text
+'-' Operator
+' ' Text
+'q' Name
+' ' Text
+',' Operator
+' ' Text
+'q' Name
+' ' Text
+')' Punctuation
+'\n' Text
+
+'முடி' Keyword
+'\n' Text
diff --git a/tests/lexers/ezhil/test_gcd_expr.txt b/tests/lexers/ezhil/test_gcd_expr.txt
new file mode 100644
index 00000000..0e5cd291
--- /dev/null
+++ b/tests/lexers/ezhil/test_gcd_expr.txt
@@ -0,0 +1,21 @@
+---input---
+1^3+(5-5)*gcd(a,b)
+
+---tokens---
+'1' Literal.Number.Integer
+'^' Operator
+'3' Literal.Number.Integer
+'+' Operator
+'(' Punctuation
+'5' Literal.Number.Integer
+'-' Operator
+'5' Literal.Number.Integer
+')' Punctuation
+'*' Operator
+'gcd' Name
+'(' Punctuation
+'a' Name
+',' Operator
+'b' Name
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/ezhil/test_if_statement.txt b/tests/lexers/ezhil/test_if_statement.txt
new file mode 100644
index 00000000..b4f3b5be
--- /dev/null
+++ b/tests/lexers/ezhil/test_if_statement.txt
@@ -0,0 +1,28 @@
+---input---
+@( 0 > 3 ) ஆனால்
+ பதிப்பி "wont print"
+முடி
+
+---tokens---
+'@' Operator
+'(' Punctuation
+' ' Text
+'0' Literal.Number.Integer
+' ' Text
+'>' Operator
+' ' Text
+'3' Literal.Number.Integer
+' ' Text
+')' Punctuation
+' ' Text
+'ஆனால்' Keyword
+'\n' Text
+
+' ' Text
+'பதிப்பி' Keyword
+' ' Text
+'"wont print"' Literal.String
+'\n' Text
+
+'முடி' Keyword
+'\n' Text
diff --git a/tests/lexers/ezhil/test_sum.txt b/tests/lexers/ezhil/test_sum.txt
new file mode 100644
index 00000000..2ce200d4
--- /dev/null
+++ b/tests/lexers/ezhil/test_sum.txt
@@ -0,0 +1,8 @@
+---input---
+1+3
+
+---tokens---
+'1' Literal.Number.Integer
+'+' Operator
+'3' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/gas/test_comments.txt b/tests/lexers/gas/test_comments.txt
new file mode 100644
index 00000000..723e66a6
--- /dev/null
+++ b/tests/lexers/gas/test_comments.txt
@@ -0,0 +1,29 @@
+---input---
+lock addq $0, /* comments */ (%rsp) /*
+// comments
+*/ xorq %rax, %rax // comments
+
+---tokens---
+'lock' Name.Attribute
+' ' Text
+'addq' Name.Function
+' ' Text
+'$0' Name.Constant
+',' Punctuation
+' ' Text
+'/* comments */' Comment.Multiline
+' ' Text
+'(' Punctuation
+'%rsp' Name.Variable
+')' Punctuation
+' ' Text
+'/*\n// comments\n*/' Comment.Multiline
+' ' Text
+'xorq' Name.Function
+' ' Text
+'%rax' Name.Variable
+',' Punctuation
+' ' Text
+'%rax' Name.Variable
+' ' Text
+'// comments\n' Comment.Single
diff --git a/tests/lexers/gdscript/test_comment.txt b/tests/lexers/gdscript/test_comment.txt
new file mode 100644
index 00000000..1b85dd5a
--- /dev/null
+++ b/tests/lexers/gdscript/test_comment.txt
@@ -0,0 +1,6 @@
+---input---
+# Comment
+
+---tokens---
+'# Comment' Comment.Single
+'\n' Text
diff --git a/tests/lexers/gdscript/test_export_array.txt b/tests/lexers/gdscript/test_export_array.txt
new file mode 100644
index 00000000..6b6410e6
--- /dev/null
+++ b/tests/lexers/gdscript/test_export_array.txt
@@ -0,0 +1,17 @@
+---input---
+export (Array, AudioStream) var streams
+
+---tokens---
+'export' Keyword
+' ' Text
+'(' Punctuation
+'Array' Name.Builtin.Type
+',' Punctuation
+' ' Text
+'AudioStream' Name
+')' Punctuation
+' ' Text
+'var' Keyword
+' ' Text
+'streams' Name
+'\n' Text
diff --git a/tests/lexers/gdscript/test_function_with_types.txt b/tests/lexers/gdscript/test_function_with_types.txt
new file mode 100644
index 00000000..05256949
--- /dev/null
+++ b/tests/lexers/gdscript/test_function_with_types.txt
@@ -0,0 +1,33 @@
+---input---
+func abc(arg: String) -> void:
+ print("Hello", arg)
+
+---tokens---
+'func' Keyword
+' ' Text
+'abc' Name
+'(' Punctuation
+'arg' Name
+':' Punctuation
+' ' Text
+'String' Name.Builtin.Type
+')' Punctuation
+' ' Text
+'-' Operator
+'>' Operator
+' ' Text
+'void' Name
+':' Punctuation
+'\n' Text
+
+'\t' Text
+'print' Name.Builtin
+'(' Punctuation
+'"' Literal.String.Double
+'Hello' Literal.String.Double
+'"' Literal.String.Double
+',' Punctuation
+' ' Text
+'arg' Name
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/gdscript/test_inner_class.txt b/tests/lexers/gdscript/test_inner_class.txt
new file mode 100644
index 00000000..4c69275c
--- /dev/null
+++ b/tests/lexers/gdscript/test_inner_class.txt
@@ -0,0 +1,20 @@
+---input---
+class InnerClass:
+ var a = 5
+
+---tokens---
+'class' Keyword
+' ' Text
+'InnerClass' Name
+':' Punctuation
+'\n' Text
+
+'\t' Text
+'var' Keyword
+' ' Text
+'a' Name
+' ' Text
+'=' Operator
+' ' Text
+'5' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/gdscript/test_multiline_string.txt b/tests/lexers/gdscript/test_multiline_string.txt
new file mode 100644
index 00000000..d886983d
--- /dev/null
+++ b/tests/lexers/gdscript/test_multiline_string.txt
@@ -0,0 +1,8 @@
+---input---
+"""
+Multiline
+"""
+
+---tokens---
+'"""\nMultiline\n"""' Literal.String.Doc
+'\n' Text
diff --git a/tests/lexers/gdscript/test_signal.txt b/tests/lexers/gdscript/test_signal.txt
new file mode 100644
index 00000000..ab861f70
--- /dev/null
+++ b/tests/lexers/gdscript/test_signal.txt
@@ -0,0 +1,15 @@
+---input---
+signal sig (arg1, arg2)
+
+---tokens---
+'signal' Keyword
+' ' Text
+'sig' Name
+' ' Text
+'(' Punctuation
+'arg1' Name
+',' Punctuation
+' ' Text
+'arg2' Name
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/gdscript/test_simple_function.txt b/tests/lexers/gdscript/test_simple_function.txt
new file mode 100644
index 00000000..50ab82f9
--- /dev/null
+++ b/tests/lexers/gdscript/test_simple_function.txt
@@ -0,0 +1,22 @@
+---input---
+func abc(arg):
+ print("Hello, World!")
+
+---tokens---
+'func' Keyword
+' ' Text
+'abc' Name
+'(' Punctuation
+'arg' Name
+')' Punctuation
+':' Punctuation
+'\n' Text
+
+'\t' Text
+'print' Name.Builtin
+'(' Punctuation
+'"' Literal.String.Double
+'Hello, World!' Literal.String.Double
+'"' Literal.String.Double
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/gdscript/test_variable_declaration_and_assigment.txt b/tests/lexers/gdscript/test_variable_declaration_and_assigment.txt
new file mode 100644
index 00000000..65b4b968
--- /dev/null
+++ b/tests/lexers/gdscript/test_variable_declaration_and_assigment.txt
@@ -0,0 +1,12 @@
+---input---
+var abc = 5.4
+
+---tokens---
+'var' Keyword
+' ' Text
+'abc' Name
+' ' Text
+'=' Operator
+' ' Text
+'5.4' Literal.Number.Float
+'\n' Text
diff --git a/tests/lexers/haskell/test_promoted_names.txt b/tests/lexers/haskell/test_promoted_names.txt
new file mode 100644
index 00000000..087cd264
--- /dev/null
+++ b/tests/lexers/haskell/test_promoted_names.txt
@@ -0,0 +1,10 @@
+---input---
+'x ': '[]
+
+---tokens---
+"'x" Name
+' ' Text
+"':" Keyword.Type
+' ' Text
+"'[]" Keyword.Type
+'\n' Text
diff --git a/tests/lexers/http/test_application_calendar_xml.txt b/tests/lexers/http/test_application_calendar_xml.txt
new file mode 100644
index 00000000..81fff587
--- /dev/null
+++ b/tests/lexers/http/test_application_calendar_xml.txt
@@ -0,0 +1,28 @@
+---input---
+GET / HTTP/1.0
+Content-Type: application/calendar+xml
+
+<foo>
+
+---tokens---
+'GET' Name.Function
+' ' Text
+'/' Name.Namespace
+' ' Text
+'HTTP' Keyword.Reserved
+'/' Operator
+'1.0' Literal.Number
+'\n' Text
+
+'Content-Type' Name.Attribute
+'' Text
+':' Operator
+' ' Text
+'application/calendar+xml' Literal
+'\n' Text
+
+'\n' Text
+
+'<foo' Name.Tag
+'>' Name.Tag
+'\n' Text
diff --git a/tests/lexers/http/test_application_xml.txt b/tests/lexers/http/test_application_xml.txt
new file mode 100644
index 00000000..6b3cffad
--- /dev/null
+++ b/tests/lexers/http/test_application_xml.txt
@@ -0,0 +1,28 @@
+---input---
+GET / HTTP/1.0
+Content-Type: application/xml
+
+<foo>
+
+---tokens---
+'GET' Name.Function
+' ' Text
+'/' Name.Namespace
+' ' Text
+'HTTP' Keyword.Reserved
+'/' Operator
+'1.0' Literal.Number
+'\n' Text
+
+'Content-Type' Name.Attribute
+'' Text
+':' Operator
+' ' Text
+'application/xml' Literal
+'\n' Text
+
+'\n' Text
+
+'<foo' Name.Tag
+'>' Name.Tag
+'\n' Text
diff --git a/tests/lexers/http/test_http_status_line.txt b/tests/lexers/http/test_http_status_line.txt
new file mode 100644
index 00000000..8f8449d1
--- /dev/null
+++ b/tests/lexers/http/test_http_status_line.txt
@@ -0,0 +1,12 @@
+---input---
+HTTP/1.1 200 OK
+
+---tokens---
+'HTTP' Keyword.Reserved
+'/' Operator
+'1.1' Literal.Number
+' ' Text
+'200' Literal.Number
+' ' Text
+'OK' Name.Exception
+'\n' Text
diff --git a/tests/lexers/http/test_http_status_line_without_reason_phrase.txt b/tests/lexers/http/test_http_status_line_without_reason_phrase.txt
new file mode 100644
index 00000000..91bfa0e5
--- /dev/null
+++ b/tests/lexers/http/test_http_status_line_without_reason_phrase.txt
@@ -0,0 +1,10 @@
+---input---
+HTTP/1.1 200
+
+---tokens---
+'HTTP' Keyword.Reserved
+'/' Operator
+'1.1' Literal.Number
+' ' Text
+'200' Literal.Number
+'\n' Text
diff --git a/tests/lexers/http/test_http_status_line_without_reason_phrase_rfc_7230.txt b/tests/lexers/http/test_http_status_line_without_reason_phrase_rfc_7230.txt
new file mode 100644
index 00000000..e0c9896c
--- /dev/null
+++ b/tests/lexers/http/test_http_status_line_without_reason_phrase_rfc_7230.txt
@@ -0,0 +1,11 @@
+---input---
+HTTP/1.1 200
+
+---tokens---
+'HTTP' Keyword.Reserved
+'/' Operator
+'1.1' Literal.Number
+' ' Text
+'200' Literal.Number
+' ' Text
+'\n' Text
diff --git a/tests/lexers/idris/test_compiler_directive.txt b/tests/lexers/idris/test_compiler_directive.txt
new file mode 100644
index 00000000..cc56e9a8
--- /dev/null
+++ b/tests/lexers/idris/test_compiler_directive.txt
@@ -0,0 +1,20 @@
+---input---
+%link C "object.o"
+%name Vect xs
+
+---tokens---
+'%link' Keyword.Reserved
+' ' Text
+'C' Keyword.Type
+' ' Text
+'"' Literal.String
+'object.o' Literal.String
+'"' Literal.String
+'\n' Text
+
+'%name' Keyword.Reserved
+' ' Text
+'Vect' Keyword.Type
+' ' Text
+'xs' Text
+'\n' Text
diff --git a/tests/lexers/idris/test_reserved_word.txt b/tests/lexers/idris/test_reserved_word.txt
new file mode 100644
index 00000000..53c94e0b
--- /dev/null
+++ b/tests/lexers/idris/test_reserved_word.txt
@@ -0,0 +1,29 @@
+---input---
+namespace Foobar
+ links : String
+ links = "abc"
+
+---tokens---
+'namespace' Keyword.Reserved
+' ' Text
+'Foobar' Keyword.Type
+'\n' Text
+
+' ' Text
+'links' Name.Function
+' ' Text
+':' Operator.Word
+' ' Text
+'String' Keyword.Type
+'\n' Text
+
+' ' Text
+' ' Text
+'links' Text
+' ' Text
+'=' Operator.Word
+' ' Text
+'"' Literal.String
+'abc' Literal.String
+'"' Literal.String
+'\n' Text
diff --git a/tests/lexers/ini/test_indented_entries_1.txt b/tests/lexers/ini/test_indented_entries_1.txt
new file mode 100644
index 00000000..b22b2fa1
--- /dev/null
+++ b/tests/lexers/ini/test_indented_entries_1.txt
@@ -0,0 +1,16 @@
+---input---
+[section]
+ key1=value1
+ key2=value2
+
+---tokens---
+'[section]' Keyword
+'\n ' Text
+'key1' Name.Attribute
+'=' Operator
+'value1' Literal.String
+'\n ' Text
+'key2' Name.Attribute
+'=' Operator
+'value2' Literal.String
+'\n' Text
diff --git a/tests/lexers/ini/test_indented_entries_2.txt b/tests/lexers/ini/test_indented_entries_2.txt
new file mode 100644
index 00000000..4175bab4
--- /dev/null
+++ b/tests/lexers/ini/test_indented_entries_2.txt
@@ -0,0 +1,20 @@
+---input---
+[section]
+ key1 = value1
+ key2 = value2
+
+---tokens---
+'[section]' Keyword
+'\n ' Text
+'key1' Name.Attribute
+' ' Text
+'=' Operator
+' ' Text
+'value1' Literal.String
+'\n ' Text
+'key2' Name.Attribute
+' ' Text
+'=' Operator
+' ' Text
+'value2' Literal.String
+'\n' Text
diff --git a/tests/lexers/ini/test_indented_entries_3.txt b/tests/lexers/ini/test_indented_entries_3.txt
new file mode 100644
index 00000000..cf5bdcbc
--- /dev/null
+++ b/tests/lexers/ini/test_indented_entries_3.txt
@@ -0,0 +1,20 @@
+---input---
+[section]
+ key 1 = value1
+ key 2 = value2
+
+---tokens---
+'[section]' Keyword
+'\n ' Text
+'key 1' Name.Attribute
+' ' Text
+'=' Operator
+' ' Text
+'value1' Literal.String
+'\n ' Text
+'key 2' Name.Attribute
+' ' Text
+'=' Operator
+' ' Text
+'value2' Literal.String
+'\n' Text
diff --git a/tests/lexers/java/test_enhanced_for.txt b/tests/lexers/java/test_enhanced_for.txt
new file mode 100644
index 00000000..1844c56d
--- /dev/null
+++ b/tests/lexers/java/test_enhanced_for.txt
@@ -0,0 +1,21 @@
+---input---
+label:
+for(String var2: var1) {}
+
+---tokens---
+'label:' Name.Label
+'\n' Text
+
+'for' Keyword
+'(' Punctuation
+'String' Name
+' ' Text
+'var2' Name
+':' Punctuation
+' ' Text
+'var1' Name
+')' Punctuation
+' ' Text
+'{' Punctuation
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/java/test_numeric_literals.txt b/tests/lexers/java/test_numeric_literals.txt
new file mode 100644
index 00000000..5295bd8d
--- /dev/null
+++ b/tests/lexers/java/test_numeric_literals.txt
@@ -0,0 +1,34 @@
+---input---
+0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D
+
+---tokens---
+'0' Literal.Number.Integer
+' ' Text
+'5L' Literal.Number.Integer
+' ' Text
+'9__542_72l' Literal.Number.Integer
+' ' Text
+'0xbEEf' Literal.Number.Hex
+' ' Text
+'0X9_A' Literal.Number.Hex
+' ' Text
+'0_35' Literal.Number.Oct
+' ' Text
+'01' Literal.Number.Oct
+' ' Text
+'0b0___101_0' Literal.Number.Bin
+' ' Text
+'0.' Literal.Number.Float
+' ' Text
+'.7_17F' Literal.Number.Float
+' ' Text
+'3e-1_3d' Literal.Number.Float
+' ' Text
+'1f' Literal.Number.Float
+' ' Text
+'6_01.9e+3' Literal.Number.Float
+' ' Text
+'0x.1Fp3' Literal.Number.Float
+' ' Text
+'0XEP8D' Literal.Number.Float
+'\n' Text
diff --git a/tests/lexers/json/test_basic.txt b/tests/lexers/json/test_basic.txt
new file mode 100644
index 00000000..b6f595cb
--- /dev/null
+++ b/tests/lexers/json/test_basic.txt
@@ -0,0 +1,30 @@
+---input---
+{"foo": "bar", "foo2": [1, 2, 3], "\u0123": "\u0123"}
+
+---tokens---
+'{' Punctuation
+'"foo"' Name.Tag
+':' Punctuation
+' ' Text
+'"bar"' Literal.String.Double
+',' Punctuation
+' ' Text
+'"foo2"' Name.Tag
+':' Punctuation
+' ' Text
+'[' Punctuation
+'1' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'2' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'3' Literal.Number.Integer
+'],' Punctuation
+' ' Text
+'"\\u0123"' Name.Tag
+':' Punctuation
+' ' Text
+'"\\u0123"' Literal.String.Double
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/json/test_basic_bare.txt b/tests/lexers/json/test_basic_bare.txt
new file mode 100644
index 00000000..de30f65e
--- /dev/null
+++ b/tests/lexers/json/test_basic_bare.txt
@@ -0,0 +1,23 @@
+---input---
+"foo": "bar", "foo2": [1, 2, 3]
+
+---tokens---
+'"foo"' Name.Tag
+':' Punctuation
+' ' Text
+'"bar"' Literal.String.Double
+',' Punctuation
+' ' Text
+'"foo2"' Name.Tag
+':' Punctuation
+' ' Text
+'[' Punctuation
+'1' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'2' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'3' Literal.Number.Integer
+']' Punctuation
+'\n' Text
diff --git a/tests/lexers/julia/test_unicode.txt b/tests/lexers/julia/test_unicode.txt
new file mode 100644
index 00000000..36f3577d
--- /dev/null
+++ b/tests/lexers/julia/test_unicode.txt
@@ -0,0 +1,37 @@
+# Test that unicode character, √, in an expression is recognized
+
+---input---
+s = √((1/n) * sum(count .^ 2) - mu .^2)
+
+---tokens---
+'s' Name
+' ' Text
+'=' Operator
+' ' Text
+'√' Operator
+'(' Punctuation
+'(' Punctuation
+'1' Literal.Number.Integer
+'/' Operator
+'n' Name
+')' Punctuation
+' ' Text
+'*' Operator
+' ' Text
+'sum' Name
+'(' Punctuation
+'count' Name
+' ' Text
+'.^' Operator
+' ' Text
+'2' Literal.Number.Integer
+')' Punctuation
+' ' Text
+'-' Operator
+' ' Text
+'mu' Name
+' ' Text
+'.^' Operator
+'2' Literal.Number.Integer
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/kotlin/test_can_cope_generics_in_destructuring.txt b/tests/lexers/kotlin/test_can_cope_generics_in_destructuring.txt
new file mode 100644
index 00000000..7057ea62
--- /dev/null
+++ b/tests/lexers/kotlin/test_can_cope_generics_in_destructuring.txt
@@ -0,0 +1,27 @@
+---input---
+val (a: List<Something>, b: Set<Wobble>) =
+
+---tokens---
+'val' Keyword
+' ' Text
+'(' Punctuation
+'a' Name.Property
+':' Punctuation
+' ' Text
+'List' Name.Property
+'<' Punctuation
+'Something' Name
+'>' Punctuation
+',' Punctuation
+' ' Text
+'b' Name.Property
+':' Punctuation
+' ' Text
+'Set' Name.Property
+'<' Punctuation
+'Wobble' Name
+'>' Punctuation
+')' Punctuation
+' ' Text
+'=' Punctuation
+'\n' Text
diff --git a/tests/lexers/kotlin/test_can_cope_with_backtick_names_in_functions.txt b/tests/lexers/kotlin/test_can_cope_with_backtick_names_in_functions.txt
new file mode 100644
index 00000000..48447109
--- /dev/null
+++ b/tests/lexers/kotlin/test_can_cope_with_backtick_names_in_functions.txt
@@ -0,0 +1,8 @@
+---input---
+fun `wo bble`
+
+---tokens---
+'fun' Keyword
+' ' Text
+'`wo bble`' Name.Function
+'\n' Text
diff --git a/tests/lexers/kotlin/test_can_cope_with_commas_and_dashes_in_backtick_Names.txt b/tests/lexers/kotlin/test_can_cope_with_commas_and_dashes_in_backtick_Names.txt
new file mode 100644
index 00000000..4df4e397
--- /dev/null
+++ b/tests/lexers/kotlin/test_can_cope_with_commas_and_dashes_in_backtick_Names.txt
@@ -0,0 +1,8 @@
+---input---
+fun `wo,-bble`
+
+---tokens---
+'fun' Keyword
+' ' Text
+'`wo,-bble`' Name.Function
+'\n' Text
diff --git a/tests/lexers/kotlin/test_can_cope_with_destructuring.txt b/tests/lexers/kotlin/test_can_cope_with_destructuring.txt
new file mode 100644
index 00000000..e9fab5e9
--- /dev/null
+++ b/tests/lexers/kotlin/test_can_cope_with_destructuring.txt
@@ -0,0 +1,16 @@
+---input---
+val (a, b) =
+
+---tokens---
+'val' Keyword
+' ' Text
+'(' Punctuation
+'a' Name.Property
+',' Punctuation
+' ' Text
+'b' Name.Property
+')' Punctuation
+' ' Text
+'=' Punctuation
+' ' Text
+'\n' Text
diff --git a/tests/lexers/kotlin/test_can_cope_with_generics.txt b/tests/lexers/kotlin/test_can_cope_with_generics.txt
new file mode 100644
index 00000000..554a820a
--- /dev/null
+++ b/tests/lexers/kotlin/test_can_cope_with_generics.txt
@@ -0,0 +1,32 @@
+---input---
+inline fun <reified T : ContractState> VaultService.queryBy(): Vault.Page<T> {
+
+---tokens---
+'inline fun' Keyword
+' ' Text
+'<' Punctuation
+'reified' Keyword
+' ' Text
+'T' Name
+' ' Text
+':' Punctuation
+' ' Text
+'ContractState' Name
+'>' Punctuation
+' ' Text
+'VaultService' Name.Class
+'.' Punctuation
+'queryBy' Name.Function
+'(' Punctuation
+')' Punctuation
+':' Punctuation
+' ' Text
+'Vault' Name
+'.' Punctuation
+'Page' Name
+'<' Punctuation
+'T' Name
+'>' Punctuation
+' ' Text
+'{' Punctuation
+'\n' Text
diff --git a/tests/lexers/kotlin/test_should_cope_with_multiline_comments.txt b/tests/lexers/kotlin/test_should_cope_with_multiline_comments.txt
new file mode 100644
index 00000000..3de13278
--- /dev/null
+++ b/tests/lexers/kotlin/test_should_cope_with_multiline_comments.txt
@@ -0,0 +1,10 @@
+---input---
+"""
+this
+is
+a
+comment"""
+
+---tokens---
+'"""\nthis\nis\na\ncomment"""' Literal.String
+'\n' Text
diff --git a/tests/lexers/mason/test_handles_tags_correctly.txt b/tests/lexers/mason/test_handles_tags_correctly.txt
new file mode 100644
index 00000000..2f3f064f
--- /dev/null
+++ b/tests/lexers/mason/test_handles_tags_correctly.txt
@@ -0,0 +1,69 @@
+---input---
+<%class>
+has 'foo';
+has 'bar' => (required => 1);
+has 'baz' => (isa => 'Int', default => 17);
+</%class>
+
+---tokens---
+'<%class>' Name.Tag
+'\n' Text
+
+'' Name
+'has' Name
+' ' Text
+"'foo'" Literal.String
+';' Punctuation
+'\n' Text
+
+'' Name
+'has' Name
+' ' Text
+"'bar'" Literal.String
+' ' Text
+'=' Operator
+'>' Operator
+' ' Text
+'(' Punctuation
+'' Name
+'required' Name
+' ' Text
+'=' Operator
+'>' Operator
+' ' Text
+'1' Literal.Number.Integer
+')' Punctuation
+';' Punctuation
+'\n' Text
+
+'' Name
+'has' Name
+' ' Text
+"'baz'" Literal.String
+' ' Text
+'=' Operator
+'>' Operator
+' ' Text
+'(' Punctuation
+'' Name
+'isa' Name
+' ' Text
+'=' Operator
+'>' Operator
+' ' Text
+"'Int'" Literal.String
+',' Punctuation
+' ' Text
+'' Name
+'default' Name
+' ' Text
+'=' Operator
+'>' Operator
+' ' Text
+'17' Literal.Number.Integer
+')' Punctuation
+';' Punctuation
+'\n' Text
+
+'</%class>' Name.Tag
+'\n' Text
diff --git a/tests/lexers/matlab/test_classes_with_properties.txt b/tests/lexers/matlab/test_classes_with_properties.txt
new file mode 100644
index 00000000..7de838eb
--- /dev/null
+++ b/tests/lexers/matlab/test_classes_with_properties.txt
@@ -0,0 +1,105 @@
+---input---
+classdef Name < dynamicprops
+ properties
+ % i am a comment
+ name1
+ name2
+ end
+ properties (Constant = true, SetAccess = protected)
+ % i too am a comment
+ matrix = [0, 1, 2];
+ string = 'i am a string'
+ end
+ methods
+ % i am also a comment
+ function self = Name()
+ % i am a comment inside a constructor
+ end
+ end
+end
+
+---tokens---
+'classdef' Keyword
+' ' Text.Whitespace
+'Name' Name
+' ' Text.Whitespace
+'<' Operator
+' ' Text.Whitespace
+'dynamicprops' Keyword
+'\n ' Text.Whitespace
+'properties' Keyword
+'\n ' Text.Whitespace
+'% i am a comment' Comment
+'\n ' Text.Whitespace
+'name1' Name
+'\n ' Text.Whitespace
+'name2' Name
+'\n ' Text.Whitespace
+'end' Keyword
+'\n ' Text.Whitespace
+'properties' Keyword
+' ' Text.Whitespace
+'(' Punctuation
+'Constant' Name.Builtin
+' ' Text.Whitespace
+'=' Punctuation
+' ' Text.Whitespace
+'true' Keyword
+',' Punctuation
+' ' Text.Whitespace
+'SetAccess' Name.Builtin
+' ' Text.Whitespace
+'=' Punctuation
+' ' Text.Whitespace
+'protected' Keyword
+')' Punctuation
+'\n ' Text.Whitespace
+'% i too am a comment' Comment
+'\n ' Text.Whitespace
+'matrix' Name
+' ' Text.Whitespace
+'=' Punctuation
+' ' Text.Whitespace
+'[' Punctuation
+'0' Literal.Number.Integer
+',' Punctuation
+' ' Text.Whitespace
+'1' Literal.Number.Integer
+',' Punctuation
+' ' Text.Whitespace
+'2' Literal.Number.Integer
+']' Punctuation
+';' Punctuation
+'\n ' Text.Whitespace
+'string' Name
+' ' Text.Whitespace
+'=' Punctuation
+' ' Text.Whitespace
+"'" Literal.String
+"i am a string'" Literal.String
+'\n ' Text.Whitespace
+'end' Keyword
+'\n ' Text.Whitespace
+'methods' Keyword
+'\n ' Text.Whitespace
+'% i am also a comment' Comment
+'\n ' Text.Whitespace
+'function' Keyword
+' ' Text.Whitespace
+'self' Text
+' ' Text.Whitespace
+'=' Punctuation
+' ' Text.Whitespace
+'Name' Name.Function
+'(' Punctuation
+')' Punctuation
+'\n ' Text.Whitespace
+'% i am a comment inside a constructor' Comment
+'\n ' Text.Whitespace
+'end' Keyword
+'\n ' Text.Whitespace
+'end' Keyword
+'\n' Text.Whitespace
+
+'end' Keyword
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_command_mode.txt b/tests/lexers/matlab/test_command_mode.txt
new file mode 100644
index 00000000..554b4084
--- /dev/null
+++ b/tests/lexers/matlab/test_command_mode.txt
@@ -0,0 +1,12 @@
+# MATLAB allows char function arguments to be given without enclosing
+# parentheses or quote characters, as long as they are space-separated. Test
+# that one common such function is formatted appropriately.
+
+---input---
+help sin
+
+---tokens---
+'help' Name
+' ' Text.Whitespace
+'sin' Literal.String
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_comment_after_continuation.txt b/tests/lexers/matlab/test_comment_after_continuation.txt
new file mode 100644
index 00000000..501407c9
--- /dev/null
+++ b/tests/lexers/matlab/test_comment_after_continuation.txt
@@ -0,0 +1,25 @@
+# Test that text after the line continuation ellipses is marked as a comment.
+
+---input---
+set('T',300,... a comment
+'P',101325);
+
+---tokens---
+'set' Name
+'(' Punctuation
+"'" Literal.String
+"T'" Literal.String
+',' Punctuation
+'300' Literal.Number.Integer
+',' Punctuation
+'...' Keyword
+' a comment' Comment
+'\n' Text.Whitespace
+
+"'" Literal.String
+"P'" Literal.String
+',' Punctuation
+'101325' Literal.Number.Integer
+')' Punctuation
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_keywords_ended_by_newline.txt b/tests/lexers/matlab/test_keywords_ended_by_newline.txt
new file mode 100644
index 00000000..59dca038
--- /dev/null
+++ b/tests/lexers/matlab/test_keywords_ended_by_newline.txt
@@ -0,0 +1,36 @@
+# Test that keywords on their own line are marked as keywords.
+
+---input---
+if x > 100
+ disp('x > 100')
+else
+ disp('x < 100')
+end
+
+---tokens---
+'if' Keyword
+' ' Text.Whitespace
+'x' Name
+' ' Text.Whitespace
+'>' Operator
+' ' Text.Whitespace
+'100' Literal.Number.Integer
+'\n ' Text.Whitespace
+'disp' Name.Builtin
+'(' Punctuation
+"'" Literal.String
+"x > 100'" Literal.String
+')' Punctuation
+'\n' Text.Whitespace
+
+'else' Keyword
+'\n ' Text.Whitespace
+'disp' Name.Builtin
+'(' Punctuation
+"'" Literal.String
+"x < 100'" Literal.String
+')' Punctuation
+'\n' Text.Whitespace
+
+'end' Keyword
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_line_continuation.txt b/tests/lexers/matlab/test_line_continuation.txt
new file mode 100644
index 00000000..bf46f897
--- /dev/null
+++ b/tests/lexers/matlab/test_line_continuation.txt
@@ -0,0 +1,25 @@
+# Test that line continuation by ellipses does not produce generic
+# output on the second line.
+
+---input---
+set('T',300,...
+'P',101325);
+
+---tokens---
+'set' Name
+'(' Punctuation
+"'" Literal.String
+"T'" Literal.String
+',' Punctuation
+'300' Literal.Number.Integer
+',' Punctuation
+'...' Keyword
+'\n' Text.Whitespace
+
+"'" Literal.String
+"P'" Literal.String
+',' Punctuation
+'101325' Literal.Number.Integer
+')' Punctuation
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_multiple_spaces_variable_assignment.txt b/tests/lexers/matlab/test_multiple_spaces_variable_assignment.txt
new file mode 100644
index 00000000..ec5ac24c
--- /dev/null
+++ b/tests/lexers/matlab/test_multiple_spaces_variable_assignment.txt
@@ -0,0 +1,13 @@
+# Test that multiple spaces around an equal sign don't get formatted as a string.
+
+---input---
+x = 100;
+
+---tokens---
+'x' Name
+' ' Text.Whitespace
+'=' Punctuation
+' ' Text.Whitespace
+'100' Literal.Number.Integer
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_one_space_assignment.txt b/tests/lexers/matlab/test_one_space_assignment.txt
new file mode 100644
index 00000000..ceafb6e5
--- /dev/null
+++ b/tests/lexers/matlab/test_one_space_assignment.txt
@@ -0,0 +1,13 @@
+# Test that one space before an equal sign is formatted correctly.
+
+---input---
+x = 100;
+
+---tokens---
+'x' Name
+' ' Text.Whitespace
+'=' Punctuation
+' ' Text.Whitespace
+'100' Literal.Number.Integer
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_operator_multiple_space.txt b/tests/lexers/matlab/test_operator_multiple_space.txt
new file mode 100644
index 00000000..e13d3a37
--- /dev/null
+++ b/tests/lexers/matlab/test_operator_multiple_space.txt
@@ -0,0 +1,13 @@
+# Test that multiple spaces around an operator don't get formatted as a string.
+
+---input---
+x > 100;
+
+---tokens---
+'x' Name
+' ' Text.Whitespace
+'>' Operator
+' ' Text.Whitespace
+'100' Literal.Number.Integer
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/matlab/test_single_line.txt b/tests/lexers/matlab/test_single_line.txt
new file mode 100644
index 00000000..90b2520a
--- /dev/null
+++ b/tests/lexers/matlab/test_single_line.txt
@@ -0,0 +1,18 @@
+---input---
+set('T',300,'P',101325);
+
+---tokens---
+'set' Name
+'(' Punctuation
+"'" Literal.String
+"T'" Literal.String
+',' Punctuation
+'300' Literal.Number.Integer
+',' Punctuation
+"'" Literal.String
+"P'" Literal.String
+',' Punctuation
+'101325' Literal.Number.Integer
+')' Punctuation
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/md/test_bold_fenced_by_asterisk.txt b/tests/lexers/md/test_bold_fenced_by_asterisk.txt
new file mode 100644
index 00000000..7ab6a4b4
--- /dev/null
+++ b/tests/lexers/md/test_bold_fenced_by_asterisk.txt
@@ -0,0 +1,15 @@
+---input---
+**bold**
+
+(**bold**)
+
+---tokens---
+'**bold**' Generic.Strong
+'\n' Text
+
+'\n' Text
+
+'(' Text
+'**bold**' Generic.Strong
+')' Text
+'\n' Text
diff --git a/tests/lexers/md/test_bold_fenced_by_underscore.txt b/tests/lexers/md/test_bold_fenced_by_underscore.txt
new file mode 100644
index 00000000..5c822695
--- /dev/null
+++ b/tests/lexers/md/test_bold_fenced_by_underscore.txt
@@ -0,0 +1,15 @@
+---input---
+__bold__
+
+(__bold__)
+
+---tokens---
+'__bold__' Generic.Strong
+'\n' Text
+
+'\n' Text
+
+'(' Text
+'__bold__' Generic.Strong
+')' Text
+'\n' Text
diff --git a/tests/lexers/md/test_bulleted_list_1.txt b/tests/lexers/md/test_bulleted_list_1.txt
new file mode 100644
index 00000000..6bb58fbe
--- /dev/null
+++ b/tests/lexers/md/test_bulleted_list_1.txt
@@ -0,0 +1,14 @@
+---input---
+* foo
+* bar
+
+---tokens---
+'*' Keyword
+' ' Text
+'foo' Text
+'\n' Text
+
+'*' Keyword
+' ' Text
+'bar' Text
+'\n' Text
diff --git a/tests/lexers/md/test_bulleted_list_2.txt b/tests/lexers/md/test_bulleted_list_2.txt
new file mode 100644
index 00000000..9283611d
--- /dev/null
+++ b/tests/lexers/md/test_bulleted_list_2.txt
@@ -0,0 +1,14 @@
+---input---
+- foo
+- bar
+
+---tokens---
+'-' Keyword
+' ' Text
+'foo' Text
+'\n' Text
+
+'-' Keyword
+' ' Text
+'bar' Text
+'\n' Text
diff --git a/tests/lexers/md/test_bulleted_list_3.txt b/tests/lexers/md/test_bulleted_list_3.txt
new file mode 100644
index 00000000..b7142007
--- /dev/null
+++ b/tests/lexers/md/test_bulleted_list_3.txt
@@ -0,0 +1,14 @@
+---input---
+* *foo*
+* bar
+
+---tokens---
+'*' Keyword
+' ' Text
+'*foo*' Generic.Emph
+'\n' Text
+
+'*' Keyword
+' ' Text
+'bar' Text
+'\n' Text
diff --git a/tests/lexers/md/test_bulleted_list_4.txt b/tests/lexers/md/test_bulleted_list_4.txt
new file mode 100644
index 00000000..fe29d0a6
--- /dev/null
+++ b/tests/lexers/md/test_bulleted_list_4.txt
@@ -0,0 +1,19 @@
+---input---
+```
+code
+```
+* *foo*
+* bar
+
+---tokens---
+'```\ncode\n```\n' Literal.String.Backtick
+
+'*' Keyword
+' ' Text
+'*foo*' Generic.Emph
+'\n' Text
+
+'*' Keyword
+' ' Text
+'bar' Text
+'\n' Text
diff --git a/tests/lexers/md/test_code_block_fenced_by_backticks.txt b/tests/lexers/md/test_code_block_fenced_by_backticks.txt
new file mode 100644
index 00000000..4f8fefa4
--- /dev/null
+++ b/tests/lexers/md/test_code_block_fenced_by_backticks.txt
@@ -0,0 +1,15 @@
+---input---
+```
+code
+```
+
+```
+multi
+`line`
+code
+```
+
+---tokens---
+'```\ncode\n```\n' Literal.String.Backtick
+
+'\n```\nmulti\n`line`\ncode\n```\n' Literal.String.Backtick
diff --git a/tests/lexers/md/test_code_block_with_language.txt b/tests/lexers/md/test_code_block_with_language.txt
new file mode 100644
index 00000000..088ae26c
--- /dev/null
+++ b/tests/lexers/md/test_code_block_with_language.txt
@@ -0,0 +1,16 @@
+---input---
+```python
+import this
+```
+
+---tokens---
+'```' Literal.String.Backtick
+'python' Literal.String.Backtick
+'\n' Text
+
+'import' Keyword.Namespace
+' ' Text
+'this' Name.Namespace
+'\n' Text
+
+'```\n' Literal.String.Backtick
diff --git a/tests/lexers/md/test_escape_italics.txt b/tests/lexers/md/test_escape_italics.txt
new file mode 100644
index 00000000..62512593
--- /dev/null
+++ b/tests/lexers/md/test_escape_italics.txt
@@ -0,0 +1,23 @@
+---input---
+\*no italics\*
+
+\_ no italics \_
+
+---tokens---
+'\\*' Text
+'no' Text
+' ' Text
+'italics' Text
+'\\*' Text
+'\n' Text
+
+'\n' Text
+
+'\\_' Text
+' ' Text
+'no' Text
+' ' Text
+'italics' Text
+' ' Text
+'\\_' Text
+'\n' Text
diff --git a/tests/lexers/md/test_inline_code.txt b/tests/lexers/md/test_inline_code.txt
new file mode 100644
index 00000000..c79c5286
--- /dev/null
+++ b/tests/lexers/md/test_inline_code.txt
@@ -0,0 +1,36 @@
+---input---
+code: `code`
+
+ `**code**`
+
+(`code`)
+
+code (`in brackets`)
+
+---tokens---
+'code:' Text
+' ' Text
+'`code`' Literal.String.Backtick
+'\n' Text
+
+'\n' Text
+
+' ' Text
+'`**code**`' Literal.String.Backtick
+'\n' Text
+
+'\n' Text
+
+'(' Text
+'`code`' Literal.String.Backtick
+')' Text
+'\n' Text
+
+'\n' Text
+
+'code' Text
+' ' Text
+'(' Text
+'`in brackets`' Literal.String.Backtick
+')' Text
+'\n' Text
diff --git a/tests/lexers/md/test_inline_code_after_block.txt b/tests/lexers/md/test_inline_code_after_block.txt
new file mode 100644
index 00000000..9be80acf
--- /dev/null
+++ b/tests/lexers/md/test_inline_code_after_block.txt
@@ -0,0 +1,19 @@
+---input---
+```
+code
+```
+* nocode
+* `code`
+
+---tokens---
+'```\ncode\n```\n' Literal.String.Backtick
+
+'*' Keyword
+' ' Text
+'nocode' Text
+'\n' Text
+
+'*' Keyword
+' ' Text
+'`code`' Literal.String.Backtick
+'\n' Text
diff --git a/tests/lexers/md/test_inline_code_in_list.txt b/tests/lexers/md/test_inline_code_in_list.txt
new file mode 100644
index 00000000..643f0e5d
--- /dev/null
+++ b/tests/lexers/md/test_inline_code_in_list.txt
@@ -0,0 +1,26 @@
+---input---
+* `code`
+
+- `code`
+
+1. `code`
+
+---tokens---
+'*' Keyword
+' ' Text
+'`code`' Literal.String.Backtick
+'\n' Text
+
+'\n' Text
+
+'-' Keyword
+' ' Text
+'`code`' Literal.String.Backtick
+'\n' Text
+
+'\n' Text
+
+'1.' Keyword
+' ' Text
+'`code`' Literal.String.Backtick
+'\n' Text
diff --git a/tests/lexers/md/test_invalid_bold.txt b/tests/lexers/md/test_invalid_bold.txt
new file mode 100644
index 00000000..cc55d832
--- /dev/null
+++ b/tests/lexers/md/test_invalid_bold.txt
@@ -0,0 +1,31 @@
+---input---
+**no bold__
+
+__no bold**
+
+*no bold*
+
+_no bold_
+
+---tokens---
+'**no' Text
+' ' Text
+'bold__' Text
+'\n' Text
+
+'\n' Text
+
+'__no' Text
+' ' Text
+'bold**' Text
+'\n' Text
+
+'\n' Text
+
+'*no bold*' Generic.Emph
+'\n' Text
+
+'\n' Text
+
+'_no bold_' Generic.Emph
+'\n' Text
diff --git a/tests/lexers/md/test_invalid_italics.txt b/tests/lexers/md/test_invalid_italics.txt
new file mode 100644
index 00000000..7e7cbc34
--- /dev/null
+++ b/tests/lexers/md/test_invalid_italics.txt
@@ -0,0 +1,31 @@
+---input---
+*no italics_
+
+_no italics*
+
+**no italics**
+
+__no italics__
+
+---tokens---
+'*no' Text
+' ' Text
+'italics_' Text
+'\n' Text
+
+'\n' Text
+
+'_no' Text
+' ' Text
+'italics*' Text
+'\n' Text
+
+'\n' Text
+
+'**no italics**' Generic.Strong
+'\n' Text
+
+'\n' Text
+
+'__no italics__' Generic.Strong
+'\n' Text
diff --git a/tests/lexers/md/test_italics_and_bold.txt b/tests/lexers/md/test_italics_and_bold.txt
new file mode 100644
index 00000000..70a426ec
--- /dev/null
+++ b/tests/lexers/md/test_italics_and_bold.txt
@@ -0,0 +1,21 @@
+---input---
+**bold** and *italics*
+
+*italics* and **bold**
+
+---tokens---
+'**bold**' Generic.Strong
+' ' Text
+'and' Text
+' ' Text
+'*italics*' Generic.Emph
+'\n' Text
+
+'\n' Text
+
+'*italics*' Generic.Emph
+' ' Text
+'and' Text
+' ' Text
+'**bold**' Generic.Strong
+'\n' Text
diff --git a/tests/lexers/md/test_italics_fenced_by_asterisk.txt b/tests/lexers/md/test_italics_fenced_by_asterisk.txt
new file mode 100644
index 00000000..0a43d60d
--- /dev/null
+++ b/tests/lexers/md/test_italics_fenced_by_asterisk.txt
@@ -0,0 +1,15 @@
+---input---
+*italics*
+
+(*italics*)
+
+---tokens---
+'*italics*' Generic.Emph
+'\n' Text
+
+'\n' Text
+
+'(' Text
+'*italics*' Generic.Emph
+')' Text
+'\n' Text
diff --git a/tests/lexers/md/test_italics_fenced_by_underscore.txt b/tests/lexers/md/test_italics_fenced_by_underscore.txt
new file mode 100644
index 00000000..d712ca65
--- /dev/null
+++ b/tests/lexers/md/test_italics_fenced_by_underscore.txt
@@ -0,0 +1,15 @@
+---input---
+_italics_
+
+(_italics_)
+
+---tokens---
+'_italics_' Generic.Emph
+'\n' Text
+
+'\n' Text
+
+'(' Text
+'_italics_' Generic.Emph
+')' Text
+'\n' Text
diff --git a/tests/lexers/md/test_italics_no_multiline.txt b/tests/lexers/md/test_italics_no_multiline.txt
new file mode 100644
index 00000000..ad2007b8
--- /dev/null
+++ b/tests/lexers/md/test_italics_no_multiline.txt
@@ -0,0 +1,10 @@
+---input---
+*no
+italics*
+
+---tokens---
+'*no' Text
+'\n' Text
+
+'italics*' Text
+'\n' Text
diff --git a/tests/lexers/md/test_links.txt b/tests/lexers/md/test_links.txt
new file mode 100644
index 00000000..83b274e2
--- /dev/null
+++ b/tests/lexers/md/test_links.txt
@@ -0,0 +1,23 @@
+---input---
+[text](link)
+
+![Image of foo](https://bar.baz)
+
+---tokens---
+'[' Text
+'text' Name.Tag
+']' Text
+'(' Text
+'link' Name.Attribute
+')' Text
+'\n' Text
+
+'\n' Text
+
+'![' Text
+'Image of foo' Name.Tag
+']' Text
+'(' Text
+'https://bar.baz' Name.Attribute
+')' Text
+'\n' Text
diff --git a/tests/lexers/md/test_mentions.txt b/tests/lexers/md/test_mentions.txt
new file mode 100644
index 00000000..7c7ef01f
--- /dev/null
+++ b/tests/lexers/md/test_mentions.txt
@@ -0,0 +1,10 @@
+---input---
+note for @me:
+
+---tokens---
+'note' Text
+' ' Text
+'for' Text
+' ' Text
+'@me:' Name.Entity
+'\n' Text
diff --git a/tests/lexers/md/test_numbered_list.txt b/tests/lexers/md/test_numbered_list.txt
new file mode 100644
index 00000000..0f386438
--- /dev/null
+++ b/tests/lexers/md/test_numbered_list.txt
@@ -0,0 +1,14 @@
+---input---
+1. foo
+2. bar
+
+---tokens---
+'1.' Keyword
+' ' Text
+'foo' Text
+'\n' Text
+
+'2.' Keyword
+' ' Text
+'bar' Text
+'\n' Text
diff --git a/tests/lexers/md/test_quote.txt b/tests/lexers/md/test_quote.txt
new file mode 100644
index 00000000..cc64ec2c
--- /dev/null
+++ b/tests/lexers/md/test_quote.txt
@@ -0,0 +1,10 @@
+---input---
+> a
+> quote
+
+---tokens---
+'> ' Keyword
+'a\n' Generic.Emph
+
+'> ' Keyword
+'quote\n' Generic.Emph
diff --git a/tests/lexers/md/test_reference_style_links.txt b/tests/lexers/md/test_reference_style_links.txt
new file mode 100644
index 00000000..3acc6875
--- /dev/null
+++ b/tests/lexers/md/test_reference_style_links.txt
@@ -0,0 +1,18 @@
+---input---
+[an example][id]
+[id]: http://example.com
+
+---tokens---
+'[' Text
+'an example' Name.Tag
+']' Text
+'[' Text
+'id' Name.Label
+']' Text
+'\n' Text
+
+'[' Text
+'id' Name.Label
+']: ' Text
+'http://example.com' Name.Attribute
+'\n' Text
diff --git a/tests/lexers/md/test_strikethrough.txt b/tests/lexers/md/test_strikethrough.txt
new file mode 100644
index 00000000..3c63e975
--- /dev/null
+++ b/tests/lexers/md/test_strikethrough.txt
@@ -0,0 +1,9 @@
+---input---
+~~striked~~not striked
+
+---tokens---
+'~~striked~~' Generic.Deleted
+'not' Text
+' ' Text
+'striked' Text
+'\n' Text
diff --git a/tests/lexers/md/test_task_list.txt b/tests/lexers/md/test_task_list.txt
new file mode 100644
index 00000000..e821ff8e
--- /dev/null
+++ b/tests/lexers/md/test_task_list.txt
@@ -0,0 +1,34 @@
+---input---
+- [ ] sample task
+
+* [ ] sample task
+
+ * [ ] sample task
+
+---tokens---
+'- ' Keyword
+'[ ]' Keyword
+' ' Text
+'sample' Text
+' ' Text
+'task' Text
+'\n' Text
+
+'\n' Text
+
+'* ' Keyword
+'[ ]' Keyword
+' ' Text
+'sample' Text
+' ' Text
+'task' Text
+'\n' Text
+
+'\n ' Text
+'* ' Keyword
+'[ ]' Keyword
+' ' Text
+'sample' Text
+' ' Text
+'task' Text
+'\n' Text
diff --git a/tests/lexers/md/test_topics.txt b/tests/lexers/md/test_topics.txt
new file mode 100644
index 00000000..aa84fc98
--- /dev/null
+++ b/tests/lexers/md/test_topics.txt
@@ -0,0 +1,10 @@
+---input---
+message to #you:
+
+---tokens---
+'message' Text
+' ' Text
+'to' Text
+' ' Text
+'#you:' Name.Entity
+'\n' Text
diff --git a/tests/lexers/objectivec/test_literal_number_bool.txt b/tests/lexers/objectivec/test_literal_number_bool.txt
new file mode 100644
index 00000000..131c3997
--- /dev/null
+++ b/tests/lexers/objectivec/test_literal_number_bool.txt
@@ -0,0 +1,7 @@
+---input---
+@NO;
+
+---tokens---
+'@NO' Literal.Number
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/objectivec/test_literal_number_bool_expression.txt b/tests/lexers/objectivec/test_literal_number_bool_expression.txt
new file mode 100644
index 00000000..a492e221
--- /dev/null
+++ b/tests/lexers/objectivec/test_literal_number_bool_expression.txt
@@ -0,0 +1,9 @@
+---input---
+@(YES);
+
+---tokens---
+'@(' Literal
+'YES' Name.Builtin
+')' Literal
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/objectivec/test_literal_number_expression.txt b/tests/lexers/objectivec/test_literal_number_expression.txt
new file mode 100644
index 00000000..f70d0624
--- /dev/null
+++ b/tests/lexers/objectivec/test_literal_number_expression.txt
@@ -0,0 +1,11 @@
+---input---
+@(1+2);
+
+---tokens---
+'@(' Literal
+'1' Literal.Number.Integer
+'+' Operator
+'2' Literal.Number.Integer
+')' Literal
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/objectivec/test_literal_number_int.txt b/tests/lexers/objectivec/test_literal_number_int.txt
new file mode 100644
index 00000000..907a0301
--- /dev/null
+++ b/tests/lexers/objectivec/test_literal_number_int.txt
@@ -0,0 +1,9 @@
+---input---
+@(1);
+
+---tokens---
+'@(' Literal
+'1' Literal.Number.Integer
+')' Literal
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/objectivec/test_literal_number_nested_expression.txt b/tests/lexers/objectivec/test_literal_number_nested_expression.txt
new file mode 100644
index 00000000..8226cb69
--- /dev/null
+++ b/tests/lexers/objectivec/test_literal_number_nested_expression.txt
@@ -0,0 +1,15 @@
+---input---
+@(1+(2+3));
+
+---tokens---
+'@(' Literal
+'1' Literal.Number.Integer
+'+' Operator
+'(' Punctuation
+'2' Literal.Number.Integer
+'+' Operator
+'3' Literal.Number.Integer
+')' Punctuation
+')' Literal
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/objectivec/test_module_import.txt b/tests/lexers/objectivec/test_module_import.txt
new file mode 100644
index 00000000..918d1555
--- /dev/null
+++ b/tests/lexers/objectivec/test_module_import.txt
@@ -0,0 +1,9 @@
+---input---
+@import ModuleA;
+
+---tokens---
+'@import' Keyword
+' ' Text
+'ModuleA' Name
+';' Punctuation
+'\n' Text
diff --git a/tests/lexers/peg/test_basic.txt b/tests/lexers/peg/test_basic.txt
new file mode 100644
index 00000000..78d240e4
--- /dev/null
+++ b/tests/lexers/peg/test_basic.txt
@@ -0,0 +1,17 @@
+---input---
+rule<-("terminal"/nonterminal/[cls])*
+
+---tokens---
+'rule' Name.Class
+'<-' Operator
+'(' Punctuation
+'"terminal"' Literal.String.Double
+'/' Operator
+'nonterminal' Name.Class
+'/' Operator
+'[' Punctuation
+'cls' Literal.String
+']' Punctuation
+')' Punctuation
+'*' Operator
+'\n' Text
diff --git a/tests/lexers/peg/test_modified_strings.txt b/tests/lexers/peg/test_modified_strings.txt
new file mode 100644
index 00000000..21a59877
--- /dev/null
+++ b/tests/lexers/peg/test_modified_strings.txt
@@ -0,0 +1,21 @@
+# see for example:
+# - http://textx.github.io/Arpeggio/
+# - https://nim-lang.org/docs/pegs.html
+# - https://github.com/erikrose/parsimonious
+# can't handle parsimonious-style regex while ~ is a cut operator
+
+---input---
+~"regex" i"insensitive" "multimod"ilx ("not modified")
+
+---tokens---
+'~' Operator
+'"regex"' Literal.String.Double
+' ' Text
+'i"insensitive"' Literal.String.Double
+' ' Text
+'"multimod"ilx' Literal.String.Double
+' ' Text
+'(' Punctuation
+'"not modified"' Literal.String.Double
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/peg/test_operators.txt b/tests/lexers/peg/test_operators.txt
new file mode 100644
index 00000000..b901c818
--- /dev/null
+++ b/tests/lexers/peg/test_operators.txt
@@ -0,0 +1,29 @@
+# see for example:
+# - https://github.com/gvanrossum/pegen
+# - https://nim-lang.org/docs/pegs.html
+
+---input---
+rule = 'a' | 'b'
+rule: 'a' ~ 'b'
+
+---tokens---
+'rule' Name.Class
+' ' Text
+'=' Operator
+' ' Text
+"'a'" Literal.String.Single
+' ' Text
+'|' Operator
+' ' Text
+"'b'" Literal.String.Single
+'\n' Text
+
+'rule' Name.Class
+':' Operator
+' ' Text
+"'a'" Literal.String.Single
+' ' Text
+'~' Operator
+' ' Text
+"'b'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/php/test_string_escaping_run.txt b/tests/lexers/php/test_string_escaping_run.txt
new file mode 100644
index 00000000..e31fbfc0
--- /dev/null
+++ b/tests/lexers/php/test_string_escaping_run.txt
@@ -0,0 +1,16 @@
+---input---
+<?php $x="{\""; ?>
+
+---tokens---
+'<?php' Comment.Preproc
+' ' Text
+'$x' Name.Variable
+'=' Operator
+'"' Literal.String.Double
+'{' Literal.String.Double
+'\\"' Literal.String.Escape
+'"' Literal.String.Double
+';' Punctuation
+' ' Text
+'?>' Comment.Preproc
+'\n' Other
diff --git a/tests/lexers/powershell/test_remoting_session.txt b/tests/lexers/powershell/test_remoting_session.txt
new file mode 100644
index 00000000..9d9db603
--- /dev/null
+++ b/tests/lexers/powershell/test_remoting_session.txt
@@ -0,0 +1,19 @@
+---input---
+[Long-NetBIOS-Hostname]: PS C:\> Get-ChildItem
+
+---tokens---
+'[' Punctuation
+'Long' Name
+'-NetBIOS' Name
+'-Hostname' Name
+']' Punctuation
+':' Error
+' ' Text
+'PS ' Name.Builtin
+'C' Name
+':' Error
+'\\' Punctuation
+'>' Punctuation
+' ' Text
+'Get-ChildItem' Name.Builtin
+'\n' Text
diff --git a/tests/lexers/powershell/test_session.txt b/tests/lexers/powershell/test_session.txt
new file mode 100644
index 00000000..38212440
--- /dev/null
+++ b/tests/lexers/powershell/test_session.txt
@@ -0,0 +1,28 @@
+---input---
+PS C:\> Get-ChildItem
+
+PS> Get-ChildItem
+
+PS > Get-ChildItem
+
+---tokens---
+'PS ' Name.Builtin
+'C' Name
+':' Error
+'\\' Punctuation
+'>' Punctuation
+' ' Text
+'Get-ChildItem' Name.Builtin
+'\n\n' Text
+
+'PS' Name
+'>' Punctuation
+' ' Text
+'Get-ChildItem' Name.Builtin
+'\n\n' Text
+
+'PS ' Name.Builtin
+'>' Punctuation
+' ' Text
+'Get-ChildItem' Name.Builtin
+'\n' Text
diff --git a/tests/lexers/praat/test_broken_unquoted_string.txt b/tests/lexers/praat/test_broken_unquoted_string.txt
new file mode 100644
index 00000000..573b8079
--- /dev/null
+++ b/tests/lexers/praat/test_broken_unquoted_string.txt
@@ -0,0 +1,16 @@
+---input---
+printline string
+... 'interpolated' string
+
+---tokens---
+'printline' Keyword
+' ' Text
+'string' Literal.String
+'\n' Text
+
+'...' Punctuation
+' ' Text
+"'interpolated'" Literal.String.Interpol
+' ' Text
+'string' Literal.String
+'\n' Text
diff --git a/tests/lexers/praat/test_function_call.txt b/tests/lexers/praat/test_function_call.txt
new file mode 100644
index 00000000..ebb24a63
--- /dev/null
+++ b/tests/lexers/praat/test_function_call.txt
@@ -0,0 +1,20 @@
+---input---
+selected("Sound", i+(a*b))
+
+---tokens---
+'selected' Name.Function
+'(' Punctuation
+'"' Literal.String
+'Sound' Literal.String
+'"' Literal.String
+',' Punctuation
+' ' Text
+'i' Text
+'+' Operator
+'(' Text
+'a' Text
+'*' Operator
+'b' Text
+')' Text
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/praat/test_inline_if.txt b/tests/lexers/praat/test_inline_if.txt
new file mode 100644
index 00000000..02b10a5b
--- /dev/null
+++ b/tests/lexers/praat/test_inline_if.txt
@@ -0,0 +1,27 @@
+---input---
+var = if true == 1 then -1 else 0 fi
+
+---tokens---
+'var' Text
+' ' Text
+'=' Operator
+' ' Text
+'if' Keyword
+' ' Text
+'true' Text
+' ' Text
+'==' Operator
+' ' Text
+'1' Literal.Number
+' ' Text
+'then' Keyword
+' ' Text
+'-' Operator
+'1' Literal.Number
+' ' Text
+'else' Keyword
+' ' Text
+'0' Literal.Number
+' ' Text
+'fi' Keyword
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolated_indexed_numeric_with_precision.txt b/tests/lexers/praat/test_interpolated_indexed_numeric_with_precision.txt
new file mode 100644
index 00000000..82849ee8
--- /dev/null
+++ b/tests/lexers/praat/test_interpolated_indexed_numeric_with_precision.txt
@@ -0,0 +1,6 @@
+---input---
+'a[3]:3'
+
+---tokens---
+"'a[3]:3'" Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolated_local_numeric_with_precision.txt b/tests/lexers/praat/test_interpolated_local_numeric_with_precision.txt
new file mode 100644
index 00000000..651509bf
--- /dev/null
+++ b/tests/lexers/praat/test_interpolated_local_numeric_with_precision.txt
@@ -0,0 +1,6 @@
+---input---
+'a.a:3'
+
+---tokens---
+"'a.a:3'" Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolated_numeric_hash.txt b/tests/lexers/praat/test_interpolated_numeric_hash.txt
new file mode 100644
index 00000000..419faa1a
--- /dev/null
+++ b/tests/lexers/praat/test_interpolated_numeric_hash.txt
@@ -0,0 +1,6 @@
+---input---
+'a["b"]'
+
+---tokens---
+'\'a["b"]\'' Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolated_numeric_indexed.txt b/tests/lexers/praat/test_interpolated_numeric_indexed.txt
new file mode 100644
index 00000000..3cd1b422
--- /dev/null
+++ b/tests/lexers/praat/test_interpolated_numeric_indexed.txt
@@ -0,0 +1,6 @@
+---input---
+'a[3]'
+
+---tokens---
+"'a[3]'" Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolated_numeric_with_precision.txt b/tests/lexers/praat/test_interpolated_numeric_with_precision.txt
new file mode 100644
index 00000000..800ac35d
--- /dev/null
+++ b/tests/lexers/praat/test_interpolated_numeric_with_precision.txt
@@ -0,0 +1,6 @@
+---input---
+'a:3'
+
+---tokens---
+"'a:3'" Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolated_string_hash.txt b/tests/lexers/praat/test_interpolated_string_hash.txt
new file mode 100644
index 00000000..608873d2
--- /dev/null
+++ b/tests/lexers/praat/test_interpolated_string_hash.txt
@@ -0,0 +1,6 @@
+---input---
+'a$["b"]'
+
+---tokens---
+'\'a$["b"]\'' Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolated_string_indexed.txt b/tests/lexers/praat/test_interpolated_string_indexed.txt
new file mode 100644
index 00000000..a50b5d16
--- /dev/null
+++ b/tests/lexers/praat/test_interpolated_string_indexed.txt
@@ -0,0 +1,6 @@
+---input---
+'a$[3]'
+
+---tokens---
+"'a$[3]'" Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/praat/test_interpolation_boundary.txt b/tests/lexers/praat/test_interpolation_boundary.txt
new file mode 100644
index 00000000..d125e829
--- /dev/null
+++ b/tests/lexers/praat/test_interpolation_boundary.txt
@@ -0,0 +1,14 @@
+---input---
+"'" + "'"
+
+---tokens---
+'"' Literal.String
+"'" Literal.String
+'"' Literal.String
+' ' Text
+'+' Operator
+' ' Text
+'"' Literal.String
+"'" Literal.String
+'"' Literal.String
+'\n' Text
diff --git a/tests/lexers/praat/test_numeric_assignment.txt b/tests/lexers/praat/test_numeric_assignment.txt
new file mode 100644
index 00000000..ed954568
--- /dev/null
+++ b/tests/lexers/praat/test_numeric_assignment.txt
@@ -0,0 +1,11 @@
+---input---
+var = -15e4
+
+---tokens---
+'var' Text
+' ' Text
+'=' Operator
+' ' Text
+'-' Operator
+'15e4' Literal.Number
+'\n' Text
diff --git a/tests/lexers/praat/test_string_assignment.txt b/tests/lexers/praat/test_string_assignment.txt
new file mode 100644
index 00000000..0e1a86ef
--- /dev/null
+++ b/tests/lexers/praat/test_string_assignment.txt
@@ -0,0 +1,12 @@
+---input---
+var$ = "foo"
+
+---tokens---
+'var$' Text
+' ' Text
+'=' Operator
+' ' Text
+'"' Literal.String
+'foo' Literal.String
+'"' Literal.String
+'\n' Text
diff --git a/tests/lexers/praat/test_string_escaped_quotes.txt b/tests/lexers/praat/test_string_escaped_quotes.txt
new file mode 100644
index 00000000..6867a312
--- /dev/null
+++ b/tests/lexers/praat/test_string_escaped_quotes.txt
@@ -0,0 +1,13 @@
+---input---
+"it said ""foo"""
+
+---tokens---
+'"' Literal.String
+'it said ' Literal.String
+'"' Literal.String
+'"' Literal.String
+'foo' Literal.String
+'"' Literal.String
+'"' Literal.String
+'"' Literal.String
+'\n' Text
diff --git a/tests/lexers/promql/test_expression_and_comment.txt b/tests/lexers/promql/test_expression_and_comment.txt
new file mode 100644
index 00000000..3ee2bfa8
--- /dev/null
+++ b/tests/lexers/promql/test_expression_and_comment.txt
@@ -0,0 +1,15 @@
+---input---
+go_gc_duration_seconds{instance="localhost:9090"} # single comment
+
+---tokens---
+'go_gc_duration_seconds' Name.Variable
+'{' Punctuation
+'instance' Name.Label
+'=' Operator
+'"' Punctuation
+'localhost:9090' Literal.String
+'"' Punctuation
+'}' Punctuation
+' ' Text.Whitespace
+'# single comment' Comment.Single
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_function_delta.txt b/tests/lexers/promql/test_function_delta.txt
new file mode 100644
index 00000000..8b1f9b08
--- /dev/null
+++ b/tests/lexers/promql/test_function_delta.txt
@@ -0,0 +1,19 @@
+---input---
+delta(cpu_temp_celsius{host="zeus"}[2h])
+
+---tokens---
+'delta' Keyword.Reserved
+'(' Operator
+'cpu_temp_celsius' Name.Variable
+'{' Punctuation
+'host' Name.Label
+'=' Operator
+'"' Punctuation
+'zeus' Literal.String
+'"' Punctuation
+'}' Punctuation
+'[' Punctuation
+'2h' Literal.String
+']' Punctuation
+')' Operator
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_function_multi_line.txt b/tests/lexers/promql/test_function_multi_line.txt
new file mode 100644
index 00000000..31664ccd
--- /dev/null
+++ b/tests/lexers/promql/test_function_multi_line.txt
@@ -0,0 +1,80 @@
+---input---
+label_replace(
+ sum by (instance) (
+ irate(node_disk_read_bytes_total[2m])
+ ) / 1024 / 1024,
+ "device",
+ 'disk',
+ "instance",
+ ".*"
+)
+
+---tokens---
+'label_replace' Keyword.Reserved
+'(' Operator
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'sum' Keyword
+' ' Text.Whitespace
+'by' Keyword
+' ' Text.Whitespace
+'(' Operator
+'instance' Name.Variable
+')' Operator
+' ' Text.Whitespace
+'(' Operator
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'irate' Keyword.Reserved
+'(' Operator
+'node_disk_read_bytes_total' Name.Variable
+'[' Punctuation
+'2m' Literal.String
+']' Punctuation
+')' Operator
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+')' Operator
+' ' Text.Whitespace
+'/' Operator
+' ' Text.Whitespace
+'1024' Literal.Number.Integer
+' ' Text.Whitespace
+'/' Operator
+' ' Text.Whitespace
+'1024' Literal.Number.Integer
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'"' Punctuation
+'device' Literal.String
+'"' Punctuation
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+"'" Punctuation
+'disk' Literal.String
+"'" Punctuation
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'"' Punctuation
+'instance' Literal.String
+'"' Punctuation
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'"' Punctuation
+'.*' Literal.String
+'"' Punctuation
+'\n' Text.Whitespace
+
+')' Operator
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_function_multi_line_with_offset.txt b/tests/lexers/promql/test_function_multi_line_with_offset.txt
new file mode 100644
index 00000000..a7462fb9
--- /dev/null
+++ b/tests/lexers/promql/test_function_multi_line_with_offset.txt
@@ -0,0 +1,87 @@
+---input---
+label_replace(
+ avg by(instance)
+ (irate(node_cpu_seconds_total{mode = "idle"}[5m] offset 3s)
+ ) * 100,
+ "device",
+ "cpu",
+ "instance",
+ ".*"
+)
+
+---tokens---
+'label_replace' Keyword.Reserved
+'(' Operator
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'avg' Keyword
+' ' Text.Whitespace
+'by' Keyword
+'(' Operator
+'instance' Name.Variable
+')' Operator
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'(' Operator
+'irate' Keyword.Reserved
+'(' Operator
+'node_cpu_seconds_total' Name.Variable
+'{' Punctuation
+'mode' Name.Label
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'"' Punctuation
+'idle' Literal.String
+'"' Punctuation
+'}' Punctuation
+'[' Punctuation
+'5m' Literal.String
+']' Punctuation
+' ' Text.Whitespace
+'offset' Keyword
+' ' Text.Whitespace
+'3s' Literal.String
+')' Operator
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+')' Operator
+' ' Text.Whitespace
+'*' Operator
+' ' Text.Whitespace
+'100' Literal.Number.Integer
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'"' Punctuation
+'device' Literal.String
+'"' Punctuation
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'"' Punctuation
+'cpu' Literal.String
+'"' Punctuation
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'"' Punctuation
+'instance' Literal.String
+'"' Punctuation
+',' Punctuation
+'\n' Text.Whitespace
+
+' ' Text.Whitespace
+'"' Punctuation
+'.*' Literal.String
+'"' Punctuation
+'\n' Text.Whitespace
+
+')' Operator
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_function_sum_with_args.txt b/tests/lexers/promql/test_function_sum_with_args.txt
new file mode 100644
index 00000000..3d677e14
--- /dev/null
+++ b/tests/lexers/promql/test_function_sum_with_args.txt
@@ -0,0 +1,19 @@
+---input---
+sum by (app, proc) (instance_memory_usage_bytes)
+
+---tokens---
+'sum' Keyword
+' ' Text.Whitespace
+'by' Keyword
+' ' Text.Whitespace
+'(' Operator
+'app' Name.Variable
+',' Punctuation
+' ' Text.Whitespace
+'proc' Name.Variable
+')' Operator
+' ' Text.Whitespace
+'(' Operator
+'instance_memory_usage_bytes' Name.Variable
+')' Operator
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_metric.txt b/tests/lexers/promql/test_metric.txt
new file mode 100644
index 00000000..e4889cf8
--- /dev/null
+++ b/tests/lexers/promql/test_metric.txt
@@ -0,0 +1,6 @@
+---input---
+go_gc_duration_seconds
+
+---tokens---
+'go_gc_duration_seconds' Name.Variable
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_metric_multiple_labels.txt b/tests/lexers/promql/test_metric_multiple_labels.txt
new file mode 100644
index 00000000..bcb0b646
--- /dev/null
+++ b/tests/lexers/promql/test_metric_multiple_labels.txt
@@ -0,0 +1,19 @@
+---input---
+go_gc_duration_seconds{instance="localhost:9090",job="alertmanager"}
+
+---tokens---
+'go_gc_duration_seconds' Name.Variable
+'{' Punctuation
+'instance' Name.Label
+'=' Operator
+'"' Punctuation
+'localhost:9090' Literal.String
+'"' Punctuation
+',' Punctuation
+'job' Name.Label
+'=' Operator
+'"' Punctuation
+'alertmanager' Literal.String
+'"' Punctuation
+'}' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_metric_multiple_labels_with_spaces.txt b/tests/lexers/promql/test_metric_multiple_labels_with_spaces.txt
new file mode 100644
index 00000000..3ca3d495
--- /dev/null
+++ b/tests/lexers/promql/test_metric_multiple_labels_with_spaces.txt
@@ -0,0 +1,22 @@
+---input---
+go_gc_duration_seconds{ instance="localhost:9090", job="alertmanager" }
+
+---tokens---
+'go_gc_duration_seconds' Name.Variable
+'{' Punctuation
+' ' Text.Whitespace
+'instance' Name.Label
+'=' Operator
+'"' Punctuation
+'localhost:9090' Literal.String
+'"' Punctuation
+',' Punctuation
+' ' Text.Whitespace
+'job' Name.Label
+'=' Operator
+'"' Punctuation
+'alertmanager' Literal.String
+'"' Punctuation
+' ' Text.Whitespace
+'}' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/promql/test_metric_one_label.txt b/tests/lexers/promql/test_metric_one_label.txt
new file mode 100644
index 00000000..8baeafb8
--- /dev/null
+++ b/tests/lexers/promql/test_metric_one_label.txt
@@ -0,0 +1,13 @@
+---input---
+go_gc_duration_seconds{instance="localhost:9090"}
+
+---tokens---
+'go_gc_duration_seconds' Name.Variable
+'{' Punctuation
+'instance' Name.Label
+'=' Operator
+'"' Punctuation
+'localhost:9090' Literal.String
+'"' Punctuation
+'}' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/properties/test_comments.txt b/tests/lexers/properties/test_comments.txt
new file mode 100644
index 00000000..b785cd2c
--- /dev/null
+++ b/tests/lexers/properties/test_comments.txt
@@ -0,0 +1,12 @@
+# Assures lines led by either # or ! are recognized as a comment
+
+---input---
+! a comment
+# also a comment
+
+---tokens---
+'! a comment' Comment
+'\n' Text
+
+'# also a comment' Comment
+'\n' Text
diff --git a/tests/lexers/properties/test_escaped_space_in_key.txt b/tests/lexers/properties/test_escaped_space_in_key.txt
new file mode 100644
index 00000000..5b947817
--- /dev/null
+++ b/tests/lexers/properties/test_escaped_space_in_key.txt
@@ -0,0 +1,10 @@
+---input---
+key = value
+
+---tokens---
+'key' Name.Attribute
+' ' Text
+'=' Operator
+' ' Text
+'value' Literal.String
+'\n' Text
diff --git a/tests/lexers/properties/test_escaped_space_in_value.txt b/tests/lexers/properties/test_escaped_space_in_value.txt
new file mode 100644
index 00000000..5cf7eb56
--- /dev/null
+++ b/tests/lexers/properties/test_escaped_space_in_value.txt
@@ -0,0 +1,10 @@
+---input---
+key = doubleword\ value
+
+---tokens---
+'key' Name.Attribute
+' ' Text
+'=' Operator
+' ' Text
+'doubleword\\ value' Literal.String
+'\n' Text
diff --git a/tests/lexers/properties/test_just_key.txt b/tests/lexers/properties/test_just_key.txt
new file mode 100644
index 00000000..7af7379d
--- /dev/null
+++ b/tests/lexers/properties/test_just_key.txt
@@ -0,0 +1,6 @@
+---input---
+justkey
+
+---tokens---
+'justkey' Name.Attribute
+'\n' Text
diff --git a/tests/lexers/properties/test_just_key_with_space.txt b/tests/lexers/properties/test_just_key_with_space.txt
new file mode 100644
index 00000000..0e009501
--- /dev/null
+++ b/tests/lexers/properties/test_just_key_with_space.txt
@@ -0,0 +1,6 @@
+---input---
+just\ key
+
+---tokens---
+'just\\ key' Name.Attribute
+'\n' Text
diff --git a/tests/lexers/properties/test_leading_whitespace_comments.txt b/tests/lexers/properties/test_leading_whitespace_comments.txt
new file mode 100644
index 00000000..337734ca
--- /dev/null
+++ b/tests/lexers/properties/test_leading_whitespace_comments.txt
@@ -0,0 +1,6 @@
+---input---
+# comment
+
+---tokens---
+'# comment' Comment
+'\n' Text
diff --git a/tests/lexers/properties/test_space_delimited_kv_pair.txt b/tests/lexers/properties/test_space_delimited_kv_pair.txt
new file mode 100644
index 00000000..0f79fc37
--- /dev/null
+++ b/tests/lexers/properties/test_space_delimited_kv_pair.txt
@@ -0,0 +1,7 @@
+---input---
+key value
+
+---tokens---
+'key' Name.Attribute
+' ' Text
+'value\n' Literal.String
diff --git a/tests/lexers/python/test_fstring_01a.txt b/tests/lexers/python/test_fstring_01a.txt
new file mode 100644
index 00000000..406e2cc8
--- /dev/null
+++ b/tests/lexers/python/test_fstring_01a.txt
@@ -0,0 +1,25 @@
+---input---
+f'My name is {name}, my age next year is {age+1}, my anniversary is {anniversary:%A, %B %d, %Y}.'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'My name is ' Literal.String.Single
+'{' Literal.String.Interpol
+'name' Name
+'}' Literal.String.Interpol
+', my age next year is ' Literal.String.Single
+'{' Literal.String.Interpol
+'age' Name
+'+' Operator
+'1' Literal.Number.Integer
+'}' Literal.String.Interpol
+', my anniversary is ' Literal.String.Single
+'{' Literal.String.Interpol
+'anniversary' Name
+':' Literal.String.Interpol
+'%A, %B %d, %Y' Literal.String.Single
+'}' Literal.String.Interpol
+'.' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_01b.txt b/tests/lexers/python/test_fstring_01b.txt
new file mode 100644
index 00000000..278258e2
--- /dev/null
+++ b/tests/lexers/python/test_fstring_01b.txt
@@ -0,0 +1,25 @@
+---input---
+f"My name is {name}, my age next year is {age+1}, my anniversary is {anniversary:%A, %B %d, %Y}."
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'My name is ' Literal.String.Double
+'{' Literal.String.Interpol
+'name' Name
+'}' Literal.String.Interpol
+', my age next year is ' Literal.String.Double
+'{' Literal.String.Interpol
+'age' Name
+'+' Operator
+'1' Literal.Number.Integer
+'}' Literal.String.Interpol
+', my anniversary is ' Literal.String.Double
+'{' Literal.String.Interpol
+'anniversary' Name
+':' Literal.String.Interpol
+'%A, %B %d, %Y' Literal.String.Double
+'}' Literal.String.Interpol
+'.' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_02a.txt b/tests/lexers/python/test_fstring_02a.txt
new file mode 100644
index 00000000..359c13be
--- /dev/null
+++ b/tests/lexers/python/test_fstring_02a.txt
@@ -0,0 +1,13 @@
+---input---
+f'He said his name is {name!r}.'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'He said his name is ' Literal.String.Single
+'{' Literal.String.Interpol
+'name' Name
+'!r}' Literal.String.Interpol
+'.' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_02b.txt b/tests/lexers/python/test_fstring_02b.txt
new file mode 100644
index 00000000..abef1600
--- /dev/null
+++ b/tests/lexers/python/test_fstring_02b.txt
@@ -0,0 +1,13 @@
+---input---
+f"He said his name is {name!r}."
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'He said his name is ' Literal.String.Double
+'{' Literal.String.Interpol
+'name' Name
+'!r}' Literal.String.Interpol
+'.' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_03a.txt b/tests/lexers/python/test_fstring_03a.txt
new file mode 100644
index 00000000..3edee4cf
--- /dev/null
+++ b/tests/lexers/python/test_fstring_03a.txt
@@ -0,0 +1,14 @@
+---input---
+f'input={value:#06x}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'input=' Literal.String.Single
+'{' Literal.String.Interpol
+'value' Name
+':' Literal.String.Interpol
+'#06x' Literal.String.Single
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_03b.txt b/tests/lexers/python/test_fstring_03b.txt
new file mode 100644
index 00000000..f7723898
--- /dev/null
+++ b/tests/lexers/python/test_fstring_03b.txt
@@ -0,0 +1,14 @@
+---input---
+f"input={value:#06x}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'input=' Literal.String.Double
+'{' Literal.String.Interpol
+'value' Name
+':' Literal.String.Interpol
+'#06x' Literal.String.Double
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_04a.txt b/tests/lexers/python/test_fstring_04a.txt
new file mode 100644
index 00000000..85619ab8
--- /dev/null
+++ b/tests/lexers/python/test_fstring_04a.txt
@@ -0,0 +1,13 @@
+---input---
+f'{"quoted string"}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'"' Literal.String.Double
+'quoted string' Literal.String.Double
+'"' Literal.String.Double
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_04b.txt b/tests/lexers/python/test_fstring_04b.txt
new file mode 100644
index 00000000..bd6308c2
--- /dev/null
+++ b/tests/lexers/python/test_fstring_04b.txt
@@ -0,0 +1,13 @@
+---input---
+f"{'quoted string'}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+"'" Literal.String.Single
+'quoted string' Literal.String.Single
+"'" Literal.String.Single
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_05a.txt b/tests/lexers/python/test_fstring_05a.txt
new file mode 100644
index 00000000..18074571
--- /dev/null
+++ b/tests/lexers/python/test_fstring_05a.txt
@@ -0,0 +1,16 @@
+---input---
+f'{f"{inner}"}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'inner' Name
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_05b.txt b/tests/lexers/python/test_fstring_05b.txt
new file mode 100644
index 00000000..b0f3ebd8
--- /dev/null
+++ b/tests/lexers/python/test_fstring_05b.txt
@@ -0,0 +1,16 @@
+---input---
+f"{f'{inner}'}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'inner' Name
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_06a.txt b/tests/lexers/python/test_fstring_06a.txt
new file mode 100644
index 00000000..0ba0809a
--- /dev/null
+++ b/tests/lexers/python/test_fstring_06a.txt
@@ -0,0 +1,16 @@
+# SyntaxError: f-string expression part cannot include a backslash
+
+---input---
+f'{\'quoted string\'}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'\\' Error
+"'" Literal.String.Single
+'quoted string' Literal.String.Single
+"\\'" Literal.String.Escape
+'}' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_06b.txt b/tests/lexers/python/test_fstring_06b.txt
new file mode 100644
index 00000000..f98b3dc2
--- /dev/null
+++ b/tests/lexers/python/test_fstring_06b.txt
@@ -0,0 +1,16 @@
+# SyntaxError: f-string expression part cannot include a backslash
+
+---input---
+f"{\"quoted string\"}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'\\' Error
+'"' Literal.String.Double
+'quoted string' Literal.String.Double
+'\\"' Literal.String.Escape
+'}' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_07a.txt b/tests/lexers/python/test_fstring_07a.txt
new file mode 100644
index 00000000..63942200
--- /dev/null
+++ b/tests/lexers/python/test_fstring_07a.txt
@@ -0,0 +1,17 @@
+---input---
+f'{{ {4*10} }}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{{' Literal.String.Escape
+' ' Literal.String.Single
+'{' Literal.String.Interpol
+'4' Literal.Number.Integer
+'*' Operator
+'10' Literal.Number.Integer
+'}' Literal.String.Interpol
+' ' Literal.String.Single
+'}}' Literal.String.Escape
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_07b.txt b/tests/lexers/python/test_fstring_07b.txt
new file mode 100644
index 00000000..c7e87bc7
--- /dev/null
+++ b/tests/lexers/python/test_fstring_07b.txt
@@ -0,0 +1,17 @@
+---input---
+f"{{ {4*10} }}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{{' Literal.String.Escape
+' ' Literal.String.Double
+'{' Literal.String.Interpol
+'4' Literal.Number.Integer
+'*' Operator
+'10' Literal.Number.Integer
+'}' Literal.String.Interpol
+' ' Literal.String.Double
+'}}' Literal.String.Escape
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_08a.txt b/tests/lexers/python/test_fstring_08a.txt
new file mode 100644
index 00000000..61b6cc8f
--- /dev/null
+++ b/tests/lexers/python/test_fstring_08a.txt
@@ -0,0 +1,15 @@
+---input---
+f'{{{4*10}}}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{{' Literal.String.Escape
+'{' Literal.String.Interpol
+'4' Literal.Number.Integer
+'*' Operator
+'10' Literal.Number.Integer
+'}' Literal.String.Interpol
+'}}' Literal.String.Escape
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_08b.txt b/tests/lexers/python/test_fstring_08b.txt
new file mode 100644
index 00000000..41eb13a4
--- /dev/null
+++ b/tests/lexers/python/test_fstring_08b.txt
@@ -0,0 +1,15 @@
+---input---
+f"{{{4*10}}}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{{' Literal.String.Escape
+'{' Literal.String.Interpol
+'4' Literal.Number.Integer
+'*' Operator
+'10' Literal.Number.Integer
+'}' Literal.String.Interpol
+'}}' Literal.String.Escape
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_09a.txt b/tests/lexers/python/test_fstring_09a.txt
new file mode 100644
index 00000000..3dcec51e
--- /dev/null
+++ b/tests/lexers/python/test_fstring_09a.txt
@@ -0,0 +1,14 @@
+---input---
+fr'x={4*10}'
+
+---tokens---
+'fr' Literal.String.Affix
+"'" Literal.String.Single
+'x=' Literal.String.Single
+'{' Literal.String.Interpol
+'4' Literal.Number.Integer
+'*' Operator
+'10' Literal.Number.Integer
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_09b.txt b/tests/lexers/python/test_fstring_09b.txt
new file mode 100644
index 00000000..72932b29
--- /dev/null
+++ b/tests/lexers/python/test_fstring_09b.txt
@@ -0,0 +1,14 @@
+---input---
+fr"x={4*10}"
+
+---tokens---
+'fr' Literal.String.Affix
+'"' Literal.String.Double
+'x=' Literal.String.Double
+'{' Literal.String.Interpol
+'4' Literal.Number.Integer
+'*' Operator
+'10' Literal.Number.Integer
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_10a.txt b/tests/lexers/python/test_fstring_10a.txt
new file mode 100644
index 00000000..02fba2b4
--- /dev/null
+++ b/tests/lexers/python/test_fstring_10a.txt
@@ -0,0 +1,18 @@
+---input---
+f'abc {a["x"]} def'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'abc ' Literal.String.Single
+'{' Literal.String.Interpol
+'a' Name
+'[' Punctuation
+'"' Literal.String.Double
+'x' Literal.String.Double
+'"' Literal.String.Double
+']' Punctuation
+'}' Literal.String.Interpol
+' def' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_10b.txt b/tests/lexers/python/test_fstring_10b.txt
new file mode 100644
index 00000000..46150935
--- /dev/null
+++ b/tests/lexers/python/test_fstring_10b.txt
@@ -0,0 +1,18 @@
+---input---
+f"abc {a['x']} def"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'abc ' Literal.String.Double
+'{' Literal.String.Interpol
+'a' Name
+'[' Punctuation
+"'" Literal.String.Single
+'x' Literal.String.Single
+"'" Literal.String.Single
+']' Punctuation
+'}' Literal.String.Interpol
+' def' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_11a.txt b/tests/lexers/python/test_fstring_11a.txt
new file mode 100644
index 00000000..6116c960
--- /dev/null
+++ b/tests/lexers/python/test_fstring_11a.txt
@@ -0,0 +1,18 @@
+---input---
+f'''abc {a['x']} def'''
+
+---tokens---
+'f' Literal.String.Affix
+"'''" Literal.String.Single
+'abc ' Literal.String.Single
+'{' Literal.String.Interpol
+'a' Name
+'[' Punctuation
+"'" Literal.String.Single
+'x' Literal.String.Single
+"'" Literal.String.Single
+']' Punctuation
+'}' Literal.String.Interpol
+' def' Literal.String.Single
+"'''" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_11b.txt b/tests/lexers/python/test_fstring_11b.txt
new file mode 100644
index 00000000..77c3d6f8
--- /dev/null
+++ b/tests/lexers/python/test_fstring_11b.txt
@@ -0,0 +1,18 @@
+---input---
+f"""abc {a["x"]} def"""
+
+---tokens---
+'f' Literal.String.Affix
+'"""' Literal.String.Double
+'abc ' Literal.String.Double
+'{' Literal.String.Interpol
+'a' Name
+'[' Punctuation
+'"' Literal.String.Double
+'x' Literal.String.Double
+'"' Literal.String.Double
+']' Punctuation
+'}' Literal.String.Interpol
+' def' Literal.String.Double
+'"""' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_12a.txt b/tests/lexers/python/test_fstring_12a.txt
new file mode 100644
index 00000000..cc8eeab4
--- /dev/null
+++ b/tests/lexers/python/test_fstring_12a.txt
@@ -0,0 +1,16 @@
+---input---
+f'''{x
++1}'''
+
+---tokens---
+'f' Literal.String.Affix
+"'''" Literal.String.Single
+'{' Literal.String.Interpol
+'x' Name
+'\n' Text
+
+'+' Operator
+'1' Literal.Number.Integer
+'}' Literal.String.Interpol
+"'''" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_12b.txt b/tests/lexers/python/test_fstring_12b.txt
new file mode 100644
index 00000000..160b2d91
--- /dev/null
+++ b/tests/lexers/python/test_fstring_12b.txt
@@ -0,0 +1,16 @@
+---input---
+f"""{x
++1}"""
+
+---tokens---
+'f' Literal.String.Affix
+'"""' Literal.String.Double
+'{' Literal.String.Interpol
+'x' Name
+'\n' Text
+
+'+' Operator
+'1' Literal.Number.Integer
+'}' Literal.String.Interpol
+'"""' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_13a.txt b/tests/lexers/python/test_fstring_13a.txt
new file mode 100644
index 00000000..b556a5b6
--- /dev/null
+++ b/tests/lexers/python/test_fstring_13a.txt
@@ -0,0 +1,17 @@
+---input---
+f'''{d[0
+]}'''
+
+---tokens---
+'f' Literal.String.Affix
+"'''" Literal.String.Single
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+'0' Literal.Number.Integer
+'\n' Text
+
+']' Punctuation
+'}' Literal.String.Interpol
+"'''" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_13b.txt b/tests/lexers/python/test_fstring_13b.txt
new file mode 100644
index 00000000..1e9fd60d
--- /dev/null
+++ b/tests/lexers/python/test_fstring_13b.txt
@@ -0,0 +1,17 @@
+---input---
+f"""{d[0
+]}"""
+
+---tokens---
+'f' Literal.String.Affix
+'"""' Literal.String.Double
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+'0' Literal.Number.Integer
+'\n' Text
+
+']' Punctuation
+'}' Literal.String.Interpol
+'"""' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_14a.txt b/tests/lexers/python/test_fstring_14a.txt
new file mode 100644
index 00000000..de202a21
--- /dev/null
+++ b/tests/lexers/python/test_fstring_14a.txt
@@ -0,0 +1,20 @@
+---input---
+f'result: {value:{width}.{precision}}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'result: ' Literal.String.Single
+'{' Literal.String.Interpol
+'value' Name
+':' Literal.String.Interpol
+'{' Literal.String.Interpol
+'width' Name
+'}' Literal.String.Interpol
+'.' Literal.String.Single
+'{' Literal.String.Interpol
+'precision' Name
+'}' Literal.String.Interpol
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_14b.txt b/tests/lexers/python/test_fstring_14b.txt
new file mode 100644
index 00000000..83aaad5e
--- /dev/null
+++ b/tests/lexers/python/test_fstring_14b.txt
@@ -0,0 +1,20 @@
+---input---
+f"result: {value:{width}.{precision}}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'result: ' Literal.String.Double
+'{' Literal.String.Interpol
+'value' Name
+':' Literal.String.Interpol
+'{' Literal.String.Interpol
+'width' Name
+'}' Literal.String.Interpol
+'.' Literal.String.Double
+'{' Literal.String.Interpol
+'precision' Name
+'}' Literal.String.Interpol
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_15a.txt b/tests/lexers/python/test_fstring_15a.txt
new file mode 100644
index 00000000..635e8791
--- /dev/null
+++ b/tests/lexers/python/test_fstring_15a.txt
@@ -0,0 +1,42 @@
+---input---
+'a' 'b' f'{x}' '{c}' f'str<{y:^4}>' 'd' 'e'
+
+---tokens---
+"'" Literal.String.Single
+'a' Literal.String.Single
+"'" Literal.String.Single
+' ' Text
+"'" Literal.String.Single
+'b' Literal.String.Single
+"'" Literal.String.Single
+' ' Text
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'x' Name
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+' ' Text
+"'" Literal.String.Single
+'{c}' Literal.String.Interpol
+"'" Literal.String.Single
+' ' Text
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'str<' Literal.String.Single
+'{' Literal.String.Interpol
+'y' Name
+':' Literal.String.Interpol
+'^4' Literal.String.Single
+'}' Literal.String.Interpol
+'>' Literal.String.Single
+"'" Literal.String.Single
+' ' Text
+"'" Literal.String.Single
+'d' Literal.String.Single
+"'" Literal.String.Single
+' ' Text
+"'" Literal.String.Single
+'e' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_15b.txt b/tests/lexers/python/test_fstring_15b.txt
new file mode 100644
index 00000000..1a1ab251
--- /dev/null
+++ b/tests/lexers/python/test_fstring_15b.txt
@@ -0,0 +1,42 @@
+---input---
+"a" "b" f"{x}" "{c}" f"str<{y:^4}>" "d" "e"
+
+---tokens---
+'"' Literal.String.Double
+'a' Literal.String.Double
+'"' Literal.String.Double
+' ' Text
+'"' Literal.String.Double
+'b' Literal.String.Double
+'"' Literal.String.Double
+' ' Text
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'x' Name
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+' ' Text
+'"' Literal.String.Double
+'{c}' Literal.String.Interpol
+'"' Literal.String.Double
+' ' Text
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'str<' Literal.String.Double
+'{' Literal.String.Interpol
+'y' Name
+':' Literal.String.Interpol
+'^4' Literal.String.Double
+'}' Literal.String.Interpol
+'>' Literal.String.Double
+'"' Literal.String.Double
+' ' Text
+'"' Literal.String.Double
+'d' Literal.String.Double
+'"' Literal.String.Double
+' ' Text
+'"' Literal.String.Double
+'e' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_16a.txt b/tests/lexers/python/test_fstring_16a.txt
new file mode 100644
index 00000000..cbd4d14b
--- /dev/null
+++ b/tests/lexers/python/test_fstring_16a.txt
@@ -0,0 +1,18 @@
+---input---
+f'{i}:{d[i]}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'i' Name
+'}' Literal.String.Interpol
+':' Literal.String.Single
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+'i' Name
+']' Punctuation
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_16b.txt b/tests/lexers/python/test_fstring_16b.txt
new file mode 100644
index 00000000..8ce173f0
--- /dev/null
+++ b/tests/lexers/python/test_fstring_16b.txt
@@ -0,0 +1,18 @@
+---input---
+f"{i}:{d[i]}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'i' Name
+'}' Literal.String.Interpol
+':' Literal.String.Double
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+'i' Name
+']' Punctuation
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_17a.txt b/tests/lexers/python/test_fstring_17a.txt
new file mode 100644
index 00000000..a1b5b3a5
--- /dev/null
+++ b/tests/lexers/python/test_fstring_17a.txt
@@ -0,0 +1,14 @@
+---input---
+f'x = {x:+3}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'x = ' Literal.String.Single
+'{' Literal.String.Interpol
+'x' Name
+':' Literal.String.Interpol
+'+3' Literal.String.Single
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_17b.txt b/tests/lexers/python/test_fstring_17b.txt
new file mode 100644
index 00000000..5f34b174
--- /dev/null
+++ b/tests/lexers/python/test_fstring_17b.txt
@@ -0,0 +1,14 @@
+---input---
+f"x = {x:+3}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'x = ' Literal.String.Double
+'{' Literal.String.Interpol
+'x' Name
+':' Literal.String.Interpol
+'+3' Literal.String.Double
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_18a.txt b/tests/lexers/python/test_fstring_18a.txt
new file mode 100644
index 00000000..60d05123
--- /dev/null
+++ b/tests/lexers/python/test_fstring_18a.txt
@@ -0,0 +1,25 @@
+---input---
+f'{fn(lst,2)} {fn(lst,3)}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'fn' Name
+'(' Punctuation
+'lst' Name
+',' Punctuation
+'2' Literal.Number.Integer
+')' Punctuation
+'}' Literal.String.Interpol
+' ' Literal.String.Single
+'{' Literal.String.Interpol
+'fn' Name
+'(' Punctuation
+'lst' Name
+',' Punctuation
+'3' Literal.Number.Integer
+')' Punctuation
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_18b.txt b/tests/lexers/python/test_fstring_18b.txt
new file mode 100644
index 00000000..a189d00a
--- /dev/null
+++ b/tests/lexers/python/test_fstring_18b.txt
@@ -0,0 +1,25 @@
+---input---
+f"{fn(lst,2)} {fn(lst,3)}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'fn' Name
+'(' Punctuation
+'lst' Name
+',' Punctuation
+'2' Literal.Number.Integer
+')' Punctuation
+'}' Literal.String.Interpol
+' ' Literal.String.Double
+'{' Literal.String.Interpol
+'fn' Name
+'(' Punctuation
+'lst' Name
+',' Punctuation
+'3' Literal.Number.Integer
+')' Punctuation
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_19a.txt b/tests/lexers/python/test_fstring_19a.txt
new file mode 100644
index 00000000..6e412a38
--- /dev/null
+++ b/tests/lexers/python/test_fstring_19a.txt
@@ -0,0 +1,46 @@
+---input---
+f'mapping is { {a:b for (a, b) in ((1, 2), (3, 4))} }'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'mapping is ' Literal.String.Single
+'{' Literal.String.Interpol
+' ' Text
+'{' Punctuation
+'a' Name
+':' Punctuation
+'b' Name
+' ' Text
+'for' Keyword
+' ' Text
+'(' Punctuation
+'a' Name
+',' Punctuation
+' ' Text
+'b' Name
+')' Punctuation
+' ' Text
+'in' Operator.Word
+' ' Text
+'(' Punctuation
+'(' Punctuation
+'1' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'2' Literal.Number.Integer
+')' Punctuation
+',' Punctuation
+' ' Text
+'(' Punctuation
+'3' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'4' Literal.Number.Integer
+')' Punctuation
+')' Punctuation
+'}' Punctuation
+' ' Text
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_19b.txt b/tests/lexers/python/test_fstring_19b.txt
new file mode 100644
index 00000000..5c9d6f73
--- /dev/null
+++ b/tests/lexers/python/test_fstring_19b.txt
@@ -0,0 +1,46 @@
+---input---
+f"mapping is { {a:b for (a, b) in ((1, 2), (3, 4))} }"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'mapping is ' Literal.String.Double
+'{' Literal.String.Interpol
+' ' Text
+'{' Punctuation
+'a' Name
+':' Punctuation
+'b' Name
+' ' Text
+'for' Keyword
+' ' Text
+'(' Punctuation
+'a' Name
+',' Punctuation
+' ' Text
+'b' Name
+')' Punctuation
+' ' Text
+'in' Operator.Word
+' ' Text
+'(' Punctuation
+'(' Punctuation
+'1' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'2' Literal.Number.Integer
+')' Punctuation
+',' Punctuation
+' ' Text
+'(' Punctuation
+'3' Literal.Number.Integer
+',' Punctuation
+' ' Text
+'4' Literal.Number.Integer
+')' Punctuation
+')' Punctuation
+'}' Punctuation
+' ' Text
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_20a.txt b/tests/lexers/python/test_fstring_20a.txt
new file mode 100644
index 00000000..a72a7a44
--- /dev/null
+++ b/tests/lexers/python/test_fstring_20a.txt
@@ -0,0 +1,17 @@
+---input---
+f'a={d["a"]}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'a=' Literal.String.Single
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+'"' Literal.String.Double
+'a' Literal.String.Double
+'"' Literal.String.Double
+']' Punctuation
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_20b.txt b/tests/lexers/python/test_fstring_20b.txt
new file mode 100644
index 00000000..556f7de8
--- /dev/null
+++ b/tests/lexers/python/test_fstring_20b.txt
@@ -0,0 +1,17 @@
+---input---
+f"a={d['a']}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'a=' Literal.String.Double
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+"'" Literal.String.Single
+'a' Literal.String.Single
+"'" Literal.String.Single
+']' Punctuation
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_21a.txt b/tests/lexers/python/test_fstring_21a.txt
new file mode 100644
index 00000000..1a591003
--- /dev/null
+++ b/tests/lexers/python/test_fstring_21a.txt
@@ -0,0 +1,15 @@
+---input---
+f'a={d[a]}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'a=' Literal.String.Single
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+'a' Name
+']' Punctuation
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_21b.txt b/tests/lexers/python/test_fstring_21b.txt
new file mode 100644
index 00000000..7e3ed08a
--- /dev/null
+++ b/tests/lexers/python/test_fstring_21b.txt
@@ -0,0 +1,15 @@
+---input---
+f"a={d[a]}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'a=' Literal.String.Double
+'{' Literal.String.Interpol
+'d' Name
+'[' Punctuation
+'a' Name
+']' Punctuation
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_22a.txt b/tests/lexers/python/test_fstring_22a.txt
new file mode 100644
index 00000000..6a3b792f
--- /dev/null
+++ b/tests/lexers/python/test_fstring_22a.txt
@@ -0,0 +1,14 @@
+---input---
+fr'{header}:\s+'
+
+---tokens---
+'fr' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'header' Name
+'}' Literal.String.Interpol
+':' Literal.String.Single
+'\\' Literal.String.Single
+'s+' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_22b.txt b/tests/lexers/python/test_fstring_22b.txt
new file mode 100644
index 00000000..75a08aac
--- /dev/null
+++ b/tests/lexers/python/test_fstring_22b.txt
@@ -0,0 +1,14 @@
+---input---
+fr"{header}:\s+"
+
+---tokens---
+'fr' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'header' Name
+'}' Literal.String.Interpol
+':' Literal.String.Double
+'\\' Literal.String.Double
+'s+' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_23a.txt b/tests/lexers/python/test_fstring_23a.txt
new file mode 100644
index 00000000..ae944ed2
--- /dev/null
+++ b/tests/lexers/python/test_fstring_23a.txt
@@ -0,0 +1,11 @@
+---input---
+f'{a!r}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'a' Name
+'!r}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_23b.txt b/tests/lexers/python/test_fstring_23b.txt
new file mode 100644
index 00000000..8038c8b4
--- /dev/null
+++ b/tests/lexers/python/test_fstring_23b.txt
@@ -0,0 +1,11 @@
+---input---
+f"{a!r}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'a' Name
+'!r}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_24a.txt b/tests/lexers/python/test_fstring_24a.txt
new file mode 100644
index 00000000..9dd1e94e
--- /dev/null
+++ b/tests/lexers/python/test_fstring_24a.txt
@@ -0,0 +1,23 @@
+---input---
+f'{(lambda x: x*2)(3)}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'(' Punctuation
+'lambda' Keyword
+' ' Text
+'x' Name
+':' Punctuation
+' ' Text
+'x' Name
+'*' Operator
+'2' Literal.Number.Integer
+')' Punctuation
+'(' Punctuation
+'3' Literal.Number.Integer
+')' Punctuation
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_24b.txt b/tests/lexers/python/test_fstring_24b.txt
new file mode 100644
index 00000000..0ef51a52
--- /dev/null
+++ b/tests/lexers/python/test_fstring_24b.txt
@@ -0,0 +1,23 @@
+---input---
+f"{(lambda x: x*2)(3)}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'(' Punctuation
+'lambda' Keyword
+' ' Text
+'x' Name
+':' Punctuation
+' ' Text
+'x' Name
+'*' Operator
+'2' Literal.Number.Integer
+')' Punctuation
+'(' Punctuation
+'3' Literal.Number.Integer
+')' Punctuation
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_25a.txt b/tests/lexers/python/test_fstring_25a.txt
new file mode 100644
index 00000000..e42453e5
--- /dev/null
+++ b/tests/lexers/python/test_fstring_25a.txt
@@ -0,0 +1,24 @@
+---input---
+extra = f'{extra},waiters:{len(self._waiters)}'
+
+---tokens---
+'extra' Name
+' ' Text
+'=' Operator
+' ' Text
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'extra' Name
+'}' Literal.String.Interpol
+',waiters:' Literal.String.Single
+'{' Literal.String.Interpol
+'len' Name.Builtin
+'(' Punctuation
+'self' Name.Builtin.Pseudo
+'.' Operator
+'_waiters' Name
+')' Punctuation
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_25b.txt b/tests/lexers/python/test_fstring_25b.txt
new file mode 100644
index 00000000..2ba4b721
--- /dev/null
+++ b/tests/lexers/python/test_fstring_25b.txt
@@ -0,0 +1,24 @@
+---input---
+extra = f"{extra},waiters:{len(self._waiters)}"
+
+---tokens---
+'extra' Name
+' ' Text
+'=' Operator
+' ' Text
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'extra' Name
+'}' Literal.String.Interpol
+',waiters:' Literal.String.Double
+'{' Literal.String.Interpol
+'len' Name.Builtin
+'(' Punctuation
+'self' Name.Builtin.Pseudo
+'.' Operator
+'_waiters' Name
+')' Punctuation
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_26a.txt b/tests/lexers/python/test_fstring_26a.txt
new file mode 100644
index 00000000..c644d447
--- /dev/null
+++ b/tests/lexers/python/test_fstring_26a.txt
@@ -0,0 +1,20 @@
+---input---
+message.append(f" [line {lineno:2d}]")
+
+---tokens---
+'message' Name
+'.' Operator
+'append' Name
+'(' Punctuation
+'f' Literal.String.Affix
+'"' Literal.String.Double
+' [line ' Literal.String.Double
+'{' Literal.String.Interpol
+'lineno' Name
+':' Literal.String.Interpol
+'2d' Literal.String.Double
+'}' Literal.String.Interpol
+']' Literal.String.Double
+'"' Literal.String.Double
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_26b.txt b/tests/lexers/python/test_fstring_26b.txt
new file mode 100644
index 00000000..73964104
--- /dev/null
+++ b/tests/lexers/python/test_fstring_26b.txt
@@ -0,0 +1,20 @@
+---input---
+message.append(f' [line {lineno:2d}]')
+
+---tokens---
+'message' Name
+'.' Operator
+'append' Name
+'(' Punctuation
+'f' Literal.String.Affix
+"'" Literal.String.Single
+' [line ' Literal.String.Single
+'{' Literal.String.Interpol
+'lineno' Name
+':' Literal.String.Interpol
+'2d' Literal.String.Single
+'}' Literal.String.Interpol
+']' Literal.String.Single
+"'" Literal.String.Single
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_27a.txt b/tests/lexers/python/test_fstring_27a.txt
new file mode 100644
index 00000000..cd8bdbe2
--- /dev/null
+++ b/tests/lexers/python/test_fstring_27a.txt
@@ -0,0 +1,11 @@
+---input---
+f"{foo=}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'foo' Name
+'=}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_27b.txt b/tests/lexers/python/test_fstring_27b.txt
new file mode 100644
index 00000000..7773ab30
--- /dev/null
+++ b/tests/lexers/python/test_fstring_27b.txt
@@ -0,0 +1,11 @@
+---input---
+f'{foo=}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'foo' Name
+'=}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_28a.txt b/tests/lexers/python/test_fstring_28a.txt
new file mode 100644
index 00000000..6aca3216
--- /dev/null
+++ b/tests/lexers/python/test_fstring_28a.txt
@@ -0,0 +1,11 @@
+---input---
+f'{foo=!s}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'foo' Name
+'=!s}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_28b.txt b/tests/lexers/python/test_fstring_28b.txt
new file mode 100644
index 00000000..14183eba
--- /dev/null
+++ b/tests/lexers/python/test_fstring_28b.txt
@@ -0,0 +1,11 @@
+---input---
+f"{foo=!s}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'foo' Name
+'=!s}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_29a.txt b/tests/lexers/python/test_fstring_29a.txt
new file mode 100644
index 00000000..81715064
--- /dev/null
+++ b/tests/lexers/python/test_fstring_29a.txt
@@ -0,0 +1,15 @@
+---input---
+f"{math.pi=!f:.2f}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'math' Name
+'.' Operator
+'pi' Name
+'=!f:' Literal.String.Interpol
+'.2f' Literal.String.Double
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_29b.txt b/tests/lexers/python/test_fstring_29b.txt
new file mode 100644
index 00000000..de5b5083
--- /dev/null
+++ b/tests/lexers/python/test_fstring_29b.txt
@@ -0,0 +1,15 @@
+---input---
+f'{math.pi=!f:.2f}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'math' Name
+'.' Operator
+'pi' Name
+'=!f:' Literal.String.Interpol
+'.2f' Literal.String.Single
+'}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_30a.txt b/tests/lexers/python/test_fstring_30a.txt
new file mode 100644
index 00000000..f295cfe4
--- /dev/null
+++ b/tests/lexers/python/test_fstring_30a.txt
@@ -0,0 +1,16 @@
+---input---
+f"{ chr(65) =}"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+' ' Text
+'chr' Name.Builtin
+'(' Punctuation
+'65' Literal.Number.Integer
+')' Punctuation
+' ' Text
+'=}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_30b.txt b/tests/lexers/python/test_fstring_30b.txt
new file mode 100644
index 00000000..10a2af8e
--- /dev/null
+++ b/tests/lexers/python/test_fstring_30b.txt
@@ -0,0 +1,16 @@
+---input---
+f'{ chr(65) =}'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+' ' Text
+'chr' Name.Builtin
+'(' Punctuation
+'65' Literal.Number.Integer
+')' Punctuation
+' ' Text
+'=}' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_31a.txt b/tests/lexers/python/test_fstring_31a.txt
new file mode 100644
index 00000000..eac01679
--- /dev/null
+++ b/tests/lexers/python/test_fstring_31a.txt
@@ -0,0 +1,15 @@
+---input---
+f"{chr(65) = }"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'chr' Name.Builtin
+'(' Punctuation
+'65' Literal.Number.Integer
+')' Punctuation
+' ' Text
+'= }' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_31b.txt b/tests/lexers/python/test_fstring_31b.txt
new file mode 100644
index 00000000..40702daa
--- /dev/null
+++ b/tests/lexers/python/test_fstring_31b.txt
@@ -0,0 +1,15 @@
+---input---
+f'{chr(65) = }'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'chr' Name.Builtin
+'(' Punctuation
+'65' Literal.Number.Integer
+')' Punctuation
+' ' Text
+'= }' Literal.String.Interpol
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_32a.txt b/tests/lexers/python/test_fstring_32a.txt
new file mode 100644
index 00000000..666134c6
--- /dev/null
+++ b/tests/lexers/python/test_fstring_32a.txt
@@ -0,0 +1,15 @@
+---input---
+f'*{n=:30}*'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'*' Literal.String.Single
+'{' Literal.String.Interpol
+'n' Name
+'=:' Literal.String.Interpol
+'30' Literal.String.Single
+'}' Literal.String.Interpol
+'*' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_32b.txt b/tests/lexers/python/test_fstring_32b.txt
new file mode 100644
index 00000000..9e087252
--- /dev/null
+++ b/tests/lexers/python/test_fstring_32b.txt
@@ -0,0 +1,15 @@
+---input---
+f"*{n=:30}*"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'*' Literal.String.Double
+'{' Literal.String.Interpol
+'n' Name
+'=:' Literal.String.Interpol
+'30' Literal.String.Double
+'}' Literal.String.Interpol
+'*' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_33a.txt b/tests/lexers/python/test_fstring_33a.txt
new file mode 100644
index 00000000..232491b2
--- /dev/null
+++ b/tests/lexers/python/test_fstring_33a.txt
@@ -0,0 +1,15 @@
+---input---
+f'*{n=!r:30}*'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'*' Literal.String.Single
+'{' Literal.String.Interpol
+'n' Name
+'=!r:' Literal.String.Interpol
+'30' Literal.String.Single
+'}' Literal.String.Interpol
+'*' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_33b.txt b/tests/lexers/python/test_fstring_33b.txt
new file mode 100644
index 00000000..1481e811
--- /dev/null
+++ b/tests/lexers/python/test_fstring_33b.txt
@@ -0,0 +1,15 @@
+---input---
+f"*{n=!r:30}*"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'*' Literal.String.Double
+'{' Literal.String.Interpol
+'n' Name
+'=!r:' Literal.String.Interpol
+'30' Literal.String.Double
+'}' Literal.String.Interpol
+'*' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_34a.txt b/tests/lexers/python/test_fstring_34a.txt
new file mode 100644
index 00000000..6bd510a9
--- /dev/null
+++ b/tests/lexers/python/test_fstring_34a.txt
@@ -0,0 +1,20 @@
+---input---
+f"*{f'{n=}':30}*"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'*' Literal.String.Double
+'{' Literal.String.Interpol
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'{' Literal.String.Interpol
+'n' Name
+'=}' Literal.String.Interpol
+"'" Literal.String.Single
+':' Literal.String.Interpol
+'30' Literal.String.Double
+'}' Literal.String.Interpol
+'*' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_34b.txt b/tests/lexers/python/test_fstring_34b.txt
new file mode 100644
index 00000000..4cb7e631
--- /dev/null
+++ b/tests/lexers/python/test_fstring_34b.txt
@@ -0,0 +1,20 @@
+---input---
+f'*{f"{n=}":30}*'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'*' Literal.String.Single
+'{' Literal.String.Interpol
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'{' Literal.String.Interpol
+'n' Name
+'=}' Literal.String.Interpol
+'"' Literal.String.Double
+':' Literal.String.Interpol
+'30' Literal.String.Single
+'}' Literal.String.Interpol
+'*' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_35a.txt b/tests/lexers/python/test_fstring_35a.txt
new file mode 100644
index 00000000..f09edc18
--- /dev/null
+++ b/tests/lexers/python/test_fstring_35a.txt
@@ -0,0 +1,15 @@
+---input---
+f'*{n=:+<30}*'
+
+---tokens---
+'f' Literal.String.Affix
+"'" Literal.String.Single
+'*' Literal.String.Single
+'{' Literal.String.Interpol
+'n' Name
+'=:' Literal.String.Interpol
+'+<30' Literal.String.Single
+'}' Literal.String.Interpol
+'*' Literal.String.Single
+"'" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_35b.txt b/tests/lexers/python/test_fstring_35b.txt
new file mode 100644
index 00000000..794f7b69
--- /dev/null
+++ b/tests/lexers/python/test_fstring_35b.txt
@@ -0,0 +1,15 @@
+---input---
+f"*{n=:+<30}*"
+
+---tokens---
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'*' Literal.String.Double
+'{' Literal.String.Interpol
+'n' Name
+'=:' Literal.String.Interpol
+'+<30' Literal.String.Double
+'}' Literal.String.Interpol
+'*' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_36a.txt b/tests/lexers/python/test_fstring_36a.txt
new file mode 100644
index 00000000..4b6ab230
--- /dev/null
+++ b/tests/lexers/python/test_fstring_36a.txt
@@ -0,0 +1,16 @@
+---input---
+
+f'''{foo
+ = !s:20}'''
+
+---tokens---
+'f' Literal.String.Affix
+"'''" Literal.String.Single
+'{' Literal.String.Interpol
+'foo' Name
+'\n ' Text
+'= !s:' Literal.String.Interpol
+'20' Literal.String.Single
+'}' Literal.String.Interpol
+"'''" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/python/test_fstring_36b.txt b/tests/lexers/python/test_fstring_36b.txt
new file mode 100644
index 00000000..abc9cf3e
--- /dev/null
+++ b/tests/lexers/python/test_fstring_36b.txt
@@ -0,0 +1,16 @@
+---input---
+
+f"""{foo
+ = !s:20}"""
+
+---tokens---
+'f' Literal.String.Affix
+'"""' Literal.String.Double
+'{' Literal.String.Interpol
+'foo' Name
+'\n ' Text
+'= !s:' Literal.String.Interpol
+'20' Literal.String.Double
+'}' Literal.String.Interpol
+'"""' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_needs_name.txt b/tests/lexers/python/test_needs_name.txt
new file mode 100644
index 00000000..60c782c5
--- /dev/null
+++ b/tests/lexers/python/test_needs_name.txt
@@ -0,0 +1,55 @@
+# Tests that '@' is recognized as an Operator
+
+---input---
+S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)
+
+---tokens---
+'S' Name
+' ' Text
+'=' Operator
+' ' Text
+'(' Punctuation
+'H' Name
+' ' Text
+'@' Operator
+' ' Text
+'beta' Name
+' ' Text
+'-' Operator
+' ' Text
+'r' Name
+')' Punctuation
+'.' Operator
+'T' Name
+' ' Text
+'@' Operator
+' ' Text
+'inv' Name
+'(' Punctuation
+'H' Name
+' ' Text
+'@' Operator
+' ' Text
+'V' Name
+' ' Text
+'@' Operator
+' ' Text
+'H' Name
+'.' Operator
+'T' Name
+')' Punctuation
+' ' Text
+'@' Operator
+' ' Text
+'(' Punctuation
+'H' Name
+' ' Text
+'@' Operator
+' ' Text
+'beta' Name
+' ' Text
+'-' Operator
+' ' Text
+'r' Name
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/python/test_pep_515.txt b/tests/lexers/python/test_pep_515.txt
new file mode 100644
index 00000000..a5f2f358
--- /dev/null
+++ b/tests/lexers/python/test_pep_515.txt
@@ -0,0 +1,28 @@
+# Tests that the lexer can parse numeric literals with underscores
+
+---input---
+1_000_000
+1_000.000_001
+1_000e1_000j
+0xCAFE_F00D
+0b_0011_1111_0100_1110
+0o_777_123
+
+---tokens---
+'1_000_000' Literal.Number.Integer
+'\n' Text
+
+'1_000.000_001' Literal.Number.Float
+'\n' Text
+
+'1_000e1_000j' Literal.Number.Float
+'\n' Text
+
+'0xCAFE_F00D' Literal.Number.Hex
+'\n' Text
+
+'0b_0011_1111_0100_1110' Literal.Number.Bin
+'\n' Text
+
+'0o_777_123' Literal.Number.Oct
+'\n' Text
diff --git a/tests/lexers/python/test_raw_fstring.txt b/tests/lexers/python/test_raw_fstring.txt
new file mode 100644
index 00000000..0dd51c1e
--- /dev/null
+++ b/tests/lexers/python/test_raw_fstring.txt
@@ -0,0 +1,46 @@
+# Tests that the lexer can parse raw f-strings
+
+---input---
+rf"m_\nu = x"
+
+f"m_\nu = {x}"
+
+rf"m_{{\nu}} = {x}"
+
+---tokens---
+'rf' Literal.String.Affix
+'"' Literal.String.Double
+'m_' Literal.String.Double
+'\\' Literal.String.Double
+'nu = x' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
+
+'\n' Text
+
+'f' Literal.String.Affix
+'"' Literal.String.Double
+'m_' Literal.String.Double
+'\\n' Literal.String.Escape
+'u = ' Literal.String.Double
+'{' Literal.String.Interpol
+'x' Name
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
+
+'\n' Text
+
+'rf' Literal.String.Affix
+'"' Literal.String.Double
+'m_' Literal.String.Double
+'{{' Literal.String.Escape
+'\\' Literal.String.Double
+'nu' Literal.String.Double
+'}}' Literal.String.Escape
+' = ' Literal.String.Double
+'{' Literal.String.Interpol
+'x' Name
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/python/test_walrus_operator.txt b/tests/lexers/python/test_walrus_operator.txt
new file mode 100644
index 00000000..05935ec9
--- /dev/null
+++ b/tests/lexers/python/test_walrus_operator.txt
@@ -0,0 +1,21 @@
+# Tests that ':=' is recognized as an Operator
+
+---input---
+if (a := 2) > 4:
+
+---tokens---
+'if' Keyword
+' ' Text
+'(' Punctuation
+'a' Name
+' ' Text
+':=' Operator
+' ' Text
+'2' Literal.Number.Integer
+')' Punctuation
+' ' Text
+'>' Operator
+' ' Text
+'4' Literal.Number.Integer
+':' Punctuation
+'\n' Text
diff --git a/tests/lexers/python2/test_cls_builtin.txt b/tests/lexers/python2/test_cls_builtin.txt
new file mode 100644
index 00000000..333845d8
--- /dev/null
+++ b/tests/lexers/python2/test_cls_builtin.txt
@@ -0,0 +1,34 @@
+# Tests that a cls token gets interpreted as a Token.Name.Builtin.Pseudo
+
+---input---
+class TestClass():
+ @classmethod
+ def hello(cls):
+ pass
+
+---tokens---
+'class' Keyword
+' ' Text
+'TestClass' Name.Class
+'(' Punctuation
+')' Punctuation
+':' Punctuation
+'\n' Text
+
+' ' Text
+'@classmethod' Name.Decorator
+'\n' Text
+
+' ' Text
+'def' Keyword
+' ' Text
+'hello' Name.Function
+'(' Punctuation
+'cls' Name.Builtin.Pseudo
+')' Punctuation
+':' Punctuation
+'\n' Text
+
+' ' Text
+'pass' Keyword
+'\n' Text
diff --git a/tests/lexers/qbasic/test_keywords_with_dollar.txt b/tests/lexers/qbasic/test_keywords_with_dollar.txt
new file mode 100644
index 00000000..21c4fbae
--- /dev/null
+++ b/tests/lexers/qbasic/test_keywords_with_dollar.txt
@@ -0,0 +1,22 @@
+---input---
+DIM x
+x = RIGHT$("abc", 1)
+
+---tokens---
+'DIM' Keyword.Declaration
+' ' Text.Whitespace
+'x' Name.Variable.Global
+'\n' Text
+
+'x' Name.Variable.Global
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'RIGHT$' Keyword.Reserved
+'(' Punctuation
+'"abc"' Literal.String.Double
+',' Punctuation
+' ' Text.Whitespace
+'1' Literal.Number.Integer.Long
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/r/test_call.txt b/tests/lexers/r/test_call.txt
new file mode 100644
index 00000000..1e803132
--- /dev/null
+++ b/tests/lexers/r/test_call.txt
@@ -0,0 +1,12 @@
+---input---
+f(1, a)
+
+---tokens---
+'f' Name.Function
+'(' Punctuation
+'1' Literal.Number
+',' Punctuation
+' ' Text
+'a' Name
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/r/test_custom_operator.txt b/tests/lexers/r/test_custom_operator.txt
new file mode 100644
index 00000000..a8261a1e
--- /dev/null
+++ b/tests/lexers/r/test_custom_operator.txt
@@ -0,0 +1,10 @@
+---input---
+7 % and % 8
+
+---tokens---
+'7' Literal.Number
+' ' Text
+'% and %' Operator
+' ' Text
+'8' Literal.Number
+'\n' Text
diff --git a/tests/lexers/r/test_dot_indexing.txt b/tests/lexers/r/test_dot_indexing.txt
new file mode 100644
index 00000000..62944968
--- /dev/null
+++ b/tests/lexers/r/test_dot_indexing.txt
@@ -0,0 +1,9 @@
+---input---
+.[1]
+
+---tokens---
+'.' Name
+'[' Punctuation
+'1' Literal.Number
+']' Punctuation
+'\n' Text
diff --git a/tests/lexers/r/test_dot_name.txt b/tests/lexers/r/test_dot_name.txt
new file mode 100644
index 00000000..513abef0
--- /dev/null
+++ b/tests/lexers/r/test_dot_name.txt
@@ -0,0 +1,10 @@
+---input---
+. <- 1
+
+---tokens---
+'.' Name
+' ' Text
+'<-' Operator
+' ' Text
+'1' Literal.Number
+'\n' Text
diff --git a/tests/lexers/r/test_indexing.txt b/tests/lexers/r/test_indexing.txt
new file mode 100644
index 00000000..396a2d38
--- /dev/null
+++ b/tests/lexers/r/test_indexing.txt
@@ -0,0 +1,9 @@
+---input---
+a[1]
+
+---tokens---
+'a' Name
+'[' Punctuation
+'1' Literal.Number
+']' Punctuation
+'\n' Text
diff --git a/tests/lexers/r/test_name1.txt b/tests/lexers/r/test_name1.txt
new file mode 100644
index 00000000..d685ee7e
--- /dev/null
+++ b/tests/lexers/r/test_name1.txt
@@ -0,0 +1,6 @@
+---input---
+._a_2.c
+
+---tokens---
+'._a_2.c' Name
+'\n' Text
diff --git a/tests/lexers/r/test_name2.txt b/tests/lexers/r/test_name2.txt
new file mode 100644
index 00000000..557650d8
--- /dev/null
+++ b/tests/lexers/r/test_name2.txt
@@ -0,0 +1,8 @@
+# Invalid names are valid if backticks are used
+
+---input---
+`.1 blah`
+
+---tokens---
+'`.1 blah`' Name
+'\n' Text
diff --git a/tests/lexers/r/test_name3.txt b/tests/lexers/r/test_name3.txt
new file mode 100644
index 00000000..52e359b8
--- /dev/null
+++ b/tests/lexers/r/test_name3.txt
@@ -0,0 +1,8 @@
+# Internal backticks can be escaped
+
+---input---
+`.1 \` blah`
+
+---tokens---
+'`.1 \\` blah`' Name
+'\n' Text
diff --git a/tests/lexers/ruby/test_escaped_bracestring.txt b/tests/lexers/ruby/test_escaped_bracestring.txt
new file mode 100644
index 00000000..7bcf845e
--- /dev/null
+++ b/tests/lexers/ruby/test_escaped_bracestring.txt
@@ -0,0 +1,19 @@
+---input---
+str.gsub(%r{\\\\}, "/")
+
+---tokens---
+'str' Name
+'.' Operator
+'gsub' Name
+'(' Punctuation
+'%r{' Literal.String.Regex
+'\\\\' Literal.String.Regex
+'\\\\' Literal.String.Regex
+'}' Literal.String.Regex
+',' Punctuation
+' ' Text
+'"' Literal.String.Double
+'/' Literal.String.Double
+'"' Literal.String.Double
+')' Punctuation
+'\n' Text
diff --git a/tests/lexers/ruby/test_interpolation_nested_curly.txt b/tests/lexers/ruby/test_interpolation_nested_curly.txt
new file mode 100644
index 00000000..eceb04af
--- /dev/null
+++ b/tests/lexers/ruby/test_interpolation_nested_curly.txt
@@ -0,0 +1,56 @@
+---input---
+"A#{ (3..5).group_by { |x| x/2}.map do |k,v| "#{k}" end.join }" + "Z"
+
+---tokens---
+'"' Literal.String.Double
+'A' Literal.String.Double
+'#{' Literal.String.Interpol
+' ' Text
+'(' Punctuation
+'3' Literal.Number.Integer
+'..' Operator
+'5' Literal.Number.Integer
+')' Punctuation
+'.' Operator
+'group_by' Name
+' ' Text
+'{' Literal.String.Interpol
+' ' Text
+'|' Operator
+'x' Name
+'|' Operator
+' ' Text
+'x' Name
+'/' Operator
+'2' Literal.Number.Integer
+'}' Literal.String.Interpol
+'.' Operator
+'map' Name
+' ' Text
+'do' Keyword
+' ' Text
+'|' Operator
+'k' Name
+',' Punctuation
+'v' Name
+'|' Operator
+' ' Text
+'"' Literal.String.Double
+'#{' Literal.String.Interpol
+'k' Name
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+' ' Text
+'end' Keyword
+'.' Operator
+'join' Name
+' ' Text
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+' ' Text
+'+' Operator
+' ' Text
+'"' Literal.String.Double
+'Z' Literal.String.Double
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/ruby/test_operator_methods.txt b/tests/lexers/ruby/test_operator_methods.txt
new file mode 100644
index 00000000..f444ee4f
--- /dev/null
+++ b/tests/lexers/ruby/test_operator_methods.txt
@@ -0,0 +1,9 @@
+---input---
+x.==4
+
+---tokens---
+'x' Name
+'.' Operator
+'==' Name.Operator
+'4' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/ruby/test_range_syntax1.txt b/tests/lexers/ruby/test_range_syntax1.txt
new file mode 100644
index 00000000..df7c40cb
--- /dev/null
+++ b/tests/lexers/ruby/test_range_syntax1.txt
@@ -0,0 +1,8 @@
+---input---
+1..3
+
+---tokens---
+'1' Literal.Number.Integer
+'..' Operator
+'3' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/ruby/test_range_syntax2.txt b/tests/lexers/ruby/test_range_syntax2.txt
new file mode 100644
index 00000000..72d47e85
--- /dev/null
+++ b/tests/lexers/ruby/test_range_syntax2.txt
@@ -0,0 +1,8 @@
+---input---
+1...3
+
+---tokens---
+'1' Literal.Number.Integer
+'...' Operator
+'3' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/ruby/test_range_syntax3.txt b/tests/lexers/ruby/test_range_syntax3.txt
new file mode 100644
index 00000000..a72f8380
--- /dev/null
+++ b/tests/lexers/ruby/test_range_syntax3.txt
@@ -0,0 +1,10 @@
+---input---
+1 .. 3
+
+---tokens---
+'1' Literal.Number.Integer
+' ' Text
+'..' Operator
+' ' Text
+'3' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/shell/test_array_nums.txt b/tests/lexers/shell/test_array_nums.txt
new file mode 100644
index 00000000..a7474a06
--- /dev/null
+++ b/tests/lexers/shell/test_array_nums.txt
@@ -0,0 +1,14 @@
+---input---
+a=(1 2 3)
+
+---tokens---
+'a' Name.Variable
+'=' Operator
+'(' Operator
+'1' Literal.Number
+' ' Text
+'2' Literal.Number
+' ' Text
+'3' Literal.Number
+')' Operator
+'\n' Text
diff --git a/tests/lexers/shell/test_curly_no_escape_and_quotes.txt b/tests/lexers/shell/test_curly_no_escape_and_quotes.txt
new file mode 100644
index 00000000..607b95eb
--- /dev/null
+++ b/tests/lexers/shell/test_curly_no_escape_and_quotes.txt
@@ -0,0 +1,15 @@
+---input---
+echo "${a//["b"]/}"
+
+---tokens---
+'echo' Name.Builtin
+' ' Text
+'"' Literal.String.Double
+'${' Literal.String.Interpol
+'a' Name.Variable
+'//[' Punctuation
+'"b"' Literal.String.Double
+']/' Punctuation
+'}' Literal.String.Interpol
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/shell/test_curly_with_escape.txt b/tests/lexers/shell/test_curly_with_escape.txt
new file mode 100644
index 00000000..b6577876
--- /dev/null
+++ b/tests/lexers/shell/test_curly_with_escape.txt
@@ -0,0 +1,13 @@
+---input---
+echo ${a//[\"]/}
+
+---tokens---
+'echo' Name.Builtin
+' ' Text
+'${' Literal.String.Interpol
+'a' Name.Variable
+'//[' Punctuation
+'\\"' Literal.String.Escape
+']/' Punctuation
+'}' Literal.String.Interpol
+'\n' Text
diff --git a/tests/lexers/shell/test_end_of_line_nums.txt b/tests/lexers/shell/test_end_of_line_nums.txt
new file mode 100644
index 00000000..8164daef
--- /dev/null
+++ b/tests/lexers/shell/test_end_of_line_nums.txt
@@ -0,0 +1,15 @@
+---input---
+a=1
+b=2 # comment
+
+---tokens---
+'a' Name.Variable
+'=' Operator
+'1' Literal.Number
+'\n' Text
+
+'b' Name.Variable
+'=' Operator
+'2' Literal.Number
+' ' Text
+'# comment\n' Comment.Single
diff --git a/tests/lexers/shell/test_parsed_single.txt b/tests/lexers/shell/test_parsed_single.txt
new file mode 100644
index 00000000..c11b3c76
--- /dev/null
+++ b/tests/lexers/shell/test_parsed_single.txt
@@ -0,0 +1,8 @@
+---input---
+a=$'abc\''
+
+---tokens---
+'a' Name.Variable
+'=' Operator
+"$'abc\\''" Literal.String.Single
+'\n' Text
diff --git a/tests/lexers/shell/test_short_variable_names.txt b/tests/lexers/shell/test_short_variable_names.txt
new file mode 100644
index 00000000..5adde793
--- /dev/null
+++ b/tests/lexers/shell/test_short_variable_names.txt
@@ -0,0 +1,26 @@
+---input---
+x="$"
+y="$_"
+z="$abc"
+
+---tokens---
+'x' Name.Variable
+'=' Operator
+'"' Literal.String.Double
+'$' Text
+'"' Literal.String.Double
+'\n' Text
+
+'y' Name.Variable
+'=' Operator
+'"' Literal.String.Double
+'$_' Name.Variable
+'"' Literal.String.Double
+'\n' Text
+
+'z' Name.Variable
+'=' Operator
+'"' Literal.String.Double
+'$abc' Name.Variable
+'"' Literal.String.Double
+'\n' Text
diff --git a/tests/lexers/shexc/test_prefixed_name_starting_with_number.txt b/tests/lexers/shexc/test_prefixed_name_starting_with_number.txt
new file mode 100644
index 00000000..ca1c2937
--- /dev/null
+++ b/tests/lexers/shexc/test_prefixed_name_starting_with_number.txt
@@ -0,0 +1,8 @@
+---input---
+alice:6f6e4241-75a2-4780-9b2a-40da53082e54
+
+---tokens---
+'alice' Name.Namespace
+':' Punctuation
+'6f6e4241-75a2-4780-9b2a-40da53082e54' Name.Tag
+'\n' Text
diff --git a/tests/lexers/smarty/test_nested_curly.txt b/tests/lexers/smarty/test_nested_curly.txt
new file mode 100644
index 00000000..493aa498
--- /dev/null
+++ b/tests/lexers/smarty/test_nested_curly.txt
@@ -0,0 +1,18 @@
+---input---
+{templateFunction param={anotherFunction} param2=$something}
+
+---tokens---
+'{' Comment.Preproc
+'templateFunction' Name.Function
+' ' Text
+'param' Name.Attribute
+'=' Operator
+'{' Comment.Preproc
+'anotherFunction' Name.Attribute
+'}' Comment.Preproc
+' ' Text
+'param2' Name.Attribute
+'=' Operator
+'$something' Name.Variable
+'}' Comment.Preproc
+'\n' Other
diff --git a/tests/lexers/systemverilog/test_basic.txt b/tests/lexers/systemverilog/test_basic.txt
new file mode 100644
index 00000000..9307538d
--- /dev/null
+++ b/tests/lexers/systemverilog/test_basic.txt
@@ -0,0 +1,181 @@
+# Examine tokens emitted by the SV lexer for a trivial module.
+# Not intended to stress any particular corner of the language.
+
+---input---
+// Adder flops the sum of its inputs
+module Adder #(
+ parameter int N = 42
+) (
+ output logic [N-1:0] y,
+ output logic co,
+
+ input logic [N-1:0] a,
+ input logic [N-1:0] b,
+ input logic ci,
+
+ input logic clk
+);
+ always_ff @(posedge clk) begin
+ {co, y} <= a + b + ci;
+ end
+endmodule : Adder
+
+---tokens---
+'// Adder flops the sum of its inputs\n' Comment.Single
+
+'module' Keyword
+' ' Text
+'Adder' Name
+' ' Text
+'#' Punctuation
+'(' Punctuation
+'\n' Text
+
+' ' Text
+'parameter' Keyword
+' ' Text
+'int' Keyword.Type
+' ' Text
+'N' Name
+' ' Text
+'=' Operator
+' ' Text
+'42' Literal.Number.Integer
+'\n' Text
+
+')' Punctuation
+' ' Text
+'(' Punctuation
+'\n' Text
+
+' ' Text
+'output' Keyword
+' ' Text
+'logic' Keyword.Type
+' ' Text
+'[' Punctuation
+'N' Name
+'-' Operator
+'1' Literal.Number.Integer
+':' Operator
+'0' Literal.Number.Integer
+']' Punctuation
+' ' Text
+'y' Name
+',' Punctuation
+'\n' Text
+
+' ' Text
+'output' Keyword
+' ' Text
+'logic' Keyword.Type
+' ' Text
+'co' Name
+',' Punctuation
+'\n' Text
+
+'\n' Text
+
+' ' Text
+'input' Keyword
+' ' Text
+'logic' Keyword.Type
+' ' Text
+'[' Punctuation
+'N' Name
+'-' Operator
+'1' Literal.Number.Integer
+':' Operator
+'0' Literal.Number.Integer
+']' Punctuation
+' ' Text
+'a' Name
+',' Punctuation
+'\n' Text
+
+' ' Text
+'input' Keyword
+' ' Text
+'logic' Keyword.Type
+' ' Text
+'[' Punctuation
+'N' Name
+'-' Operator
+'1' Literal.Number.Integer
+':' Operator
+'0' Literal.Number.Integer
+']' Punctuation
+' ' Text
+'b' Name
+',' Punctuation
+'\n' Text
+
+' ' Text
+'input' Keyword
+' ' Text
+'logic' Keyword.Type
+' ' Text
+'ci' Name
+',' Punctuation
+'\n' Text
+
+'\n' Text
+
+' ' Text
+'input' Keyword
+' ' Text
+'logic' Keyword.Type
+' ' Text
+'clk' Name
+'\n' Text
+
+')' Punctuation
+';' Punctuation
+'\n' Text
+
+' ' Text
+'always_ff' Keyword
+' ' Text
+'@' Punctuation
+'(' Punctuation
+'posedge' Keyword
+' ' Text
+'clk' Name
+')' Punctuation
+' ' Text
+'begin' Keyword
+'\n' Text
+
+' ' Text
+'{' Punctuation
+'co' Name
+',' Punctuation
+' ' Text
+'y' Name
+'}' Punctuation
+' ' Text
+'<' Operator
+'=' Operator
+' ' Text
+'a' Name
+' ' Text
+'+' Operator
+' ' Text
+'b' Name
+' ' Text
+'+' Operator
+' ' Text
+'ci' Name
+';' Punctuation
+'\n' Text
+
+' ' Text
+'end' Keyword
+'\n' Text
+
+'endmodule' Keyword
+' ' Text
+':' Operator
+' ' Text
+'Adder' Name
+'\n' Text
diff --git a/tests/lexers/systemverilog/test_classes.txt b/tests/lexers/systemverilog/test_classes.txt
new file mode 100644
index 00000000..fce86a72
--- /dev/null
+++ b/tests/lexers/systemverilog/test_classes.txt
@@ -0,0 +1,95 @@
+# Most of the interesting types of class declarations
+
+---input---
+class Foo;
+endclass
+
+class Bar;
+endclass : Bar
+
+class Fiz extends Buz;
+endclass : Fiz
+
+class Free #(parameter type T = byte) extends Beer #(T);
+endclass : Free
+
+---tokens---
+'class' Keyword.Declaration
+' ' Text
+'Foo' Name.Class
+';' Punctuation
+'\n' Text
+
+'endclass' Keyword.Declaration
+'\n' Text
+
+'\n' Text
+
+'class' Keyword.Declaration
+' ' Text
+'Bar' Name.Class
+';' Punctuation
+'\n' Text
+
+'endclass' Keyword.Declaration
+' ' Text
+':' Punctuation
+' ' Text
+'Bar' Name.Class
+'\n' Text
+
+'\n' Text
+
+'class' Keyword.Declaration
+' ' Text
+'Fiz' Name.Class
+' ' Text
+'extends' Keyword.Declaration
+' ' Text
+'Buz' Name.Class
+';' Punctuation
+'\n' Text
+
+'endclass' Keyword.Declaration
+' ' Text
+':' Punctuation
+' ' Text
+'Fiz' Name.Class
+'\n' Text
+
+'\n' Text
+
+'class' Keyword.Declaration
+' ' Text
+'Free' Name.Class
+' ' Text
+'#' Punctuation
+'(' Punctuation
+'parameter' Keyword
+' ' Text
+'type' Keyword.Type
+' ' Text
+'T' Name
+' ' Text
+'=' Operator
+' ' Text
+'byte' Keyword.Type
+')' Punctuation
+' ' Text
+'extends' Keyword.Declaration
+' ' Text
+'Beer' Name.Class
+' ' Text
+'#' Punctuation
+'(' Punctuation
+'T' Name
+')' Punctuation
+';' Punctuation
+'\n' Text
+
+'endclass' Keyword.Declaration
+' ' Text
+':' Punctuation
+' ' Text
+'Free' Name.Class
+'\n' Text
diff --git a/tests/lexers/systemverilog/test_numbers.txt b/tests/lexers/systemverilog/test_numbers.txt
new file mode 100644
index 00000000..fa0ee5f6
--- /dev/null
+++ b/tests/lexers/systemverilog/test_numbers.txt
@@ -0,0 +1,168 @@
+# Believe it or not, SystemVerilog supports spaces before and after the base
+# specifier (ie 'b, 'd, 'h). See IEEE 1800-2017 Section 5.7.1 for examples.
+
+---input---
+8'b10101010
+8 'b10101010
+8'b 10101010
+8'sb10101010
+8'Sb10101010
+8'B10101010
+8'b1010_1010
+8'b10xXzZ?10
+
+24'o01234567
+24 'o01234567
+24'o 01234567
+24'so01234567
+24'So01234567
+24'O01234567
+24'o0123_4567
+24'o01xXzZ?7
+
+32'd27182818
+32 'd27182818
+32'd 27182818
+32'sd27182818
+32'Sd27182818
+32'D27182818
+32'd2718_2818
+32'd27xXzZ?8
+
+32'hdeadbeef
+32 'hdeadbeef
+32'h deadbeef
+32'shdeadbeef
+32'Shdeadbeef
+32'Hdeadbeef
+32'hdead_beef
+32'hdexXzZ?f
+
+'0 '1 'x 'X 'z 'Z
+
+42 1234_5678
+
+---tokens---
+"8'b10101010" Literal.Number.Bin
+'\n' Text
+
+"8 'b10101010" Literal.Number.Bin
+'\n' Text
+
+"8'b 10101010" Literal.Number.Bin
+'\n' Text
+
+"8'sb10101010" Literal.Number.Bin
+'\n' Text
+
+"8'Sb10101010" Literal.Number.Bin
+'\n' Text
+
+"8'B10101010" Literal.Number.Bin
+'\n' Text
+
+"8'b1010_1010" Literal.Number.Bin
+'\n' Text
+
+"8'b10xXzZ?10" Literal.Number.Bin
+'\n' Text
+
+'\n' Text
+
+"24'o01234567" Literal.Number.Oct
+'\n' Text
+
+"24 'o01234567" Literal.Number.Oct
+'\n' Text
+
+"24'o 01234567" Literal.Number.Oct
+'\n' Text
+
+"24'so01234567" Literal.Number.Oct
+'\n' Text
+
+"24'So01234567" Literal.Number.Oct
+'\n' Text
+
+"24'O01234567" Literal.Number.Oct
+'\n' Text
+
+"24'o0123_4567" Literal.Number.Oct
+'\n' Text
+
+"24'o01xXzZ?7" Literal.Number.Oct
+'\n' Text
+
+'\n' Text
+
+"32'd27182818" Literal.Number.Integer
+'\n' Text
+
+"32 'd27182818" Literal.Number.Integer
+'\n' Text
+
+"32'd 27182818" Literal.Number.Integer
+'\n' Text
+
+"32'sd27182818" Literal.Number.Integer
+'\n' Text
+
+"32'Sd27182818" Literal.Number.Integer
+'\n' Text
+
+"32'D27182818" Literal.Number.Integer
+'\n' Text
+
+"32'd2718_2818" Literal.Number.Integer
+'\n' Text
+
+"32'd27xXzZ?8" Literal.Number.Integer
+'\n' Text
+
+'\n' Text
+
+"32'hdeadbeef" Literal.Number.Hex
+'\n' Text
+
+"32 'hdeadbeef" Literal.Number.Hex
+'\n' Text
+
+"32'h deadbeef" Literal.Number.Hex
+'\n' Text
+
+"32'shdeadbeef" Literal.Number.Hex
+'\n' Text
+
+"32'Shdeadbeef" Literal.Number.Hex
+'\n' Text
+
+"32'Hdeadbeef" Literal.Number.Hex
+'\n' Text
+
+"32'hdead_beef" Literal.Number.Hex
+'\n' Text
+
+"32'hdexXzZ?f" Literal.Number.Hex
+'\n' Text
+
+'\n' Text
+
+"'0" Literal.Number
+' ' Text
+"'1" Literal.Number
+' ' Text
+"'x" Literal.Number
+' ' Text
+"'X" Literal.Number
+' ' Text
+"'z" Literal.Number
+' ' Text
+"'Z" Literal.Number
+'\n' Text
+
+'\n' Text
+
+'42' Literal.Number.Integer
+' ' Text
+'1234_5678' Literal.Number.Integer
+'\n' Text
diff --git a/tests/lexers/systemverilog/test_operators.txt b/tests/lexers/systemverilog/test_operators.txt
new file mode 100644
index 00000000..5c8dc2e1
--- /dev/null
+++ b/tests/lexers/systemverilog/test_operators.txt
@@ -0,0 +1,213 @@
+# See 1800-2017 Table 11-2: Operator Precedence and Associativity
+# Note that the duplicates (unary/binary) have been removed,
+# ie '+', '-', '&', '|', '^', '~^', '^~'
+# Note: This is a inconsistent mix of operator and punctuation
+# Note: Operators would ideally be represented as one token: ':' ':' -> '::', '~' '&' -> '~&'
+
+---input---
+() [] :: .
++ - ! ~ & ~& | ~| ^ ~^ ^~ ++ --
+**
+* / %
+<< >> <<< >>>
+< <= > >= inside dist
+== != === !== ==? !=?
+&&
+||
+?:
+-> <->
+= += -= *= /= %= &= ^= |= <<= >>= <<<= >>>= := :/ <=
+{} {{}}
+
+---tokens---
+'(' Punctuation
+')' Punctuation
+' ' Text
+'[' Punctuation
+']' Punctuation
+' ' Text
+':' Operator
+':' Operator
+' ' Text
+'.' Punctuation
+'\n' Text
+
+'+' Operator
+' ' Text
+'-' Operator
+' ' Text
+'!' Operator
+' ' Text
+'~' Operator
+' ' Text
+'&' Operator
+' ' Text
+'~' Operator
+'&' Operator
+' ' Text
+'|' Operator
+' ' Text
+'~' Operator
+'|' Operator
+' ' Text
+'^' Operator
+' ' Text
+'~' Operator
+'^' Operator
+' ' Text
+'^' Operator
+'~' Operator
+' ' Text
+'+' Operator
+'+' Operator
+' ' Text
+'-' Operator
+'-' Operator
+'\n' Text
+
+'*' Operator
+'*' Operator
+'\n' Text
+
+'*' Operator
+' ' Text
+'/' Operator
+' ' Text
+'%' Operator
+'\n' Text
+
+'<' Operator
+'<' Operator
+' ' Text
+'>' Operator
+'>' Operator
+' ' Text
+'<' Operator
+'<' Operator
+'<' Operator
+' ' Text
+'>' Operator
+'>' Operator
+'>' Operator
+'\n' Text
+
+'<' Operator
+' ' Text
+'<' Operator
+'=' Operator
+' ' Text
+'>' Operator
+' ' Text
+'>' Operator
+'=' Operator
+' ' Text
+'inside' Operator.Word
+' ' Text
+'dist' Operator.Word
+'\n' Text
+
+'=' Operator
+'=' Operator
+' ' Text
+'!' Operator
+'=' Operator
+' ' Text
+'=' Operator
+'=' Operator
+'=' Operator
+' ' Text
+'!' Operator
+'=' Operator
+'=' Operator
+' ' Text
+'=' Operator
+'=' Operator
+'?' Operator
+' ' Text
+'!' Operator
+'=' Operator
+'?' Operator
+'\n' Text
+
+'&' Operator
+'&' Operator
+'\n' Text
+
+'|' Operator
+'|' Operator
+'\n' Text
+
+'?' Operator
+':' Operator
+'\n' Text
+
+'-' Operator
+'>' Operator
+' ' Text
+'<' Operator
+'-' Operator
+'>' Operator
+'\n' Text
+
+'=' Operator
+' ' Text
+'+' Operator
+'=' Operator
+' ' Text
+'-' Operator
+'=' Operator
+' ' Text
+'*' Operator
+'=' Operator
+' ' Text
+'/' Operator
+'=' Operator
+' ' Text
+'%' Operator
+'=' Operator
+' ' Text
+'&' Operator
+'=' Operator
+' ' Text
+'^' Operator
+'=' Operator
+' ' Text
+'|' Operator
+'=' Operator
+' ' Text
+'<' Operator
+'<' Operator
+'=' Operator
+' ' Text
+'>' Operator
+'>' Operator
+'=' Operator
+' ' Text
+'<' Operator
+'<' Operator
+'<' Operator
+'=' Operator
+' ' Text
+'>' Operator
+'>' Operator
+'>' Operator
+'=' Operator
+' ' Text
+':' Operator
+'=' Operator
+' ' Text
+':' Operator
+'/' Operator
+' ' Text
+'<' Operator
+'=' Operator
+'\n' Text
+
+'{' Punctuation
+'}' Punctuation
+' ' Text
+'{' Punctuation
+'{' Punctuation
+'}' Punctuation
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/turtle/test_prefixed_name_starting_with_number.txt b/tests/lexers/turtle/test_prefixed_name_starting_with_number.txt
new file mode 100644
index 00000000..ca1c2937
--- /dev/null
+++ b/tests/lexers/turtle/test_prefixed_name_starting_with_number.txt
@@ -0,0 +1,8 @@
+---input---
+alice:6f6e4241-75a2-4780-9b2a-40da53082e54
+
+---tokens---
+'alice' Name.Namespace
+':' Punctuation
+'6f6e4241-75a2-4780-9b2a-40da53082e54' Name.Tag
+'\n' Text
diff --git a/tests/lexers/typescript/test_function_definition.txt b/tests/lexers/typescript/test_function_definition.txt
new file mode 100644
index 00000000..a2e10a8b
--- /dev/null
+++ b/tests/lexers/typescript/test_function_definition.txt
@@ -0,0 +1,18 @@
+---input---
+async function main() {
+}
+
+---tokens---
+'async' Keyword
+' ' Text
+'function' Keyword.Declaration
+' ' Text
+'main' Name.Other
+'(' Punctuation
+')' Punctuation
+' ' Text
+'{' Punctuation
+'\n' Text
+
+'}' Punctuation
+'\n' Text
diff --git a/tests/lexers/usd/test_attribute.txt b/tests/lexers/usd/test_attribute.txt
new file mode 100644
index 00000000..74e6789f
--- /dev/null
+++ b/tests/lexers/usd/test_attribute.txt
@@ -0,0 +1,174 @@
+---input---
+double foo = 8.0
+
+custom double foo = 8.0
+
+uniform double foo = 8.0
+
+custom uniform double foo = 8.0
+
+custom double foo_underscore_name = 8.0
+
+double[] foo_underscore_name = [10.1, 12.0, 13]
+
+double[] primvar:foo_thing = [10.1, 12.0, 13]
+
+custom int[] foo = [8, 10, 14]
+
+custom int[] foo.timeSamples = {
+ 1: [8, 0, 14],
+ 2: [-8, 0, 14],
+}
+
+---tokens---
+'double' Keyword.Type
+' ' Text.Whitespace
+'foo' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'8.0' Literal.Number
+'\n\n' Text.Whitespace
+
+'custom' Keyword.Token
+' ' Text.Whitespace
+'double' Keyword.Type
+' ' Text.Whitespace
+'foo' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'8.0' Literal.Number
+'\n\n' Text.Whitespace
+
+'uniform' Keyword.Token
+' ' Text.Whitespace
+'double' Keyword.Type
+' ' Text.Whitespace
+'foo' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'8.0' Literal.Number
+'\n\n' Text.Whitespace
+
+'custom' Keyword.Token
+' ' Text.Whitespace
+'uniform' Keyword.Token
+' ' Text.Whitespace
+'double' Keyword.Type
+' ' Text.Whitespace
+'foo' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'8.0' Literal.Number
+'\n\n' Text.Whitespace
+
+'custom' Keyword.Token
+' ' Text.Whitespace
+'double' Keyword.Type
+' ' Text.Whitespace
+'foo_underscore_name' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'8.0' Literal.Number
+'\n\n' Text.Whitespace
+
+'double[]' Keyword.Type
+' ' Text.Whitespace
+'foo_underscore_name' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'[' Punctuation
+'10.1' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'12.0' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'13' Literal.Number
+']' Punctuation
+'\n\n' Text.Whitespace
+
+'double[]' Keyword.Type
+' ' Text.Whitespace
+'primvar:foo_thing' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'[' Punctuation
+'10.1' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'12.0' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'13' Literal.Number
+']' Punctuation
+'\n\n' Text.Whitespace
+
+'custom' Keyword.Token
+' ' Text.Whitespace
+'int[]' Keyword.Type
+' ' Text.Whitespace
+'foo' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'[' Punctuation
+'8' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'10' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'14' Literal.Number
+']' Punctuation
+'\n\n' Text.Whitespace
+
+'custom' Keyword.Token
+' ' Text.Whitespace
+'int[]' Keyword.Type
+' ' Text.Whitespace
+'foo' Name.Attribute
+'.' Text
+'timeSamples' Name.Keyword.Tokens
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'{' Punctuation
+'\n ' Text.Whitespace
+'1' Literal.Number
+':' Punctuation
+' ' Text.Whitespace
+'[' Punctuation
+'8' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'0' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'14' Literal.Number
+']' Punctuation
+',' Punctuation
+'\n ' Text.Whitespace
+'2' Literal.Number
+':' Punctuation
+' ' Text.Whitespace
+'[' Punctuation
+'-8' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'0' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'14' Literal.Number
+']' Punctuation
+',' Punctuation
+'\n' Text.Whitespace
+
+'}' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_composition_arcs.txt b/tests/lexers/usd/test_composition_arcs.txt
new file mode 100644
index 00000000..0270c939
--- /dev/null
+++ b/tests/lexers/usd/test_composition_arcs.txt
@@ -0,0 +1,101 @@
+---input---
+def Xform "BottleMedical" (
+ kind = "prop"
+ payload = @./BottleMedical_payload.usd@</BottleMedical>
+ variants = {
+ string modelingVariant = "LiquidBottleLg"
+ string shadingComplexity = "full"
+ }
+ add variantSets = ["modelingVariant", "shadingComplexity"]
+)
+{
+ variantSet "modelingVariant" = {
+ "ALL_VARIANTS" {
+ }
+ }
+}
+
+---tokens---
+'def' Keyword.Tokens
+' ' Text.Whitespace
+'Xform' Text
+' ' Text.Whitespace
+'"BottleMedical"' Literal.String
+' ' Text.Whitespace
+'(' Punctuation
+'\n ' Text.Whitespace
+'kind' Name.Builtins
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'"prop"' Literal.String
+'\n ' Text.Whitespace
+'payload' Keyword.Tokens
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'@./BottleMedical_payload.usd@' Literal.String.Interpol
+'</BottleMedical>' Name.Namespace
+'\n ' Text.Whitespace
+'variants' Keyword.Tokens
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'{' Punctuation
+'\n ' Text.Whitespace
+'string' Keyword.Type
+' ' Text.Whitespace
+'modelingVariant' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'"LiquidBottleLg"' Literal.String
+'\n ' Text.Whitespace
+'string' Keyword.Type
+' ' Text.Whitespace
+'shadingComplexity' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'"full"' Literal.String
+'\n ' Text.Whitespace
+'}' Punctuation
+'\n ' Text.Whitespace
+'add' Keyword.Type
+' ' Text.Whitespace
+'variantSets' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'[' Punctuation
+'"modelingVariant"' Literal.String
+',' Punctuation
+' ' Text.Whitespace
+'"shadingComplexity"' Literal.String
+']' Punctuation
+'\n' Text.Whitespace
+
+')' Punctuation
+'\n' Text.Whitespace
+
+'{' Punctuation
+'\n ' Text.Whitespace
+'variantSet' Keyword.Tokens
+' ' Text.Whitespace
+'"modelingVariant"' Literal.String
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'{' Punctuation
+'\n ' Text.Whitespace
+'"ALL_VARIANTS"' Literal.String
+' ' Text.Whitespace
+'{' Punctuation
+'\n ' Text.Whitespace
+'}' Punctuation
+'\n ' Text.Whitespace
+'}' Punctuation
+'\n' Text.Whitespace
+
+'}' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_metadata.txt b/tests/lexers/usd/test_metadata.txt
new file mode 100644
index 00000000..edc46147
--- /dev/null
+++ b/tests/lexers/usd/test_metadata.txt
@@ -0,0 +1,36 @@
+# Make sure metadata [the stuff inside ()s] don't match as Attributes.
+
+---input---
+float[] primvars:skel:jointWeights = [1] (
+ elementSize = 1
+ interpolation = "constant"
+)
+
+---tokens---
+'float[]' Keyword.Type
+' ' Text.Whitespace
+'primvars:skel:jointWeights' Name.Attribute
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'[' Punctuation
+'1' Literal.Number
+']' Punctuation
+' ' Text.Whitespace
+'(' Punctuation
+'\n ' Text.Whitespace
+'elementSize' Name.Builtins
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'1' Literal.Number
+'\n ' Text.Whitespace
+'interpolation' Name.Builtins
+' ' Text.Whitespace
+'=' Operator
+' ' Text.Whitespace
+'"constant"' Literal.String
+'\n' Text.Whitespace
+
+')' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_numbers.txt b/tests/lexers/usd/test_numbers.txt
new file mode 100644
index 00000000..a4e0f888
--- /dev/null
+++ b/tests/lexers/usd/test_numbers.txt
@@ -0,0 +1,21 @@
+---input---
+8 8.0123312132, -4 -14.123 1e10 0.1e10 10.123e+10 0.123e-14
+
+---tokens---
+'8' Literal.Number
+' ' Text.Whitespace
+'8.0123312132' Literal.Number
+',' Punctuation
+' ' Text.Whitespace
+'-4' Literal.Number
+' ' Text.Whitespace
+'-14.123' Literal.Number
+' ' Text.Whitespace
+'1e10' Literal.Number
+' ' Text.Whitespace
+'0.1e10' Literal.Number
+' ' Text.Whitespace
+'10.123e+10' Literal.Number
+' ' Text.Whitespace
+'0.123e-14' Literal.Number
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_outer_match_at_sign.txt b/tests/lexers/usd/test_outer_match_at_sign.txt
new file mode 100644
index 00000000..de0bc722
--- /dev/null
+++ b/tests/lexers/usd/test_outer_match_at_sign.txt
@@ -0,0 +1,14 @@
+# Make sure that text between located between quotes and @@s are not matched.
+
+---input---
+@firststring@ something else @secondstring@
+
+---tokens---
+'@firststring@' Literal.String.Interpol
+' ' Text.Whitespace
+'something' Text
+' ' Text.Whitespace
+'else' Text
+' ' Text.Whitespace
+'@secondstring@' Literal.String.Interpol
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_outer_match_double.txt b/tests/lexers/usd/test_outer_match_double.txt
new file mode 100644
index 00000000..773da130
--- /dev/null
+++ b/tests/lexers/usd/test_outer_match_double.txt
@@ -0,0 +1,12 @@
+---input---
+'firststring' something else 'secondstring'
+
+---tokens---
+"'firststring'" Literal.String
+' ' Text.Whitespace
+'something' Text
+' ' Text.Whitespace
+'else' Text
+' ' Text.Whitespace
+"'secondstring'" Literal.String
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_outer_match_single.txt b/tests/lexers/usd/test_outer_match_single.txt
new file mode 100644
index 00000000..773da130
--- /dev/null
+++ b/tests/lexers/usd/test_outer_match_single.txt
@@ -0,0 +1,12 @@
+---input---
+'firststring' something else 'secondstring'
+
+---tokens---
+"'firststring'" Literal.String
+' ' Text.Whitespace
+'something' Text
+' ' Text.Whitespace
+'else' Text
+' ' Text.Whitespace
+"'secondstring'" Literal.String
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_string_multiple_line.txt b/tests/lexers/usd/test_string_multiple_line.txt
new file mode 100644
index 00000000..b3ab1fdd
--- /dev/null
+++ b/tests/lexers/usd/test_string_multiple_line.txt
@@ -0,0 +1,20 @@
+---input---
+"""
+Some text multiline
+"""
+
+"""Some text multiline
+"""
+
+"""
+Some text multiline"""
+
+---tokens---
+'"""\nSome text multiline\n"""' Literal.String
+'\n\n' Text.Whitespace
+
+'"""Some text multiline\n"""' Literal.String
+'\n\n' Text.Whitespace
+
+'"""\nSome text multiline"""' Literal.String
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_string_priority.txt b/tests/lexers/usd/test_string_priority.txt
new file mode 100644
index 00000000..481a416b
--- /dev/null
+++ b/tests/lexers/usd/test_string_priority.txt
@@ -0,0 +1,10 @@
+# Make sure that no other rules override a string match.
+
+---input---
+"""
+custom int[] foo = [8, 10, 14]
+"""
+
+---tokens---
+'"""\ncustom int[] foo = [8, 10, 14]\n"""' Literal.String
+'\n' Text.Whitespace
diff --git a/tests/lexers/usd/test_string_single_line.txt b/tests/lexers/usd/test_string_single_line.txt
new file mode 100644
index 00000000..b5058ce0
--- /dev/null
+++ b/tests/lexers/usd/test_string_single_line.txt
@@ -0,0 +1,6 @@
+---input---
+"Some 'text"
+
+---tokens---
+'"Some \'text"' Literal.String
+'\n' Text.Whitespace
diff --git a/tests/lexers/vbscript/test_floats.txt b/tests/lexers/vbscript/test_floats.txt
new file mode 100644
index 00000000..9493a3b0
--- /dev/null
+++ b/tests/lexers/vbscript/test_floats.txt
@@ -0,0 +1,34 @@
+---input---
+1.
+1.e1
+.1
+1.2
+1.2e3
+1.2e+3
+1.2e-3
+1e2
+
+---tokens---
+'1.' Literal.Number.Float
+'\n' Text.Whitespace
+
+'1.e1' Literal.Number.Float
+'\n' Text.Whitespace
+
+'.1' Literal.Number.Float
+'\n' Text.Whitespace
+
+'1.2' Literal.Number.Float
+'\n' Text.Whitespace
+
+'1.2e3' Literal.Number.Float
+'\n' Text.Whitespace
+
+'1.2e+3' Literal.Number.Float
+'\n' Text.Whitespace
+
+'1.2e-3' Literal.Number.Float
+'\n' Text.Whitespace
+
+'1e2' Literal.Number.Float
+'\n' Text.Whitespace
diff --git a/tests/lexers/vbscript/test_floats_multiple.txt b/tests/lexers/vbscript/test_floats_multiple.txt
new file mode 100644
index 00000000..30a37087
--- /dev/null
+++ b/tests/lexers/vbscript/test_floats_multiple.txt
@@ -0,0 +1,7 @@
+---input---
+1e2.1e2
+
+---tokens---
+'1e2' Literal.Number.Float
+'.1e2' Literal.Number.Float
+'\n' Text.Whitespace
diff --git a/tests/lexers/vbscript/test_integers.txt b/tests/lexers/vbscript/test_integers.txt
new file mode 100644
index 00000000..132ef7e2
--- /dev/null
+++ b/tests/lexers/vbscript/test_integers.txt
@@ -0,0 +1,14 @@
+---input---
+1
+23
+456
+
+---tokens---
+'1' Literal.Number.Integer
+'\n' Text.Whitespace
+
+'23' Literal.Number.Integer
+'\n' Text.Whitespace
+
+'456' Literal.Number.Integer
+'\n' Text.Whitespace
diff --git a/tests/lexers/vbscript/test_invalid_character.txt b/tests/lexers/vbscript/test_invalid_character.txt
new file mode 100644
index 00000000..6a1e6f13
--- /dev/null
+++ b/tests/lexers/vbscript/test_invalid_character.txt
@@ -0,0 +1,10 @@
+---input---
+a;bc
+d
+
+---tokens---
+'a' Name
+';bc\n' Error
+
+'d' Name
+'\n' Text.Whitespace
diff --git a/tests/lexers/vbscript/test_names.txt b/tests/lexers/vbscript/test_names.txt
new file mode 100644
index 00000000..404844f0
--- /dev/null
+++ b/tests/lexers/vbscript/test_names.txt
@@ -0,0 +1,18 @@
+---input---
+thingy
+thingy123
+_thingy
+_123
+
+---tokens---
+'thingy' Name
+'\n' Text.Whitespace
+
+'thingy123' Name
+'\n' Text.Whitespace
+
+'_thingy' Name
+'\n' Text.Whitespace
+
+'_123' Name
+'\n' Text.Whitespace
diff --git a/tests/lexers/vbscript/test_reject_almost_float.txt b/tests/lexers/vbscript/test_reject_almost_float.txt
new file mode 100644
index 00000000..478e6a3c
--- /dev/null
+++ b/tests/lexers/vbscript/test_reject_almost_float.txt
@@ -0,0 +1,7 @@
+---input---
+.e1
+
+---tokens---
+'.' Punctuation
+'e1' Name
+'\n' Text.Whitespace
diff --git a/tests/lexers/vbscript/test_unterminated_string.txt b/tests/lexers/vbscript/test_unterminated_string.txt
new file mode 100644
index 00000000..e92060bd
--- /dev/null
+++ b/tests/lexers/vbscript/test_unterminated_string.txt
@@ -0,0 +1,7 @@
+---input---
+"x\nx
+
+---tokens---
+'"' Literal.String.Double
+'x\\nx' Literal.String.Double
+'\n' Error
diff --git a/tests/lexers/whiley/test_whiley_operator.txt b/tests/lexers/whiley/test_whiley_operator.txt
new file mode 100644
index 00000000..50761db9
--- /dev/null
+++ b/tests/lexers/whiley/test_whiley_operator.txt
@@ -0,0 +1,10 @@
+---input---
+123 ∀ x
+
+---tokens---
+'123' Literal.Number.Integer
+' ' Text
+'∀' Operator
+' ' Text
+'x' Name
+'\n' Text
diff --git a/tests/lexers/yaml/test_yaml.txt b/tests/lexers/yaml/test_yaml.txt
new file mode 100644
index 00000000..fcdd6105
--- /dev/null
+++ b/tests/lexers/yaml/test_yaml.txt
@@ -0,0 +1,13 @@
+# Bug #1528: This previously parsed 'token # innocent' as a tag
+
+---input---
+here: token # innocent: comment
+
+---tokens---
+'here' Name.Tag
+':' Punctuation
+' ' Text
+'token' Literal.Scalar.Plain
+' ' Text
+'# innocent: comment' Comment.Single
+'\n' Text
diff --git a/tests/lexers/yang/test_float_value.txt b/tests/lexers/yang/test_float_value.txt
new file mode 100644
index 00000000..b49f479e
--- /dev/null
+++ b/tests/lexers/yang/test_float_value.txt
@@ -0,0 +1,11 @@
+# Float value `1.1` should be explicitly highlighted
+
+---input---
+yang-version 1.1;
+
+---tokens---
+'yang-version' Keyword
+' ' Text.Whitespace
+'1.1' Literal.Number.Float
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/yang/test_integer_value.txt b/tests/lexers/yang/test_integer_value.txt
new file mode 100644
index 00000000..149d19cd
--- /dev/null
+++ b/tests/lexers/yang/test_integer_value.txt
@@ -0,0 +1,11 @@
+# Integer value `5` should be explicitly highlighted
+
+---input---
+value 5;
+
+---tokens---
+'value' Keyword
+' ' Text.Whitespace
+'5' Literal.Number.Integer
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/yang/test_namespace_1.txt b/tests/lexers/yang/test_namespace_1.txt
new file mode 100644
index 00000000..1caf138c
--- /dev/null
+++ b/tests/lexers/yang/test_namespace_1.txt
@@ -0,0 +1,11 @@
+# Namespace `urn:test:std:yang` should not be explicitly highlighted
+
+---input---
+namespace urn:test:std:yang;
+
+---tokens---
+'namespace' Keyword
+' ' Text.Whitespace
+'urn:test:std:yang' Name.Variable
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/yang/test_namespace_2.txt b/tests/lexers/yang/test_namespace_2.txt
new file mode 100644
index 00000000..a245f7c0
--- /dev/null
+++ b/tests/lexers/yang/test_namespace_2.txt
@@ -0,0 +1,13 @@
+# namespace-prefix `yang` should be explicitly highlighted
+
+---input---
+type yang:counter64;
+
+---tokens---
+'type' Keyword
+' ' Text.Whitespace
+'yang' Name.Namespace
+':' Punctuation
+'counter64' Name.Variable
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/yang/test_revision_date.txt b/tests/lexers/yang/test_revision_date.txt
new file mode 100644
index 00000000..09ff5a60
--- /dev/null
+++ b/tests/lexers/yang/test_revision_date.txt
@@ -0,0 +1,11 @@
+# Revision-date `2020-08-03` should be explicitly highlighted
+
+---input---
+revision 2020-03-08{
+
+---tokens---
+'revision' Keyword
+' ' Text.Whitespace
+'2020-03-08' Name.Label
+'{' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/lexers/yang/test_string_value.txt b/tests/lexers/yang/test_string_value.txt
new file mode 100644
index 00000000..41dcae54
--- /dev/null
+++ b/tests/lexers/yang/test_string_value.txt
@@ -0,0 +1,11 @@
+# String value `"5"` should not be explicitly highlighted
+
+---input---
+value "5";
+
+---tokens---
+'value' Keyword
+' ' Text.Whitespace
+'"5"' Literal.String.Double
+';' Punctuation
+'\n' Text.Whitespace
diff --git a/tests/test_apache_conf.py b/tests/test_apache_conf.py
deleted file mode 100644
index 94aebc9a..00000000
--- a/tests/test_apache_conf.py
+++ /dev/null
@@ -1,133 +0,0 @@
-"""
- Basic Apache Configuration Test
- ~~~~~~~~~~~~~~~~~--------------
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import textwrap
-
-import pytest
-
-from pygments.token import Text, Number, Token
-from pygments.lexers import configs
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield configs.ApacheConfLexer()
-
-
-def test_multiline_comment(lexer):
- fragment = '#SecAction \\\n "id:\'900004\', \\\n phase:1, \\\n t:none, \\\n setvar:tx.anomaly_score_blocking=on, \\\n nolog, \\\n pass"\n \n'
- tokens = [
- (Token.Comment, '#SecAction \\\n "id:\'900004\', \\\n phase:1, \\\n t:none, \\\n setvar:tx.anomaly_score_blocking=on, \\\n nolog, \\\n pass"'),
- (Token.Text, '\n \n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_multiline_argument(lexer):
- fragment = 'SecAction \\\n "id:\'900001\', \\\n phase:1, \\\n t:none, \\\n setvar:tx.critical_anomaly_score=5, \\\n setvar:tx.error_anomaly_score=4, \\\n setvar:tx.warning_anomaly_score=3, \\\n setvar:tx.notice_anomaly_score=2, \\\n nolog, \\\n pass"\n'
- tokens = [
- (Token.Name.Builtin, 'SecAction'),
- (Token.Text, ' '),
- (Token.Text, '\\\n'),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"id:\'900001\', \\\n phase:1, \\\n t:none, \\\n setvar:tx.critical_anomaly_score=5, \\\n setvar:tx.error_anomaly_score=4, \\\n setvar:tx.warning_anomaly_score=3, \\\n setvar:tx.notice_anomaly_score=2, \\\n nolog, \\\n pass"'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_directive_no_args(lexer):
- fragment = 'Example\nServerName localhost'
- tokens = [
- (Token.Name.Builtin, 'Example'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- (Token.Name.Builtin, 'ServerName'),
- (Token.Text, ' '),
- (Token.Text, 'localhost'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_include_globs(lexer):
- fragment = 'Include /etc/httpd/conf.d/*.conf'
- tokens = [
- (Token.Name.Builtin, 'Include'),
- (Token.Text, ' '),
- (Token.String.Other, '/etc/httpd/conf.d/*.conf'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_multi_include_globs(lexer):
- fragment = 'Include /etc/httpd/conf.d/*/*.conf'
- tokens = [
- (Token.Name.Builtin, 'Include'),
- (Token.Text, ' '),
- (Token.String.Other, '/etc/httpd/conf.d/*/*.conf'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_multi_include_globs_root(lexer):
- fragment = 'Include /*conf/*.conf'
- tokens = [
- (Token.Name.Builtin, 'Include'),
- (Token.Text, ' '),
- (Token.String.Other, '/*conf/*.conf'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_fix_lock_absolute_path(lexer):
- fragment = 'LockFile /var/lock/apache2/accept.lock'
- tokens = [
- (Token.Name.Builtin, 'LockFile'),
- (Token.Text, ' '),
- (Token.String.Other, '/var/lock/apache2/accept.lock'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_normal_scoped_directive(lexer):
- fragment = '<VirtualHost "test">\n</VirtualHost>'
- tokens = [
- (Token.Name.Tag, '<VirtualHost'),
- (Token.Text, ' '),
- (Token.Literal.String, '"test"'),
- (Token.Name.Tag, '>'),
- (Token.Text, '\n'),
- (Token.Name.Tag, '</VirtualHost'),
- (Token.Name.Tag, '>'),
- (Token.Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_malformed_scoped_directive_closing_tag(lexer):
- fragment = '<VirtualHost "test">\n</VirtualHost\n>'
- tokens = [
- (Token.Name.Tag, '<VirtualHost'),
- (Token.Text, ' '),
- (Token.Literal.String, '"test"'),
- (Token.Name.Tag, '>'),
- (Token.Text, '\n'),
- (Token.Error, '<'),
- (Token.Error, '/'),
- (Token.Name.Builtin, 'VirtualHost'),
- (Token.Text, ''),
- (Token.Text, '\n'),
- (Token.Error, '>'),
- (Token.Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
diff --git a/tests/test_asm.py b/tests/test_asm.py
deleted file mode 100644
index 6c5665bb..00000000
--- a/tests/test_asm.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
- Basic GasLexer/NasmLexer Test
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import NasmLexer, GasLexer
-
-
-@pytest.fixture(scope='module')
-def lexer_gas():
- yield GasLexer()
-
-@pytest.fixture(scope='module')
-def lexer_nasm():
- yield NasmLexer()
-
-
-def test_comments(lexer_gas):
- fragment = '''
- lock addq $0, /* comments */ (%rsp) /*
- // comments
- */ xorq %rax, %rax // comments
- '''
- tokens = [
- (Token.Text, ' '),
- (Token.Name.Attribute, 'lock'),
- (Token.Text, ' '),
- (Token.Name.Function, 'addq'),
- (Token.Text, ' '),
- (Token.Name.Constant, '$0'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Comment.Multiline, '/* comments */'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Name.Variable, '%rsp'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Comment.Multiline, '/*\n // comments\n */'),
- (Token.Text, ' '),
- (Token.Name.Function, 'xorq'),
- (Token.Text, ' '),
- (Token.Name.Variable, '%rax'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Name.Variable, '%rax'),
- (Token.Text, ' '),
- (Token.Comment.Single, '// comments\n'),
- (Token.Text, ' \n')
- ]
- assert list(lexer_gas.get_tokens(fragment)) == tokens
-
-def test_cpuid(lexer_nasm):
- # CPU is a valid directive, and we don't want to parse this as
- # cpu id, but as a single token. See bug #1517
- fragment = 'cpuid'
- expected = [
- (Token.Name.Function, 'cpuid'),
- (Token.Text, '\n'),
- ]
- assert expected == list(lexer_nasm.get_tokens(fragment))
diff --git a/tests/test_basic.py b/tests/test_basic.py
deleted file mode 100644
index 4488d34e..00000000
--- a/tests/test_basic.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""
- Pygments Basic lexers tests
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers.basic import VBScriptLexer
-from pygments.token import Error, Name, Number, Punctuation, String, Whitespace
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield VBScriptLexer()
-
-
-def assert_are_tokens_of_type(lexer, examples, expected_token_type):
- for test_number, example in enumerate(examples.split(), 1):
- token_count = 0
- for token_type, token_value in lexer.get_tokens(example):
- if token_type != Whitespace:
- token_count += 1
- assert token_type == expected_token_type, \
- 'token_type #%d for %s is be %s but must be %s' % \
- (test_number, token_value, token_type, expected_token_type)
- assert token_count == 1, \
- '%s must yield exactly 1 token instead of %d' % (example, token_count)
-
-
-def assert_tokens_match(lexer, text, expected_tokens_without_trailing_newline):
- actual_tokens = tuple(lexer.get_tokens(text))
- if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
- actual_tokens = tuple(actual_tokens[:-1])
- assert expected_tokens_without_trailing_newline == actual_tokens, \
- 'text must yield expected tokens: %s' % text
-
-
-def test_can_lex_float(lexer):
- assert_are_tokens_of_type(lexer,
- '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2',
- Number.Float)
- assert_tokens_match(lexer,
- '1e2.1e2',
- ((Number.Float, '1e2'), (Number.Float, '.1e2')))
-
-
-def test_can_reject_almost_float(lexer):
- assert_tokens_match(lexer, '.e1', ((Punctuation, '.'), (Name, 'e1')))
-
-
-def test_can_lex_integer(lexer):
- assert_are_tokens_of_type(lexer, '1 23 456', Number.Integer)
-
-
-def test_can_lex_names(lexer):
- assert_are_tokens_of_type(lexer, 'thingy thingy123 _thingy _123', Name)
-
-
-def test_can_recover_after_unterminated_string(lexer):
- assert_tokens_match(lexer,
- '"x\nx',
- ((String.Double, '"'), (String.Double, 'x'),
- (Error, '\n'), (Name, 'x')))
-
-
-def test_can_recover_from_invalid_character(lexer):
- assert_tokens_match(lexer,
- 'a;bc\nd',
- ((Name, 'a'), (Error, ';bc\n'), (Name, 'd')))
diff --git a/tests/test_bibtex.py b/tests/test_bibtex.py
deleted file mode 100644
index e47869d5..00000000
--- a/tests/test_bibtex.py
+++ /dev/null
@@ -1,239 +0,0 @@
-"""
- BibTeX Test
- ~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import textwrap
-
-import pytest
-
-from pygments.lexers import BibTeXLexer, BSTLexer
-from pygments.token import Token
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield BibTeXLexer()
-
-
-def test_preamble(lexer):
- data = '@PREAMBLE{"% some LaTeX code here"}'
- tokens = [
- (Token.Name.Class, '@PREAMBLE'),
- (Token.Punctuation, '{'),
- (Token.String, '"'),
- (Token.String, '% some LaTeX code here'),
- (Token.String, '"'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(data)) == tokens
-
-
-def test_string(lexer):
- data = '@STRING(SCI = "Science")'
- tokens = [
- (Token.Name.Class, '@STRING'),
- (Token.Punctuation, '('),
- (Token.Name.Attribute, 'SCI'),
- (Token.Text, ' '),
- (Token.Punctuation, '='),
- (Token.Text, ' '),
- (Token.String, '"'),
- (Token.String, 'Science'),
- (Token.String, '"'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(data)) == tokens
-
-
-def test_entry(lexer):
- data = """
- This is a comment.
-
- @ARTICLE{ruckenstein-diffusion,
- author = "Liu, Hongquin" # and # "Ruckenstein, Eli",
- year = 1997,
- month = JAN,
- pages = "888-895"
- }
- """
-
- tokens = [
- (Token.Comment, 'This is a comment.'),
- (Token.Text, '\n\n'),
- (Token.Name.Class, '@ARTICLE'),
- (Token.Punctuation, '{'),
- (Token.Name.Label, 'ruckenstein-diffusion'),
- (Token.Punctuation, ','),
- (Token.Text, '\n '),
- (Token.Name.Attribute, 'author'),
- (Token.Text, ' '),
- (Token.Punctuation, '='),
- (Token.Text, ' '),
- (Token.String, '"'),
- (Token.String, 'Liu, Hongquin'),
- (Token.String, '"'),
- (Token.Text, ' '),
- (Token.Punctuation, '#'),
- (Token.Text, ' '),
- (Token.Name.Variable, 'and'),
- (Token.Text, ' '),
- (Token.Punctuation, '#'),
- (Token.Text, ' '),
- (Token.String, '"'),
- (Token.String, 'Ruckenstein, Eli'),
- (Token.String, '"'),
- (Token.Punctuation, ','),
- (Token.Text, '\n '),
- (Token.Name.Attribute, 'year'),
- (Token.Text, ' '),
- (Token.Punctuation, '='),
- (Token.Text, ' '),
- (Token.Number, '1997'),
- (Token.Punctuation, ','),
- (Token.Text, '\n '),
- (Token.Name.Attribute, 'month'),
- (Token.Text, ' '),
- (Token.Punctuation, '='),
- (Token.Text, ' '),
- (Token.Name.Variable, 'JAN'),
- (Token.Punctuation, ','),
- (Token.Text, '\n '),
- (Token.Name.Attribute, 'pages'),
- (Token.Text, ' '),
- (Token.Punctuation, '='),
- (Token.Text, ' '),
- (Token.String, '"'),
- (Token.String, '888-895'),
- (Token.String, '"'),
- (Token.Text, '\n'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(textwrap.dedent(data))) == tokens
-
-
-def test_comment(lexer):
- data = '@COMMENT{test}'
- tokens = [
- (Token.Comment, '@COMMENT'),
- (Token.Comment, '{test}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(data)) == tokens
-
-
-def test_missing_body(lexer):
- data = '@ARTICLE xxx'
- tokens = [
- (Token.Name.Class, '@ARTICLE'),
- (Token.Text, ' '),
- (Token.Error, 'x'),
- (Token.Error, 'x'),
- (Token.Error, 'x'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(data)) == tokens
-
-
-def test_mismatched_brace(lexer):
- data = '@PREAMBLE(""}'
- tokens = [
- (Token.Name.Class, '@PREAMBLE'),
- (Token.Punctuation, '('),
- (Token.String, '"'),
- (Token.String, '"'),
- (Token.Error, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(data)) == tokens
-
-
-def test_basic_bst():
- lexer = BSTLexer()
- data = """
- % BibTeX standard bibliography style `plain'
-
- INTEGERS { output.state before.all }
-
- FUNCTION {sort.format.title}
- { 't :=
- "A " #2
- "An " #3
- "The " #4 t chop.word
- chop.word
- chop.word
- sortify
- #1 global.max$ substring$
- }
-
- ITERATE {call.type$}
- """
- tokens = [
- (Token.Comment.SingleLine, "% BibTeX standard bibliography style `plain'"),
- (Token.Text, '\n\n'),
- (Token.Keyword, 'INTEGERS'),
- (Token.Text, ' '),
- (Token.Punctuation, '{'),
- (Token.Text, ' '),
- (Token.Name.Variable, 'output.state'),
- (Token.Text, ' '),
- (Token.Name.Variable, 'before.all'),
- (Token.Text, ' '),
- (Token.Punctuation, '}'),
- (Token.Text, '\n\n'),
- (Token.Keyword, 'FUNCTION'),
- (Token.Text, ' '),
- (Token.Punctuation, '{'),
- (Token.Name.Variable, 'sort.format.title'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- (Token.Punctuation, '{'),
- (Token.Text, ' '),
- (Token.Name.Function, "'t"),
- (Token.Text, ' '),
- (Token.Name.Variable, ':='),
- (Token.Text, '\n'),
- (Token.Literal.String, '"A "'),
- (Token.Text, ' '),
- (Token.Literal.Number, '#2'),
- (Token.Text, '\n '),
- (Token.Literal.String, '"An "'),
- (Token.Text, ' '),
- (Token.Literal.Number, '#3'),
- (Token.Text, '\n '),
- (Token.Literal.String, '"The "'),
- (Token.Text, ' '),
- (Token.Literal.Number, '#4'),
- (Token.Text, ' '),
- (Token.Name.Variable, 't'),
- (Token.Text, ' '),
- (Token.Name.Variable, 'chop.word'),
- (Token.Text, '\n '),
- (Token.Name.Variable, 'chop.word'),
- (Token.Text, '\n'),
- (Token.Name.Variable, 'chop.word'),
- (Token.Text, '\n'),
- (Token.Name.Variable, 'sortify'),
- (Token.Text, '\n'),
- (Token.Literal.Number, '#1'),
- (Token.Text, ' '),
- (Token.Name.Builtin, 'global.max$'),
- (Token.Text, ' '),
- (Token.Name.Builtin, 'substring$'),
- (Token.Text, '\n'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n\n'),
- (Token.Keyword, 'ITERATE'),
- (Token.Text, ' '),
- (Token.Punctuation, '{'),
- (Token.Name.Builtin, 'call.type$'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(textwrap.dedent(data))) == tokens
diff --git a/tests/test_cfm.py b/tests/test_cfm.py
deleted file mode 100644
index 096bc876..00000000
--- a/tests/test_cfm.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
- Basic ColdfusionHtmlLexer Test
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import ColdfusionHtmlLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield ColdfusionHtmlLexer()
-
-
-def test_basic_comment(lexer):
- fragment = '<!--- cfcomment --->'
- expected = [
- (Token.Comment.Multiline, '<!---'),
- (Token.Comment.Multiline, ' cfcomment '),
- (Token.Comment.Multiline, '--->'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
-
-
-def test_nested_comment(lexer):
- fragment = '<!--- nested <!--- cfcomment ---> --->'
- expected = [
- (Token.Comment.Multiline, '<!---'),
- (Token.Comment.Multiline, ' nested '),
- (Token.Comment.Multiline, '<!---'),
- (Token.Comment.Multiline, ' cfcomment '),
- (Token.Comment.Multiline, '--->'),
- (Token.Comment.Multiline, ' '),
- (Token.Comment.Multiline, '--->'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
diff --git a/tests/test_clexer.py b/tests/test_clexer.py
deleted file mode 100644
index dba1e40e..00000000
--- a/tests/test_clexer.py
+++ /dev/null
@@ -1,264 +0,0 @@
-"""
- Basic CLexer Test
- ~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import textwrap
-
-import pytest
-
-from pygments.token import Text, Number, Token
-from pygments.lexers import CLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield CLexer()
-
-
-def test_numbers(lexer):
- code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23'
- wanted = []
- for item in zip([Number.Integer, Number.Float, Number.Float,
- Number.Float, Number.Oct, Number.Hex,
- Number.Float, Number.Float], code.split()):
- wanted.append(item)
- wanted.append((Text, ' '))
- wanted = wanted[:-1] + [(Text, '\n')]
- assert list(lexer.get_tokens(code)) == wanted
-
-
-def test_switch(lexer):
- fragment = '''\
- int main()
- {
- switch (0)
- {
- case 0:
- default:
- ;
- }
- }
- '''
- tokens = [
- (Token.Keyword.Type, 'int'),
- (Token.Text, ' '),
- (Token.Name.Function, 'main'),
- (Token.Punctuation, '('),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Punctuation, '{'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'switch'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '0'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Punctuation, '{'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'case'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '0'),
- (Token.Operator, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'default'),
- (Token.Operator, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
-
-
-def test_switch_space_before_colon(lexer):
- fragment = '''\
- int main()
- {
- switch (0)
- {
- case 0 :
- default :
- ;
- }
- }
- '''
- tokens = [
- (Token.Keyword.Type, 'int'),
- (Token.Text, ' '),
- (Token.Name.Function, 'main'),
- (Token.Punctuation, '('),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Punctuation, '{'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'switch'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '0'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Punctuation, '{'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'case'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '0'),
- (Token.Text, ' '),
- (Token.Operator, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'default'),
- (Token.Text, ' '),
- (Token.Operator, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
-
-
-def test_label(lexer):
- fragment = '''\
- int main()
- {
- foo:
- goto foo;
- }
- '''
- tokens = [
- (Token.Keyword.Type, 'int'),
- (Token.Text, ' '),
- (Token.Name.Function, 'main'),
- (Token.Punctuation, '('),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Punctuation, '{'),
- (Token.Text, '\n'),
- (Token.Name.Label, 'foo'),
- (Token.Punctuation, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'goto'),
- (Token.Text, ' '),
- (Token.Name, 'foo'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
-
-
-def test_label_space_before_colon(lexer):
- fragment = '''\
- int main()
- {
- foo :
- goto foo;
- }
- '''
- tokens = [
- (Token.Keyword.Type, 'int'),
- (Token.Text, ' '),
- (Token.Name.Function, 'main'),
- (Token.Punctuation, '('),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Punctuation, '{'),
- (Token.Text, '\n'),
- (Token.Name.Label, 'foo'),
- (Token.Text, ' '),
- (Token.Punctuation, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'goto'),
- (Token.Text, ' '),
- (Token.Name, 'foo'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
-
-
-def test_label_followed_by_statement(lexer):
- fragment = '''\
- int main()
- {
- foo:return 0;
- goto foo;
- }
- '''
- tokens = [
- (Token.Keyword.Type, 'int'),
- (Token.Text, ' '),
- (Token.Name.Function, 'main'),
- (Token.Punctuation, '('),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Punctuation, '{'),
- (Token.Text, '\n'),
- (Token.Name.Label, 'foo'),
- (Token.Punctuation, ':'),
- (Token.Keyword, 'return'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '0'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'goto'),
- (Token.Text, ' '),
- (Token.Name, 'foo'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
-
-
-def test_preproc_file(lexer):
- fragment = '#include <foo>\n'
- tokens = [
- (Token.Comment.Preproc, '#'),
- (Token.Comment.Preproc, 'include'),
- (Token.Text, ' '),
- (Token.Comment.PreprocFile, '<foo>'),
- (Token.Comment.Preproc, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_preproc_file2(lexer):
- fragment = '#include "foo.h"\n'
- tokens = [
- (Token.Comment.Preproc, '#'),
- (Token.Comment.Preproc, 'include'),
- (Token.Text, ' '),
- (Token.Comment.PreprocFile, '"foo.h"'),
- (Token.Comment.Preproc, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_coffeescript.py b/tests/test_coffeescript.py
index 5d69a8aa..56af6ba4 100644
--- a/tests/test_coffeescript.py
+++ b/tests/test_coffeescript.py
@@ -50,35 +50,3 @@ def test_coffee_slashes(lexer, golden):
if '/' in s:
is_regex = t is Token.String.Regex
assert is_regex == slashes_are_regex_here, (t, s)
-
-
-def test_mixed_slashes(lexer):
- fragment = 'a?/foo/:1/2;\n'
- tokens = [
- (Token.Name.Other, 'a'),
- (Token.Operator, '?'),
- (Token.Literal.String.Regex, '/foo/'),
- (Token.Operator, ':'),
- (Token.Literal.Number.Integer, '1'),
- (Token.Operator, '/'),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_beware_infinite_loop(lexer):
- # This demonstrates the case that "This isn't really guarding" comment
- # refers to.
- fragment = '/a/x;\n'
- tokens = [
- (Token.Text, ''),
- (Token.Operator, '/'),
- (Token.Name.Other, 'a'),
- (Token.Operator, '/'),
- (Token.Name.Other, 'x'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_coq.py b/tests/test_coq.py
deleted file mode 100644
index 6b4e27da..00000000
--- a/tests/test_coq.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- Coq Tests
- ~~~~~~~~~~~~~
- :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import CoqLexer
-from pygments.token import Token
-
-@pytest.fixture(scope='module')
-def lexer():
- yield CoqLexer()
-
-def test_coq_unicode(lexer):
- fragment = 'Check (α ≻ β).\n'
- tokens = [
- (Token.Keyword.Namespace, 'Check'),
- (Token.Text, ' '),
- (Token.Operator, '('),
- (Token.Name, 'α'),
- (Token.Text, ' '),
- (Token.Name.Builtin.Pseudo, '≻'),
- (Token.Text, ' '),
- (Token.Name, 'β'),
- (Token.Operator, ')'),
- (Token.Operator, '.'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_cpp.py b/tests/test_cpp.py
index 7b6d4db4..bb803690 100644
--- a/tests/test_cpp.py
+++ b/tests/test_cpp.py
@@ -6,35 +6,11 @@
:license: BSD, see LICENSE for details.
"""
-import pytest
-
from pygments.lexers import CppLexer, CLexer
-from pygments.token import Token
from pygments.lexers import guess_lexer
-@pytest.fixture(scope='module')
-def lexer():
- yield CppLexer()
-
-
-def test_good_comment(lexer):
- fragment = '/* foo */\n'
- tokens = [
- (Token.Comment.Multiline, '/* foo */'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_open_comment(lexer):
- fragment = '/* foo\n'
- tokens = [
- (Token.Comment.Multiline, '/* foo\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
def test_guess_c_lexer():
code = '''
#include <stdio.h>
diff --git a/tests/test_crystal.py b/tests/test_crystal.py
index 91ec8007..c97103a5 100644
--- a/tests/test_crystal.py
+++ b/tests/test_crystal.py
@@ -18,139 +18,6 @@ def lexer():
yield CrystalLexer()
-def test_range_syntax1(lexer):
- fragment = '1...3\n'
- tokens = [
- (Number.Integer, '1'),
- (Operator, '...'),
- (Number.Integer, '3'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_range_syntax2(lexer):
- fragment = '1 .. 3\n'
- tokens = [
- (Number.Integer, '1'),
- (Text, ' '),
- (Operator, '..'),
- (Text, ' '),
- (Number.Integer, '3'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolation_nested_curly(lexer):
- fragment = (
- '"A#{ (3..5).group_by { |x| x/2}.map '
- 'do |k,v| "#{k}" end.join }" + "Z"\n')
- tokens = [
- (String.Double, '"'),
- (String.Double, 'A'),
- (String.Interpol, '#{'),
- (Text, ' '),
- (Punctuation, '('),
- (Number.Integer, '3'),
- (Operator, '..'),
- (Number.Integer, '5'),
- (Punctuation, ')'),
- (Operator, '.'),
- (Name, 'group_by'),
- (Text, ' '),
- (String.Interpol, '{'),
- (Text, ' '),
- (Operator, '|'),
- (Name, 'x'),
- (Operator, '|'),
- (Text, ' '),
- (Name, 'x'),
- (Operator, '/'),
- (Number.Integer, '2'),
- (String.Interpol, '}'),
- (Operator, '.'),
- (Name, 'map'),
- (Text, ' '),
- (Keyword, 'do'),
- (Text, ' '),
- (Operator, '|'),
- (Name, 'k'),
- (Punctuation, ','),
- (Name, 'v'),
- (Operator, '|'),
- (Text, ' '),
- (String.Double, '"'),
- (String.Interpol, '#{'),
- (Name, 'k'),
- (String.Interpol, '}'),
- (String.Double, '"'),
- (Text, ' '),
- (Keyword, 'end'),
- (Operator, '.'),
- (Name, 'join'),
- (Text, ' '),
- (String.Interpol, '}'),
- (String.Double, '"'),
- (Text, ' '),
- (Operator, '+'),
- (Text, ' '),
- (String.Double, '"'),
- (String.Double, 'Z'),
- (String.Double, '"'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_escaped_interpolation(lexer):
- fragment = '"\\#{a + b}"\n'
- # i.e. no actual interpolation
- tokens = [
- (String.Double, '"'),
- (String.Escape, '\\#'),
- (String.Double, '{a + b}'),
- (String.Double, '"'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_operator_methods(lexer):
- fragment = '([] of Int32).[]?(5)\n'
- tokens = [
- (Punctuation, '('),
- (Operator, '['),
- (Operator, ']'),
- (Text, ' '),
- (Keyword, 'of'),
- (Text, ' '),
- (Name, 'Int32'),
- (Punctuation, ')'),
- (Operator, '.'),
- (Name.Operator, '[]?'),
- (Punctuation, '('),
- (Number.Integer, '5'),
- (Punctuation, ')'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_array_access(lexer):
- fragment = '[5][5]?\n'
- tokens = [
- (Operator, '['),
- (Number.Integer, '5'),
- (Operator, ']'),
- (Operator, '['),
- (Number.Integer, '5'),
- (Operator, ']?'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
def test_numbers(lexer):
for kind, testset in [
(Number.Integer, '0 1 1_000_000 1u8 11231231231121312i64'),
@@ -212,329 +79,3 @@ def test_empty_percent_strings(lexer):
(Punctuation, ')'),
(Text, '\n'),
]
-
-
-def test_percent_strings(lexer):
- fragment = (
- '%(hello ("world"))\n'
- '%[hello ["world"]]\n'
- '%{hello "world"}\n'
- '%<hello <"world">>\n'
- '%|hello "world"|\n')
- tokens = [
- (String.Other, '%('),
- (String.Other, 'hello '),
- (String.Other, '('),
- (String.Other, '"world"'),
- (String.Other, ')'),
- (String.Other, ')'),
- (Text, '\n'),
- (String.Other, '%['),
- (String.Other, 'hello '),
- (String.Other, '['),
- (String.Other, '"world"'),
- (String.Other, ']'),
- (String.Other, ']'),
- (Text, '\n'),
- (String.Other, '%{'),
- (String.Other, 'hello "world"'),
- (String.Other, '}'),
- (Text, '\n'),
- (String.Other, '%<'),
- (String.Other, 'hello '),
- (String.Other, '<'),
- (String.Other, '"world"'),
- (String.Other, '>'),
- (String.Other, '>'),
- (Text, '\n'),
- (String.Other, '%|'),
- (String.Other, 'hello "world"'),
- (String.Other, '|'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_special_percent_strings(lexer):
- fragment = '%Q(hello \\n #{name})\n%q(hello \\n #{name})\n%w(foo\\nbar baz)\n'
- tokens = [
- (String.Other, '%Q('),
- (String.Other, 'hello '),
- (String.Escape, '\\n'),
- (String.Other, ' '),
- (String.Interpol, '#{'),
- (Name, 'name'),
- (String.Interpol, '}'),
- (String.Other, ')'),
- (Text, '\n'),
- # The ones below have no interpolation.
- (String.Other, '%q('),
- (String.Other, 'hello '),
- (String.Other, '\\'),
- (String.Other, 'n '),
- (String.Other, '#'),
- (String.Other, '{name}'),
- (String.Other, ')'),
- (Text, '\n'),
- (String.Other, '%w('),
- (String.Other, 'foo'),
- (String.Other, '\\'),
- (String.Other, 'nbar baz'),
- (String.Other, ')'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_pseudo_keywords(lexer):
- fragment = (
- 'def f(x : T, line = __LINE__) forall T\n'
- 'if x.is_a?(String)\n'
- 'pp! x\n'
- 'end\n'
- 'end\n')
- tokens = [
- (Keyword, 'def'),
- (Text, ' '),
- (Name.Function, 'f'),
- (Punctuation, '('),
- (Name, 'x'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name, 'T'),
- (Punctuation, ','),
- (Text, ' '),
- (Name, 'line'),
- (Text, ' '),
- (Operator, '='),
- (Text, ' '),
- (Keyword.Pseudo, '__LINE__'),
- (Punctuation, ')'),
- (Text, ' '),
- (Keyword.Pseudo, 'forall'),
- (Text, ' '),
- (Name, 'T'),
- (Text, '\n'),
- (Keyword, 'if'),
- (Text, ' '),
- (Name, 'x'),
- (Keyword.Pseudo, '.is_a?'),
- (Punctuation, '('),
- (Name, 'String'),
- (Punctuation, ')'),
- (Text, '\n'),
- (Name.Builtin.Pseudo, 'pp!'),
- (Text, ' '),
- (Name, 'x'),
- (Text, '\n'),
- (Keyword, 'end'),
- (Text, '\n'),
- (Keyword, 'end'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_pseudo_builtins(lexer):
- fragment = 'record Cls do\ndef_equals s\nend\n'
- tokens = [
- (Name.Builtin.Pseudo, 'record'),
- (Text, ' '),
- (Name, 'Cls'),
- (Text, ' '),
- (Keyword, 'do'),
- (Text, '\n'),
- (Name.Builtin.Pseudo, 'def_equals'),
- (Text, ' '),
- (Name, 's'),
- (Text, '\n'),
- (Keyword, 'end'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_constant_and_module(lexer):
- fragment = 'HTTP\nHTTP::Server.new\n'
- tokens = [
- (Name.Constant, 'HTTP'),
- (Text, '\n'),
- (Name, 'HTTP'),
- (Operator, '::'),
- (Name, 'Server'),
- (Operator, '.'),
- (Name, 'new'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_macro(lexer):
- fragment = (
- 'def<=>(other : self) : Int\n'
- '{%for field in %w(first_name middle_name last_name)%}\n'
- 'cmp={{field.id}}<=>other.{{field.id}}\n'
- 'return cmp if cmp!=0\n'
- '{%end%}\n'
- '0\n'
- 'end\n')
- tokens = [
- (Keyword, 'def'),
- (Name.Function, '<=>'),
- (Punctuation, '('),
- (Name, 'other'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Keyword, 'self'),
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name, 'Int'),
- (Text, '\n'),
- (String.Interpol, '{%'),
- (Keyword, 'for'),
- (Text, ' '),
- (Name, 'field'),
- (Text, ' '),
- (Keyword, 'in'),
- (Text, ' '),
- (String.Other, '%w('),
- (String.Other, 'first_name middle_name last_name'),
- (String.Other, ')'),
- (String.Interpol, '%}'),
- (Text, '\n'),
- (Name, 'cmp'),
- (Operator, '='),
- (String.Interpol, '{{'),
- (Name, 'field'),
- (Operator, '.'),
- (Name, 'id'),
- (String.Interpol, '}}'),
- (Operator, '<=>'),
- (Name, 'other'),
- (Operator, '.'),
- (String.Interpol, '{{'),
- (Name, 'field'),
- (Operator, '.'),
- (Name, 'id'),
- (String.Interpol, '}}'),
- (Text, '\n'),
- (Keyword, 'return'),
- (Text, ' '),
- (Name, 'cmp'),
- (Text, ' '),
- (Keyword, 'if'),
- (Text, ' '),
- (Name, 'cmp'),
- (Operator, '!='),
- (Number.Integer, '0'),
- (Text, '\n'),
- (String.Interpol, '{%'),
- (Keyword, 'end'),
- (String.Interpol, '%}'),
- (Text, '\n'),
- (Number.Integer, '0'),
- (Text, '\n'),
- (Keyword, 'end'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_lib(lexer):
- fragment = (
- '@[Link("some")]\nlib LibSome\n'
- '@[CallConvention("X86_StdCall")]\nfun foo="some.foo"(thing : Void*) : LibC::Int\n'
- 'end\n')
- tokens = [
- (Operator, '@['),
- (Name.Decorator, 'Link'),
- (Punctuation, '('),
- (String.Double, '"'),
- (String.Double, 'some'),
- (String.Double, '"'),
- (Punctuation, ')'),
- (Operator, ']'),
- (Text, '\n'),
- (Keyword, 'lib'),
- (Text, ' '),
- (Name.Namespace, 'LibSome'),
- (Text, '\n'),
- (Operator, '@['),
- (Name.Decorator, 'CallConvention'),
- (Punctuation, '('),
- (String.Double, '"'),
- (String.Double, 'X86_StdCall'),
- (String.Double, '"'),
- (Punctuation, ')'),
- (Operator, ']'),
- (Text, '\n'),
- (Keyword, 'fun'),
- (Text, ' '),
- (Name.Function, 'foo'),
- (Operator, '='),
- (String.Double, '"'),
- (String.Double, 'some.foo'),
- (String.Double, '"'),
- (Punctuation, '('),
- (Name, 'thing'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name, 'Void'),
- (Operator, '*'),
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name, 'LibC'),
- (Operator, '::'),
- (Name, 'Int'),
- (Text, '\n'),
- (Keyword, 'end'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_escaped_bracestring(lexer):
- fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n'
- tokens = [
- (Name, 'str'),
- (Operator, '.'),
- (Name, 'gsub'),
- (Punctuation, '('),
- (String.Regex, '%r{'),
- (String.Regex, '\\\\'),
- (String.Regex, '\\\\'),
- (String.Regex, '}'),
- (Punctuation, ','),
- (Text, ' '),
- (String.Double, '"'),
- (String.Double, '/'),
- (String.Double, '"'),
- (Punctuation, ')'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_annotation(lexer):
- fragment = '@[FOO::Bar::Baz(opt: "xx")]\n'
- tokens = [
- (Operator, '@['),
- (Name.Decorator, 'FOO::Bar::Baz'),
- (Punctuation, '('),
- (String.Symbol, 'opt'),
- (Punctuation, ':'),
- (Text, ' '),
- (String.Double, '"'),
- (String.Double, 'xx'),
- (String.Double, '"'),
- (Punctuation, ')'),
- (Operator, ']'),
- (Text, '\n'),
- ]
diff --git a/tests/test_csound.py b/tests/test_csound.py
deleted file mode 100644
index efb387c2..00000000
--- a/tests/test_csound.py
+++ /dev/null
@@ -1,523 +0,0 @@
-"""
- Csound lexer tests
- ~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from textwrap import dedent
-
-import pytest
-
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
- Punctuation, String, Text
-from pygments.lexers import CsoundOrchestraLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield CsoundOrchestraLexer()
-
-
-def test_comments(lexer):
- fragment = dedent('''\
- /*
- * comment
- */
- ; comment
- // comment
- ''')
- tokens = [
- (Comment.Multiline, '/*\n * comment\n */'),
- (Text, '\n'),
- (Comment.Single, '; comment'),
- (Text, '\n'),
- (Comment.Single, '// comment'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_instrument_blocks(lexer):
- fragment = dedent('''\
- instr/**/1,/**/N_a_M_e_,/**/+Name/**///
- iDuration = p3
- outc:a(aSignal)
- endin
- ''')
- tokens = [
- (Keyword.Declaration, 'instr'),
- (Comment.Multiline, '/**/'),
- (Name.Function, '1'),
- (Punctuation, ','),
- (Comment.Multiline, '/**/'),
- (Name.Function, 'N_a_M_e_'),
- (Punctuation, ','),
- (Comment.Multiline, '/**/'),
- (Punctuation, '+'),
- (Name.Function, 'Name'),
- (Comment.Multiline, '/**/'),
- (Comment.Single, '//'),
- (Text, '\n'),
- (Text, ' '),
- (Keyword.Type, 'i'),
- (Name, 'Duration'),
- (Text, ' '),
- (Operator, '='),
- (Text, ' '),
- (Name.Variable.Instance, 'p3'),
- (Text, '\n'),
- (Text, ' '),
- (Name.Builtin, 'outc'),
- (Punctuation, ':'),
- (Keyword.Type, 'a'),
- (Punctuation, '('),
- (Keyword.Type, 'a'),
- (Name, 'Signal'),
- (Punctuation, ')'),
- (Text, '\n'),
- (Keyword.Declaration, 'endin'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_user_defined_opcodes(lexer):
- fragment = dedent('''\
- opcode/**/aUDO,/**/i[],/**/aik//
- aUDO
- endop
- ''')
- tokens = [
- (Keyword.Declaration, 'opcode'),
- (Comment.Multiline, '/**/'),
- (Name.Function, 'aUDO'),
- (Punctuation, ','),
- (Comment.Multiline, '/**/'),
- (Keyword.Type, 'i[]'),
- (Punctuation, ','),
- (Comment.Multiline, '/**/'),
- (Keyword.Type, 'aik'),
- (Comment.Single, '//'),
- (Text, '\n'),
- (Text, ' '),
- (Name.Function, 'aUDO'),
- (Text, '\n'),
- (Keyword.Declaration, 'endop'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_numbers(lexer):
- fragment = '123 0123456789'
- tokens = [
- (Number.Integer, '123'),
- (Text, ' '),
- (Number.Integer, '0123456789'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- fragment = '0xabcdef0123456789 0XABCDEF'
- tokens = [
- (Keyword.Type, '0x'),
- (Number.Hex, 'abcdef0123456789'),
- (Text, ' '),
- (Keyword.Type, '0X'),
- (Number.Hex, 'ABCDEF'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- fragments = ['1e2', '3e+4', '5e-6', '7E8', '9E+0', '1E-2', '3.', '4.56', '.789']
- for fragment in fragments:
- tokens = [
- (Number.Float, fragment),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_quoted_strings(lexer):
- fragment = '"characters$MACRO."'
- tokens = [
- (String, '"'),
- (String, 'characters'),
- (Comment.Preproc, '$MACRO.'),
- (String, '"'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_braced_strings(lexer):
- fragment = dedent('''\
- {{
- characters$MACRO.
- }}
- ''')
- tokens = [
- (String, '{{'),
- (String, '\ncharacters$MACRO.\n'),
- (String, '}}'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_escape_sequences(lexer):
- for character in ['\\', 'a', 'b', 'n', 'r', 't', '"', '012', '345', '67']:
- escapedCharacter = '\\' + character
- fragment = '"' + escapedCharacter + '"'
- tokens = [
- (String, '"'),
- (String.Escape, escapedCharacter),
- (String, '"'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- fragment = '{{' + escapedCharacter + '}}'
- tokens = [
- (String, '{{'),
- (String.Escape, escapedCharacter),
- (String, '}}'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_operators(lexer):
- fragments = ['+', '-', '~', '¬', '!', '*', '/', '^', '%', '<<', '>>', '<', '>',
- '<=', '>=', '==', '!=', '&', '#', '|', '&&', '||', '?', ':', '+=',
- '-=', '*=', '/=']
- for fragment in fragments:
- tokens = [
- (Operator, fragment),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_global_value_identifiers(lexer):
- for fragment in ['0dbfs', 'A4', 'kr', 'ksmps', 'nchnls', 'nchnls_i', 'sr']:
- tokens = [
- (Name.Variable.Global, fragment),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_keywords(lexer):
- fragments = ['do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen',
- 'kthen', 'od', 'then', 'until', 'while']
- for fragment in fragments:
- tokens = [
- (Keyword, fragment),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- for fragment in ['return', 'rireturn']:
- tokens = [
- (Keyword.Pseudo, fragment),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_labels(lexer):
- fragment = dedent('''\
- aLabel:
- label2:
- ''')
- tokens = [
- (Name.Label, 'aLabel'),
- (Punctuation, ':'),
- (Text, '\n'),
- (Text, ' '),
- (Name.Label, 'label2'),
- (Punctuation, ':'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_printks_and_prints_escape_sequences(lexer):
- escapedCharacters = ['%!', '%%', '%n', '%N', '%r', '%R', '%t', '%T', '\\\\a',
- '\\\\A', '\\\\b', '\\\\B', '\\\\n', '\\\\N', '\\\\r',
- '\\\\R', '\\\\t', '\\\\T']
- for opcode in ['printks', 'prints']:
- for escapedCharacter in escapedCharacters:
- fragment = opcode + ' "' + escapedCharacter + '"'
- tokens = [
- (Name.Builtin, opcode),
- (Text, ' '),
- (String, '"'),
- (String.Escape, escapedCharacter),
- (String, '"'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_goto_statements(lexer):
- for keyword in ['goto', 'igoto', 'kgoto']:
- fragment = keyword + ' aLabel'
- tokens = [
- (Keyword, keyword),
- (Text, ' '),
- (Name.Label, 'aLabel'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- for opcode in ['reinit', 'rigoto', 'tigoto']:
- fragment = opcode + ' aLabel'
- tokens = [
- (Keyword.Pseudo, opcode),
- (Text, ' '),
- (Name.Label, 'aLabel'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- for opcode in ['cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto', 'cnkgoto']:
- fragment = opcode + ' 1==0, aLabel'
- tokens = [
- (Keyword.Pseudo, opcode),
- (Text, ' '),
- (Number.Integer, '1'),
- (Operator, '=='),
- (Number.Integer, '0'),
- (Punctuation, ','),
- (Text, ' '),
- (Name.Label, 'aLabel'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- fragment = 'timout 0, 0, aLabel'
- tokens = [
- (Keyword.Pseudo, 'timout'),
- (Text, ' '),
- (Number.Integer, '0'),
- (Punctuation, ','),
- (Text, ' '),
- (Number.Integer, '0'),
- (Punctuation, ','),
- (Text, ' '),
- (Name.Label, 'aLabel'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
- for opcode in ['loop_ge', 'loop_gt', 'loop_le', 'loop_lt']:
- fragment = opcode + ' 0, 0, 0, aLabel'
- tokens = [
- (Keyword.Pseudo, opcode),
- (Text, ' '),
- (Number.Integer, '0'),
- (Punctuation, ','),
- (Text, ' '),
- (Number.Integer, '0'),
- (Punctuation, ','),
- (Text, ' '),
- (Number.Integer, '0'),
- (Punctuation, ','),
- (Text, ' '),
- (Name.Label, 'aLabel'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_include_directives(lexer):
- for character in ['"', '|']:
- fragment = '#include/**/' + character + 'file.udo' + character
- tokens = [
- (Comment.Preproc, '#include'),
- (Comment.Multiline, '/**/'),
- (String, character + 'file.udo' + character),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_includestr_directives(lexer):
- fragment = '#includestr/**/"$MACRO..udo"'
- tokens = [
- (Comment.Preproc, '#includestr'),
- (Comment.Multiline, '/**/'),
- (String, '"'),
- (Comment.Preproc, '$MACRO.'),
- (String, '.udo'),
- (String, '"'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_object_like_macro_definitions(lexer):
- fragment = dedent('''\
- # \tdefine MACRO#macro_body#
- #define/**/
- MACRO/**/
- #\\#macro
- body\\##
- ''')
- tokens = [
- (Comment.Preproc, '# \tdefine'),
- (Text, ' '),
- (Comment.Preproc, 'MACRO'),
- (Punctuation, '#'),
- (Comment.Preproc, 'macro_body'),
- (Punctuation, '#'),
- (Text, '\n'),
- (Comment.Preproc, '#define'),
- (Comment.Multiline, '/**/'),
- (Text, '\n'),
- (Comment.Preproc, 'MACRO'),
- (Comment.Multiline, '/**/'),
- (Text, '\n'),
- (Punctuation, '#'),
- (Comment.Preproc, '\\#'),
- (Comment.Preproc, 'macro\nbody'),
- (Comment.Preproc, '\\#'),
- (Punctuation, '#'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_like_macro_definitions(lexer):
- fragment = dedent('''\
- #define MACRO(ARG1#ARG2) #macro_body#
- #define/**/
- MACRO(ARG1'ARG2' ARG3)/**/
- #\\#macro
- body\\##
- ''')
- tokens = [
- (Comment.Preproc, '#define'),
- (Text, ' '),
- (Comment.Preproc, 'MACRO'),
- (Punctuation, '('),
- (Comment.Preproc, 'ARG1'),
- (Punctuation, '#'),
- (Comment.Preproc, 'ARG2'),
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, '#'),
- (Comment.Preproc, 'macro_body'),
- (Punctuation, '#'),
- (Text, '\n'),
- (Comment.Preproc, '#define'),
- (Comment.Multiline, '/**/'),
- (Text, '\n'),
- (Comment.Preproc, 'MACRO'),
- (Punctuation, '('),
- (Comment.Preproc, 'ARG1'),
- (Punctuation, "'"),
- (Comment.Preproc, 'ARG2'),
- (Punctuation, "'"),
- (Text, ' '),
- (Comment.Preproc, 'ARG3'),
- (Punctuation, ')'),
- (Comment.Multiline, '/**/'),
- (Text, '\n'),
- (Punctuation, '#'),
- (Comment.Preproc, '\\#'),
- (Comment.Preproc, 'macro\nbody'),
- (Comment.Preproc, '\\#'),
- (Punctuation, '#'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_macro_preprocessor_directives(lexer):
- for directive in ['#ifdef', '#ifndef', '#undef']:
- fragment = directive + ' MACRO'
- tokens = [
- (Comment.Preproc, directive),
- (Text, ' '),
- (Comment.Preproc, 'MACRO'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_other_preprocessor_directives(lexer):
- fragment = dedent('''\
- #else
- #end
- #endif
- ###
- @ \t12345
- @@ \t67890
- ''')
- tokens = [
- (Comment.Preproc, '#else'),
- (Text, '\n'),
- (Comment.Preproc, '#end'),
- (Text, '\n'),
- (Comment.Preproc, '#endif'),
- (Text, '\n'),
- (Comment.Preproc, '###'),
- (Text, '\n'),
- (Comment.Preproc, '@ \t12345'),
- (Text, '\n'),
- (Comment.Preproc, '@@ \t67890'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_like_macros(lexer):
- fragment = "$MACRO.(((x#y\\)))' \"(#'x)\\)x\\))\"# {{x\\))x)\\)(#'}});"
- tokens = [
- (Comment.Preproc, '$MACRO.'),
- (Punctuation, '('),
- (Comment.Preproc, '('),
- (Comment.Preproc, '('),
- (Comment.Preproc, 'x#y\\)'),
- (Comment.Preproc, ')'),
- (Comment.Preproc, ')'),
- (Punctuation, "'"),
- (Comment.Preproc, ' '),
- (String, '"'),
- (Error, '('),
- (Error, '#'),
- (Error, "'"),
- (String, 'x'),
- (Error, ')'),
- (Comment.Preproc, '\\)'),
- (String, 'x'),
- (Comment.Preproc, '\\)'),
- (Error, ')'),
- (String, '"'),
- (Punctuation, '#'),
- (Comment.Preproc, ' '),
- (String, '{{'),
- (String, 'x'),
- (Comment.Preproc, '\\)'),
- (Error, ')'),
- (String, 'x'),
- (Error, ')'),
- (Comment.Preproc, '\\)'),
- (Error, '('),
- (Error, '#'),
- (Error, "'"),
- (String, '}}'),
- (Punctuation, ')'),
- (Comment.Single, ';'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_name(lexer):
- fragment = 'kG:V'
- tokens = [
- (Keyword.Type, 'k'),
- (Name, 'G'),
- (Punctuation, ':'),
- (Name, 'V'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_data.py b/tests/test_data.py
index 83e7c09e..300ad0fe 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -29,11 +29,6 @@ def lexer_json_ld():
yield JsonLdLexer()
-@pytest.fixture(scope='module')
-def lexer_yaml():
- yield YamlLexer()
-
-
@pytest.mark.parametrize(
'text, expected_token_types',
(
@@ -202,79 +197,3 @@ def test_json_ld_keywords_negative_match(lexer_json_ld, keyword):
assert len(tokens) == 6
assert tokens[1][1] is Token.Name.Tag
assert tokens[1][2] == '"%s"' % keyword
-
-
-def test_basic_json(lexer_json):
- fragment = '{"foo": "bar", "foo2": [1, 2, 3], "\\u0123": "\\u0123"}\n'
- tokens = [
- (Token.Punctuation, '{'),
- (Token.Name.Tag, '"foo"'),
- (Token.Punctuation, ':'),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"bar"'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Name.Tag, '"foo2"'),
- (Token.Punctuation, ':'),
- (Token.Text, ' '),
- (Token.Punctuation, '['),
- (Token.Literal.Number.Integer, '1'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '3'),
- (Token.Punctuation, '],'),
- (Token.Text, ' '),
- (Token.Name.Tag, '"\\u0123"'),
- (Token.Punctuation, ':'),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"\\u0123"'),
- (Token.Punctuation, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_json.get_tokens(fragment)) == tokens
-
-
-def test_basic_bare(lexer_bare):
- # This is the same as testBasic for JsonLexer above, except the
- # enclosing curly braces are removed.
- fragment = '"foo": "bar", "foo2": [1, 2, 3]\n'
- tokens = [
- (Token.Name.Tag, '"foo"'),
- (Token.Punctuation, ':'),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"bar"'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Name.Tag, '"foo2"'),
- (Token.Punctuation, ':'),
- (Token.Text, ' '),
- (Token.Punctuation, '['),
- (Token.Literal.Number.Integer, '1'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '3'),
- (Token.Punctuation, ']'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_bare.get_tokens(fragment)) == tokens
-
-
-def test_yaml(lexer_yaml):
- # Bug #1528: This previously parsed 'token # innocent' as a tag
- fragment = 'here: token # innocent: comment\n'
- tokens = [
- (Token.Name.Tag, 'here'),
- (Token.Punctuation, ':'),
- (Token.Text, ' '),
- (Token.Literal.Scalar.Plain, 'token'),
- (Token.Text, ' '),
- (Token.Comment.Single, '# innocent: comment'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_yaml.get_tokens(fragment)) == tokens
diff --git a/tests/test_ezhil.py b/tests/test_ezhil.py
deleted file mode 100644
index 7e1ee579..00000000
--- a/tests/test_ezhil.py
+++ /dev/null
@@ -1,176 +0,0 @@
-"""
- Basic EzhilLexer Test
- ~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2015 Muthiah Annamalai <ezhillang@gmail.com>
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Operator, Number, Text, Token
-from pygments.lexers import EzhilLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield EzhilLexer()
-
-
-def test_sum(lexer):
- fragment = '1+3\n'
- tokens = [
- (Number.Integer, '1'),
- (Operator, '+'),
- (Number.Integer, '3'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_gcd_expr(lexer):
- fragment = '1^3+(5-5)*gcd(a,b)\n'
- tokens = [
- (Token.Number.Integer, '1'),
- (Token.Operator, '^'),
- (Token.Literal.Number.Integer, '3'),
- (Token.Operator, '+'),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '5'),
- (Token.Operator, '-'),
- (Token.Literal.Number.Integer, '5'),
- (Token.Punctuation, ')'),
- (Token.Operator, '*'),
- (Token.Name, 'gcd'),
- (Token.Punctuation, '('),
- (Token.Name, 'a'),
- (Token.Operator, ','),
- (Token.Name, 'b'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_if_statement(lexer):
- fragment = """@( 0 > 3 ) ஆனால்
- பதிப்பி "wont print"
-முடி"""
- tokens = [
- (Token.Operator, '@'),
- (Token.Punctuation, '('),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '0'),
- (Token.Text, ' '),
- (Token.Operator, '>'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '3'),
- (Token.Text, ' '),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Keyword, 'ஆனால்'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'பதிப்பி'),
- (Token.Text, ' '),
- (Token.Literal.String, '"wont print"'),
- (Token.Text, '\n'),
- (Token.Keyword, 'முடி'),
- (Token.Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function(lexer):
- fragment = """# (C) முத்தையா அண்ணாமலை 2013, 2015
-நிரல்பாகம் gcd ( x, y )
-மு = max(x,y)
- q = min(x,y)
-
-@( q == 0 ) ஆனால்
- பின்கொடு மு
-முடி
-பின்கொடு gcd( மு - q , q )
-முடி\n"""
- tokens = [
- (Token.Comment.Single,
- '# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85'
- '\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'),
- (Token.Keyword, 'நிரல்பாகம்'),
- (Token.Text, ' '),
- (Token.Name, 'gcd'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Text, ' '),
- (Token.Name, 'x'),
- (Token.Operator, ','),
- (Token.Text, ' '),
- (Token.Name, 'y'),
- (Token.Text, ' '),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Name, '\u0bae\u0bc1'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Name.Builtin, 'max'),
- (Token.Punctuation, '('),
- (Token.Name, 'x'),
- (Token.Operator, ','),
- (Token.Name, 'y'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Name, 'q'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Name.Builtin, 'min'),
- (Token.Punctuation, '('),
- (Token.Name, 'x'),
- (Token.Operator, ','),
- (Token.Name, 'y'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Text, '\n'),
- (Token.Operator, '@'),
- (Token.Punctuation, '('),
- (Token.Text, ' '),
- (Token.Name, 'q'),
- (Token.Text, ' '),
- (Token.Operator, '=='),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '0'),
- (Token.Text, ' '),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Keyword, 'ஆனால்'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'பின்கொடு'),
- (Token.Text, ' '),
- (Token.Name, '\u0bae\u0bc1'),
- (Token.Text, '\n'),
- (Token.Keyword, 'முடி'),
- (Token.Text, '\n'),
- (Token.Keyword, '\u0baa\u0bbf\u0ba9\u0bcd\u0b95\u0bca\u0b9f\u0bc1'),
- (Token.Text, ' '),
- (Token.Name, 'gcd'),
- (Token.Punctuation, '('),
- (Token.Text, ' '),
- (Token.Name, '\u0bae\u0bc1'),
- (Token.Text, ' '),
- (Token.Operator, '-'),
- (Token.Text, ' '),
- (Token.Name, 'q'),
- (Token.Text, ' '),
- (Token.Operator, ','),
- (Token.Text, ' '),
- (Token.Name, 'q'),
- (Token.Text, ' '),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- (Token.Keyword, 'முடி'), # '\u0bae\u0bc1\u0b9f\u0bbf'),
- (Token.Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_gdscript.py b/tests/test_gdscript.py
deleted file mode 100644
index bed0fec1..00000000
--- a/tests/test_gdscript.py
+++ /dev/null
@@ -1,166 +0,0 @@
-"""
- GDScript Tests
- ~~~~~~~~~~~~~~
-
- :copyright: Copyright 2020 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import GDScriptLexer
-from pygments.token import Token
-
-
-@pytest.fixture(scope="module")
-def lexer():
- yield GDScriptLexer()
-
-
-def test_variable_declaration_and_assigment(lexer):
- fragment = "var abc = 5.4"
- tokens = [
- (Token.Keyword, "var"),
- (Token.Text, " "),
- (Token.Name, "abc"),
- (Token.Text, " "),
- (Token.Operator, "="),
- (Token.Text, " "),
- (Token.Number.Float, "5.4"),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_simple_function(lexer):
- fragment = "func abc(arg):\n\tprint(\"Hello, World!\")"
- tokens = [
- (Token.Keyword, "func"),
- (Token.Text, " "),
- (Token.Name, "abc"),
- (Token.Punctuation, "("),
- (Token.Name, "arg"),
- (Token.Punctuation, ")"),
- (Token.Punctuation, ":"),
- (Token.Text, "\n"),
- (Token.Text, "\t"),
- (Token.Name.Builtin, "print"),
- (Token.Punctuation, "("),
- (Token.Literal.String.Double, "\""),
- (Token.Literal.String.Double, "Hello, World!"),
- (Token.Literal.String.Double, "\""),
- (Token.Punctuation, ")"),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_with_types(lexer):
- fragment = "func abc(arg: String) -> void:\n\tprint(\"Hello\", arg)"
- tokens = [
- (Token.Keyword, "func"),
- (Token.Text, " "),
- (Token.Name, "abc"),
- (Token.Punctuation, "("),
- (Token.Name, "arg"),
- (Token.Punctuation, ":"),
- (Token.Text, " "),
- (Token.Name.Builtin.Type, "String"),
- (Token.Punctuation, ")"),
- (Token.Text, " "),
- (Token.Operator, "-"),
- (Token.Operator, ">"),
- (Token.Text, " "),
- (Token.Name, "void"),
- (Token.Punctuation, ":"),
- (Token.Text, "\n"),
- (Token.Text, "\t"),
- (Token.Name.Builtin, "print"),
- (Token.Punctuation, "("),
- (Token.Literal.String.Double, "\""),
- (Token.Literal.String.Double, "Hello"),
- (Token.Literal.String.Double, "\""),
- (Token.Punctuation, ","),
- (Token.Text, " "),
- (Token.Name, "arg"),
- (Token.Punctuation, ")"),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_signal(lexer):
- fragment = "signal sig (arg1, arg2)"
- tokens = [
- (Token.Keyword, "signal"),
- (Token.Text, " "),
- (Token.Name, "sig"),
- (Token.Text, " "),
- (Token.Punctuation, "("),
- (Token.Name, "arg1"),
- (Token.Punctuation, ","),
- (Token.Text, " "),
- (Token.Name, "arg2"),
- (Token.Punctuation, ")"),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_export_array(lexer):
- fragment = "export (Array, AudioStream) var streams"
- tokens = [
- (Token.Keyword, "export"),
- (Token.Text, " "),
- (Token.Punctuation, "("),
- (Token.Name.Builtin.Type, "Array"),
- (Token.Punctuation, ","),
- (Token.Text, " "),
- (Token.Name, "AudioStream"),
- (Token.Punctuation, ")"),
- (Token.Text, " "),
- (Token.Keyword, "var"),
- (Token.Text, " "),
- (Token.Name, "streams"),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_inner_class(lexer):
- fragment = "class InnerClass:\n\tvar a = 5"
- tokens = [
- (Token.Keyword, "class"),
- (Token.Text, " "),
- (Token.Name, "InnerClass"),
- (Token.Punctuation, ":"),
- (Token.Text, "\n"),
- (Token.Text, "\t"),
- (Token.Keyword, "var"),
- (Token.Text, " "),
- (Token.Name, "a"),
- (Token.Text, " "),
- (Token.Operator, "="),
- (Token.Text, " "),
- (Token.Literal.Number.Integer, "5"),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_comment(lexer):
- fragment = "# Comment"
- tokens = [
- (Token.Comment.Single, "# Comment"),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_multiline_string(lexer):
- fragment = '"""\nMultiline\n"""'
- tokens = [
- (Token.Literal.String.Doc, '"""\nMultiline\n"""'),
- (Token.Text, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_grammar_notation.py b/tests/test_grammar_notation.py
deleted file mode 100644
index b0c56c82..00000000
--- a/tests/test_grammar_notation.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""
- Basic Grammar Notation Tests
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import PegLexer
-
-
-@pytest.fixture(scope='module')
-def lexer_peg():
- yield PegLexer()
-
-
-def test_peg_basic(lexer_peg):
- fragment = 'rule<-("terminal"/nonterminal/[cls])*\n'
- tokens = [
- (Token.Name.Class, 'rule'),
- (Token.Operator, '<-'),
- (Token.Punctuation, '('),
- (Token.String.Double, '"terminal"'),
- (Token.Operator, '/'),
- (Token.Name.Class, 'nonterminal'),
- (Token.Operator, '/'),
- (Token.Punctuation, '['),
- (Token.String, 'cls'),
- (Token.Punctuation, ']'),
- (Token.Punctuation, ')'),
- (Token.Operator, '*'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_peg.get_tokens(fragment)) == tokens
-
-
-def test_peg_operators(lexer_peg):
- # see for example:
- # - https://github.com/gvanrossum/pegen
- # - https://nim-lang.org/docs/pegs.html
- fragment = "rule = 'a' | 'b'\n"
- tokens = [
- (Token.Name.Class, 'rule'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.String.Single, "'a'"),
- (Token.Text, ' '),
- (Token.Operator, '|'),
- (Token.Text, ' '),
- (Token.String.Single, "'b'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer_peg.get_tokens(fragment)) == tokens
- fragment = "rule: 'a' ~ 'b'\n"
- tokens = [
- (Token.Name.Class, 'rule'),
- (Token.Operator, ':'),
- (Token.Text, ' '),
- (Token.String.Single, "'a'"),
- (Token.Text, ' '),
- (Token.Operator, '~'),
- (Token.Text, ' '),
- (Token.String.Single, "'b'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer_peg.get_tokens(fragment)) == tokens
-
-
-def test_peg_modified_strings(lexer_peg):
- # see for example:
- # - http://textx.github.io/Arpeggio/
- # - https://nim-lang.org/docs/pegs.html
- # - https://github.com/erikrose/parsimonious
- fragment = '~"regex" i"insensitive" "multimod"ilx ("not modified")\n'
- tokens = [
- # can't handle parsimonious-style regex while ~ is a cut operator
- (Token.Operator, '~'),
- (Token.String.Double, '"regex"'),
- (Token.Text, ' '),
- (Token.String.Double, 'i"insensitive"'),
- (Token.Text, ' '),
- (Token.String.Double, '"multimod"ilx'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.String.Double, '"not modified"'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_peg.get_tokens(fragment)) == tokens
diff --git a/tests/test_haskell.py b/tests/test_haskell.py
deleted file mode 100644
index c271d8b4..00000000
--- a/tests/test_haskell.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
- Haskell Tests
- ~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import HaskellLexer
-from pygments.token import Token
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield HaskellLexer()
-
-
-def test_promoted_names(lexer):
- fragment = "'x ': '[]\n"
- tokens = [
- (Token.Name, '\'x'),
- (Token.Text, ' '),
- (Token.Keyword.Type, '\':'),
- (Token.Text, ' '),
- (Token.Keyword.Type, '\'[]'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_hdl.py b/tests/test_hdl.py
deleted file mode 100644
index 3411147f..00000000
--- a/tests/test_hdl.py
+++ /dev/null
@@ -1,686 +0,0 @@
-"""
- HDL Tests
- ~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import SystemVerilogLexer
-from pygments.token import (Comment, Keyword, Name, Number, Operator,
- Punctuation, Text)
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield SystemVerilogLexer()
-
-
-SYSTEMVERILOG_BASIC_TEXT = """\
-// Adder flops the sum of its inputs
-module Adder #(
- parameter int N = 42
-) (
- output logic [N-1:0] y,
- output logic co,
-
- input logic [N-1:0] a,
- input logic [N-1:0] b,
- input logic ci,
-
- input logic clk
-);
- always_ff @(posedge clk) begin
- {co, y} <= a + b + ci;
- end
-endmodule : Adder
-"""
-
-SYSTEMVERILOG_BASIC_TOKENS = [
- (Comment.Single, '// Adder flops the sum of its inputs\n'),
- (Keyword, 'module'),
- (Text, ' '),
- (Name, 'Adder'),
- (Text, ' '),
- (Punctuation, '#'),
- (Punctuation, '('),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'parameter'),
- (Text, ' '),
- (Keyword.Type, 'int'),
- (Text, ' '),
- (Name, 'N'),
- (Text, ' '),
- (Operator, '='),
- (Text, ' '),
- (Number.Integer, '42'),
- (Text, '\n'),
-
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, '('),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'output'),
- (Text, ' '),
- (Keyword.Type, 'logic'),
- (Text, ' '),
- (Punctuation, '['),
- (Name, 'N'),
- (Operator, '-'),
- (Number.Integer, '1'),
- (Operator, ':'),
- (Number.Integer, '0'),
- (Punctuation, ']'),
- (Text, ' '),
- (Name, 'y'),
- (Punctuation, ','),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'output'),
- (Text, ' '),
- (Keyword.Type, 'logic'),
- (Text, ' '),
- (Name, 'co'),
- (Punctuation, ','),
- (Text, '\n'),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'input'),
- (Text, ' '),
- (Keyword.Type, 'logic'),
- (Text, ' '),
- (Punctuation, '['),
- (Name, 'N'),
- (Operator, '-'),
- (Number.Integer, '1'),
- (Operator, ':'),
- (Number.Integer, '0'),
- (Punctuation, ']'),
- (Text, ' '),
- (Name, 'a'),
- (Punctuation, ','),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'input'),
- (Text, ' '),
- (Keyword.Type, 'logic'),
- (Text, ' '),
- (Punctuation, '['),
- (Name, 'N'),
- (Operator, '-'),
- (Number.Integer, '1'),
- (Operator, ':'),
- (Number.Integer, '0'),
- (Punctuation, ']'),
- (Text, ' '),
- (Name, 'b'),
- (Punctuation, ','),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'input'),
- (Text, ' '),
- (Keyword.Type, 'logic'),
- (Text, ' '),
- (Name, 'ci'),
- (Punctuation, ','),
- (Text, '\n'),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'input'),
- (Text, ' '),
- (Keyword.Type, 'logic'),
- (Text, ' '),
- (Name, 'clk'),
- (Text, '\n'),
-
- (Punctuation, ')'),
- (Punctuation, ';'),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'always_ff'),
- (Text, ' '),
- (Punctuation, '@'),
- (Punctuation, '('),
- (Keyword, 'posedge'),
- (Text, ' '),
- (Name, 'clk'),
- (Punctuation, ')'),
- (Text, ' '),
- (Keyword, 'begin'),
- (Text, '\n'),
-
- (Text, ' '),
- (Punctuation, '{'),
- (Name, 'co'),
- (Punctuation, ','),
- (Text, ' '),
- (Name, 'y'),
- (Punctuation, '}'),
- (Text, ' '),
- (Operator, '<'),
- (Operator, '='),
- (Text, ' '),
- (Name, 'a'),
- (Text, ' '),
- (Operator, '+'),
- (Text, ' '),
- (Name, 'b'),
- (Text, ' '),
- (Operator, '+'),
- (Text, ' '),
- (Name, 'ci'),
- (Punctuation, ';'),
- (Text, '\n'),
-
- (Text, ' '),
- (Keyword, 'end'),
- (Text, '\n'),
-
- (Keyword, 'endmodule'),
- (Text, ' '),
- (Operator, ':'),
- (Text, ' '),
- (Name, 'Adder'),
- (Text, '\n'),
-]
-
-def test_systemverilog_basic(lexer):
- """A basic SystemVerilog test.
-
- Examine tokens emitted by the SV lexer for a trivial module.
- Not intended to stress any particular corner of the language.
- """
- tokens = list(lexer.get_tokens(SYSTEMVERILOG_BASIC_TEXT))
- assert tokens == SYSTEMVERILOG_BASIC_TOKENS
-
-
-# Believe it or not, SystemVerilog supports spaces before and after the base
-# specifier (ie 'b, 'd, 'h). See IEEE 1800-2017 Section 5.7.1 for examples.
-SYSTEMVERILOG_NUMBERS_TEXT = """
-8'b10101010
-8 'b10101010
-8'b 10101010
-8'sb10101010
-8'Sb10101010
-8'B10101010
-8'b1010_1010
-8'b10xXzZ?10
-
-24'o01234567
-24 'o01234567
-24'o 01234567
-24'so01234567
-24'So01234567
-24'O01234567
-24'o0123_4567
-24'o01xXzZ?7
-
-32'd27182818
-32 'd27182818
-32'd 27182818
-32'sd27182818
-32'Sd27182818
-32'D27182818
-32'd2718_2818
-32'd27xXzZ?8
-
-32'hdeadbeef
-32 'hdeadbeef
-32'h deadbeef
-32'shdeadbeef
-32'Shdeadbeef
-32'Hdeadbeef
-32'hdead_beef
-32'hdexXzZ?f
-
-'0 '1 'x 'X 'z 'Z
-
-42 1234_5678
-"""
-
-SYSTEMVERILOG_NUMBERS_TOKENS = [
- (Number.Bin, "8'b10101010"),
- (Text, '\n'),
- (Number.Bin, "8 'b10101010"),
- (Text, '\n'),
- (Number.Bin, "8'b 10101010"),
- (Text, '\n'),
- (Number.Bin, "8'sb10101010"),
- (Text, '\n'),
- (Number.Bin, "8'Sb10101010"),
- (Text, '\n'),
- (Number.Bin, "8'B10101010"),
- (Text, '\n'),
- (Number.Bin, "8'b1010_1010"),
- (Text, '\n'),
- (Number.Bin, "8'b10xXzZ?10"),
- (Text, '\n'),
- (Text, '\n'),
- (Number.Oct, "24'o01234567"),
- (Text, '\n'),
- (Number.Oct, "24 'o01234567"),
- (Text, '\n'),
- (Number.Oct, "24'o 01234567"),
- (Text, '\n'),
- (Number.Oct, "24'so01234567"),
- (Text, '\n'),
- (Number.Oct, "24'So01234567"),
- (Text, '\n'),
- (Number.Oct, "24'O01234567"),
- (Text, '\n'),
- (Number.Oct, "24'o0123_4567"),
- (Text, '\n'),
- (Number.Oct, "24'o01xXzZ?7"),
- (Text, '\n'),
- (Text, '\n'),
- (Number.Integer, "32'd27182818"),
- (Text, '\n'),
- (Number.Integer, "32 'd27182818"),
- (Text, '\n'),
- (Number.Integer, "32'd 27182818"),
- (Text, '\n'),
- (Number.Integer, "32'sd27182818"),
- (Text, '\n'),
- (Number.Integer, "32'Sd27182818"),
- (Text, '\n'),
- (Number.Integer, "32'D27182818"),
- (Text, '\n'),
- (Number.Integer, "32'd2718_2818"),
- (Text, '\n'),
- (Number.Integer, "32'd27xXzZ?8"),
- (Text, '\n'),
- (Text, '\n'),
- (Number.Hex, "32'hdeadbeef"),
- (Text, '\n'),
- (Number.Hex, "32 'hdeadbeef"),
- (Text, '\n'),
- (Number.Hex, "32'h deadbeef"),
- (Text, '\n'),
- (Number.Hex, "32'shdeadbeef"),
- (Text, '\n'),
- (Number.Hex, "32'Shdeadbeef"),
- (Text, '\n'),
- (Number.Hex, "32'Hdeadbeef"),
- (Text, '\n'),
- (Number.Hex, "32'hdead_beef"),
- (Text, '\n'),
- (Number.Hex, "32'hdexXzZ?f"),
- (Text, '\n'),
- (Text, '\n'),
- (Number, "'0"),
- (Text, ' '),
- (Number, "'1"),
- (Text, ' '),
- (Number, "'x"),
- (Text, ' '),
- (Number, "'X"),
- (Text, ' '),
- (Number, "'z"),
- (Text, ' '),
- (Number, "'Z"),
- (Text, '\n'),
- (Text, '\n'),
- (Number.Integer, '42'),
- (Text, ' '),
- (Number.Integer, '1234_5678'),
- (Text, '\n'),
-]
-
-def test_systemverilog_numbers(lexer):
- """Test most types of numbers"""
- tokens = list(lexer.get_tokens(SYSTEMVERILOG_NUMBERS_TEXT))
- assert tokens == SYSTEMVERILOG_NUMBERS_TOKENS
-
-
-# See 1800-2017 Table 11-2: Operator Precedence and Associativity
-# Note that the duplicates (unary/binary) have been removed,
-# ie '+', '-', '&', '|', '^', '~^', '^~'
-SYSTEMVERILOG_OPERATORS_TEXT = """
-() [] :: .
-+ - ! ~ & ~& | ~| ^ ~^ ^~ ++ --
-**
-* / %
-<< >> <<< >>>
-< <= > >= inside dist
-== != === !== ==? !=?
-&&
-||
-?:
--> <->
-= += -= *= /= %= &= ^= |= <<= >>= <<<= >>>= := :/ <=
-{} {{}}
-"""
-
-# Note: This is a inconsistent mix of operator and punctuation
-SYSTEMVERILOG_OPERATORS_TOKENS = [
- (Punctuation, '('),
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, '['),
- (Punctuation, ']'),
- (Text, ' '),
- # Note: This should be '::'
- (Operator, ':'),
- (Operator, ':'),
- (Text, ' '),
- (Punctuation, '.'),
- (Text, '\n'),
- (Operator, '+'),
- (Text, ' '),
- (Operator, '-'),
- (Text, ' '),
- (Operator, '!'),
- (Text, ' '),
- (Operator, '~'),
- (Text, ' '),
- (Operator, '&'),
- (Text, ' '),
- # Note: This should be '~&'
- (Operator, '~'),
- (Operator, '&'),
- (Text, ' '),
- (Operator, '|'),
- (Text, ' '),
- # Note: This should be '~|'
- (Operator, '~'),
- (Operator, '|'),
- (Text, ' '),
- (Operator, '^'),
- (Text, ' '),
- # Note: This should be '~^'
- (Operator, '~'),
- (Operator, '^'),
- (Text, ' '),
- # Note: This should be '^~'
- (Operator, '^'),
- (Operator, '~'),
- (Text, ' '),
- # Note: This should be '++'
- (Operator, '+'),
- (Operator, '+'),
- (Text, ' '),
- # Note: This should be '--'
- (Operator, '-'),
- (Operator, '-'),
- (Text, '\n'),
- # Note: This should be '**'
- (Operator, '*'),
- (Operator, '*'),
- (Text, '\n'),
- (Operator, '*'),
- (Text, ' '),
- (Operator, '/'),
- (Text, ' '),
- (Operator, '%'),
- (Text, '\n'),
- # Note: This should be '<<'
- (Operator, '<'),
- (Operator, '<'),
- (Text, ' '),
- # Note: This should be '>>'
- (Operator, '>'),
- (Operator, '>'),
- (Text, ' '),
- # Note: This should be '<<<'
- (Operator, '<'),
- (Operator, '<'),
- (Operator, '<'),
- (Text, ' '),
- # Note: This should be '>>>'
- (Operator, '>'),
- (Operator, '>'),
- (Operator, '>'),
- (Text, '\n'),
- (Operator, '<'),
- (Text, ' '),
- # Note: This should be '<='
- (Operator, '<'),
- (Operator, '='),
- (Text, ' '),
- (Operator, '>'),
- (Text, ' '),
- # Note: This should be '>='
- (Operator, '>'),
- (Operator, '='),
- (Text, ' '),
- (Operator.Word, 'inside'),
- (Text, ' '),
- (Operator.Word, 'dist'),
- (Text, '\n'),
- # Note: This should be '=='
- (Operator, '='),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '!='
- (Operator, '!'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '==='
- (Operator, '='),
- (Operator, '='),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '!=='
- (Operator, '!'),
- (Operator, '='),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '==?'
- (Operator, '='),
- (Operator, '='),
- (Operator, '?'),
- (Text, ' '),
- # Note: This should be '!=?'
- (Operator, '!'),
- (Operator, '='),
- (Operator, '?'),
- (Text, '\n'),
- # Note: This should be '&&'
- (Operator, '&'),
- (Operator, '&'),
- (Text, '\n'),
- # Note: This should be '||'
- (Operator, '|'),
- (Operator, '|'),
- (Text, '\n'),
- (Operator, '?'),
- (Operator, ':'),
- (Text, '\n'),
- # Note: This should be '->'
- (Operator, '-'),
- (Operator, '>'),
- (Text, ' '),
- # Note: This should be '<->'
- (Operator, '<'),
- (Operator, '-'),
- (Operator, '>'),
- (Text, '\n'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '+='
- (Operator, '+'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '-='
- (Operator, '-'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '*='
- (Operator, '*'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '/='
- (Operator, '/'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '%='
- (Operator, '%'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '&='
- (Operator, '&'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '^='
- (Operator, '^'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '|='
- (Operator, '|'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '<<='
- (Operator, '<'),
- (Operator, '<'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '>>='
- (Operator, '>'),
- (Operator, '>'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '<<<='
- (Operator, '<'),
- (Operator, '<'),
- (Operator, '<'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be '>>>='
- (Operator, '>'),
- (Operator, '>'),
- (Operator, '>'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be ':='
- (Operator, ':'),
- (Operator, '='),
- (Text, ' '),
- # Note: This should be ':/'
- (Operator, ':'),
- (Operator, '/'),
- (Text, ' '),
- # Note: This should be '<='
- (Operator, '<'),
- (Operator, '='),
- (Text, '\n'),
- (Punctuation, '{'),
- (Punctuation, '}'),
- (Text, ' '),
- # Note: This should be '{{'
- (Punctuation, '{'),
- (Punctuation, '{'),
- # Note: This should be '}}'
- (Punctuation, '}'),
- (Punctuation, '}'),
- (Text, '\n'),
-]
-
-def test_systemverilog_operators(lexer):
- """Test various operators"""
- tokens = list(lexer.get_tokens(SYSTEMVERILOG_OPERATORS_TEXT))
- assert tokens == SYSTEMVERILOG_OPERATORS_TOKENS
-
-
-# Most of the interesting types of class declarations
-SYSTEMVERILOG_CLASSES_TEXT = """
-class Foo;
-endclass
-
-class Bar;
-endclass : Bar
-
-class Fiz extends Buz;
-endclass : Fiz
-
-class Free #(parameter type T = byte) extends Beer #(T);
-endclass : Free
-"""
-
-SYSTEMVERILOG_CLASSES_TOKENS = [
- (Keyword.Declaration, 'class'),
- (Text, ' '),
- (Name.Class, 'Foo'),
- (Punctuation, ';'),
- (Text, '\n'),
- (Keyword.Declaration, 'endclass'),
- (Text, '\n'),
- (Text, '\n'),
- (Keyword.Declaration, 'class'),
- (Text, ' '),
- (Name.Class, 'Bar'),
- (Punctuation, ';'),
- (Text, '\n'),
- (Keyword.Declaration, 'endclass'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name.Class, 'Bar'),
- (Text, '\n'),
- (Text, '\n'),
- (Keyword.Declaration, 'class'),
- (Text, ' '),
- (Name.Class, 'Fiz'),
- (Text, ' '),
- (Keyword.Declaration, 'extends'),
- (Text, ' '),
- (Name.Class, 'Buz'),
- (Punctuation, ';'),
- (Text, '\n'),
- (Keyword.Declaration, 'endclass'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name.Class, 'Fiz'),
- (Text, '\n'),
- (Text, '\n'),
- (Keyword.Declaration, 'class'),
- (Text, ' '),
- (Name.Class, 'Free'),
- (Text, ' '),
- (Punctuation, '#'),
- (Punctuation, '('),
- (Keyword, 'parameter'),
- (Text, ' '),
- (Keyword.Type, 'type'),
- (Text, ' '),
- (Name, 'T'),
- (Text, ' '),
- (Operator, '='),
- (Text, ' '),
- (Keyword.Type, 'byte'),
- (Punctuation, ')'),
- (Text, ' '),
- (Keyword.Declaration, 'extends'),
- (Text, ' '),
- (Name.Class, 'Beer'),
- (Text, ' '),
- (Punctuation, '#'),
- (Punctuation, '('),
- (Name, 'T'),
- (Punctuation, ')'),
- (Punctuation, ';'),
- (Text, '\n'),
- (Keyword.Declaration, 'endclass'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name.Class, 'Free'),
- (Text, '\n'),
-]
-
-def test_systemverilog_classes(lexer):
- """Test class/extends/endclass group captures"""
- tokens = list(lexer.get_tokens(SYSTEMVERILOG_CLASSES_TEXT))
- assert tokens == SYSTEMVERILOG_CLASSES_TOKENS
diff --git a/tests/test_idris.py b/tests/test_idris.py
deleted file mode 100644
index e8c4b6de..00000000
--- a/tests/test_idris.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""
- Basic IdrisLexer Test
- ~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2020 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Keyword, Text, Name, Operator, Literal
-from pygments.lexers import IdrisLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield IdrisLexer()
-
-def test_reserved_word(lexer):
- fragment = 'namespace Foobar\n links : String\n links = "abc"'
- tokens = [
- (Keyword.Reserved, 'namespace'),
- (Text, ' '),
- (Keyword.Type, 'Foobar'),
- (Text, '\n'),
- (Text, ' '),
- (Name.Function, 'links'),
- (Text, ' '),
- (Operator.Word, ':'),
- (Text, ' '),
- (Keyword.Type, 'String'),
- (Text, '\n'),
- (Text, ' '),
- (Text, ' '),
- (Text, 'links'),
- (Text, ' '),
- (Operator.Word, '='),
- (Text, ' '),
- (Literal.String, '"'),
- (Literal.String, 'abc'),
- (Literal.String, '"'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_compiler_directive(lexer):
- fragment = '%link C "object.o"\n%name Vect xs'
- tokens = [
- (Keyword.Reserved, '%link'),
- (Text, ' '),
- (Keyword.Type, 'C'),
- (Text, ' '),
- (Literal.String, '"'),
- (Literal.String, 'object.o'),
- (Literal.String, '"'),
- (Text, '\n'),
- (Keyword.Reserved, '%name'),
- (Text, ' '),
- (Keyword.Type, 'Vect'),
- (Text, ' '),
- (Text, 'xs'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_ini_lexer.py b/tests/test_ini_lexer.py
deleted file mode 100644
index 9959a9ac..00000000
--- a/tests/test_ini_lexer.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- Pygments INI lexer tests
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-from pygments.lexers.configs import IniLexer
-from pygments.token import Token, String, Keyword, Name, Operator
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield IniLexer()
-
-
-def test_indented_entries(lexer):
- fragment = \
- '[section]\n' \
- ' key1=value1\n' \
- ' key2=value2\n'
- tokens = [
- (Keyword, '[section]'),
- (Token.Text, '\n '),
- (Name.Attribute, 'key1'),
- (Operator, '='),
- (String, 'value1'),
- (Token.Text, '\n '),
- (Name.Attribute, 'key2'),
- (Operator, '='),
- (String, 'value2'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = \
- '[section]\n' \
- ' key1 = value1\n' \
- ' key2 = value2\n'
- tokens = [
- (Keyword, '[section]'),
- (Token.Text, '\n '),
- (Name.Attribute, 'key1'),
- (Token.Text, ' '),
- (Operator, '='),
- (Token.Text, ' '),
- (String, 'value1'),
- (Token.Text, '\n '),
- (Name.Attribute, 'key2'),
- (Token.Text, ' '),
- (Operator, '='),
- (Token.Text, ' '),
- (String, 'value2'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = \
- '[section]\n' \
- ' key 1 = value 1\n' \
- ' key 2 = value 2\n'
- tokens = [
- (Keyword, '[section]'),
- (Token.Text, '\n '),
- (Name.Attribute, 'key 1'),
- (Token.Text, ' '),
- (Operator, '='),
- (Token.Text, ' '),
- (String, 'value 1'),
- (Token.Text, '\n '),
- (Name.Attribute, 'key 2'),
- (Token.Text, ' '),
- (Operator, '='),
- (Token.Text, ' '),
- (String, 'value 2'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_java.py b/tests/test_java.py
index 410c25ab..4e253cee 100644
--- a/tests/test_java.py
+++ b/tests/test_java.py
@@ -19,66 +19,6 @@ def lexer():
yield JavaLexer()
-def test_enhanced_for(lexer):
- fragment = 'label:\nfor(String var2: var1) {}\n'
- tokens = [
- (Name.Label, 'label:'),
- (Text, '\n'),
- (Keyword, 'for'),
- (Punctuation, '('),
- (Name, 'String'),
- (Text, ' '),
- (Name, 'var2'),
- (Punctuation, ':'),
- (Text, ' '),
- (Name, 'var1'),
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, '{'),
- (Punctuation, '}'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_numeric_literals(lexer):
- fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
- fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
- tokens = [
- (Number.Integer, '0'),
- (Text, ' '),
- (Number.Integer, '5L'),
- (Text, ' '),
- (Number.Integer, '9__542_72l'),
- (Text, ' '),
- (Number.Hex, '0xbEEf'),
- (Text, ' '),
- (Number.Hex, '0X9_A'),
- (Text, ' '),
- (Number.Oct, '0_35'),
- (Text, ' '),
- (Number.Oct, '01'),
- (Text, ' '),
- (Number.Bin, '0b0___101_0'),
- (Text, ' '),
- (Number.Float, '0.'),
- (Text, ' '),
- (Number.Float, '.7_17F'),
- (Text, ' '),
- (Number.Float, '3e-1_3d'),
- (Text, ' '),
- (Number.Float, '1f'),
- (Text, ' '),
- (Number.Float, '6_01.9e+3'),
- (Text, ' '),
- (Number.Float, '0x.1Fp3'),
- (Text, ' '),
- (Number.Float, '0XEP8D'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
@pytest.mark.parametrize(
'text',
(
diff --git a/tests/test_javascript.py b/tests/test_javascript.py
index 35225142..3109f3fe 100644
--- a/tests/test_javascript.py
+++ b/tests/test_javascript.py
@@ -85,21 +85,3 @@ def test_hexadecimal_literals_negative_matches(lexer, text):
@pytest.fixture(scope='module')
def ts_lexer():
yield TypeScriptLexer()
-
-def test_function_definition(ts_lexer):
- fragment = u'async function main() {\n}'
- tokens = [
- (Token.Keyword, u'async'),
- (Token.Text, u' '),
- (Token.Keyword.Declaration, u'function'),
- (Token.Text, u' '),
- (Token.Name.Other, u'main'),
- (Token.Punctuation, u'('),
- (Token.Punctuation, u')'),
- (Token.Text, u' '),
- (Token.Punctuation, u'{'),
- (Token.Text, u'\n'),
- (Token.Punctuation, u'}'),
- (Token.Text, u'\n'),
- ]
- assert list(ts_lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_julia.py b/tests/test_julia.py
deleted file mode 100644
index ee928861..00000000
--- a/tests/test_julia.py
+++ /dev/null
@@ -1,58 +0,0 @@
-"""
- Julia Tests
- ~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import JuliaLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield JuliaLexer()
-
-
-def test_unicode(lexer):
- """
- Test that unicode character, √, in an expression is recognized
- """
- fragment = 's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
- tokens = [
- (Token.Name, 's'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Operator, '\u221a'),
- (Token.Punctuation, '('),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '1'),
- (Token.Operator, '/'),
- (Token.Name, 'n'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Operator, '*'),
- (Token.Text, ' '),
- (Token.Name, 'sum'),
- (Token.Punctuation, '('),
- (Token.Name, 'count'),
- (Token.Text, ' '),
- (Token.Operator, '.^'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Operator, '-'),
- (Token.Text, ' '),
- (Token.Name, 'mu'),
- (Token.Text, ' '),
- (Token.Operator, '.^'),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_kotlin.py b/tests/test_kotlin.py
deleted file mode 100644
index 8d67f83d..00000000
--- a/tests/test_kotlin.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
- Basic JavaLexer Test
- ~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Text, Name, Keyword, Punctuation, String
-from pygments.lexers import KotlinLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield KotlinLexer()
-
-
-def test_can_cope_with_backtick_names_in_functions(lexer):
- fragment = 'fun `wo bble`'
- tokens = [
- (Keyword, 'fun'),
- (Text, ' '),
- (Name.Function, '`wo bble`'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_with_commas_and_dashes_in_backtick_Names(lexer):
- fragment = 'fun `wo,-bble`'
- tokens = [
- (Keyword, 'fun'),
- (Text, ' '),
- (Name.Function, '`wo,-bble`'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_with_destructuring(lexer):
- fragment = 'val (a, b) = '
- tokens = [
- (Keyword, 'val'),
- (Text, ' '),
- (Punctuation, '('),
- (Name.Property, 'a'),
- (Punctuation, ','),
- (Text, ' '),
- (Name.Property, 'b'),
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, '='),
- (Text, ' '),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_generics_in_destructuring(lexer):
- fragment = 'val (a: List<Something>, b: Set<Wobble>) ='
- tokens = [
- (Keyword, 'val'),
- (Text, ' '),
- (Punctuation, '('),
- (Name.Property, 'a'),
- (Punctuation, ':'),
- (Text, ' '),
- (Name.Property, 'List'),
- (Punctuation, '<'),
- (Name, 'Something'),
- (Punctuation, '>'),
- (Punctuation, ','),
- (Text, ' '),
- (Name.Property, 'b'),
- (Punctuation, ':'),
- (Text, ' '),
- (Name.Property, 'Set'),
- (Punctuation, '<'),
- (Name, 'Wobble'),
- (Punctuation, '>'),
- (Punctuation, ')'),
- (Text, ' '),
- (Punctuation, '='),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_with_generics(lexer):
- fragment = 'inline fun <reified T : ContractState> VaultService.queryBy(): Vault.Page<T> {'
- tokens = [
- (Keyword, 'inline fun'),
- (Text, ' '),
- (Punctuation, '<'),
- (Keyword, 'reified'),
- (Text, ' '),
- (Name, 'T'),
- (Text, ' '),
- (Punctuation, ':'),
- (Text, ' '),
- (Name, 'ContractState'),
- (Punctuation, '>'),
- (Text, ' '),
- (Name.Class, 'VaultService'),
- (Punctuation, '.'),
- (Name.Function, 'queryBy'),
- (Punctuation, '('),
- (Punctuation, ')'),
- (Punctuation, ':'),
- (Text, ' '),
- (Name, 'Vault'),
- (Punctuation, '.'),
- (Name, 'Page'),
- (Punctuation, '<'),
- (Name, 'T'),
- (Punctuation, '>'),
- (Text, ' '),
- (Punctuation, '{'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_should_cope_with_multiline_comments(lexer):
- fragment = '"""\nthis\nis\na\ncomment"""'
- tokens = [
- (String, '"""\nthis\nis\na\ncomment"""'),
- (Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_markdown_lexer.py b/tests/test_markdown_lexer.py
index 32457b5a..8415c212 100644
--- a/tests/test_markdown_lexer.py
+++ b/tests/test_markdown_lexer.py
@@ -166,126 +166,6 @@ def test_setext_subheading(lexer):
assert list(lexer.get_tokens(fragment)) == tokens
-def test_task_list(lexer):
- fragment = '- [ ] sample task'
- tokens = [
- (Keyword, '- '),
- (Keyword, '[ ]'),
- (Token.Text, ' '),
- (Token.Text, 'sample'),
- (Token.Text, ' '),
- (Token.Text, 'task'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '* [ ] sample task'
- tokens = [
- (Keyword, '* '),
- (Keyword, '[ ]'),
- (Token.Text, ' '),
- (Token.Text, 'sample'),
- (Token.Text, ' '),
- (Token.Text, 'task'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = ' * [ ] sample task'
- tokens = [
- (Token.Text, ' '),
- (Keyword, '* '),
- (Keyword, '[ ]'),
- (Token.Text, ' '),
- (Token.Text, 'sample'),
- (Token.Text, ' '),
- (Token.Text, 'task'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_bulleted_list(lexer):
- fragment = '* foo\n* bar'
- tokens = [
- (Keyword, '*'),
- (Token.Text, ' '),
- (Token.Text, 'foo'),
- (Token.Text, '\n'),
- (Keyword, '*'),
- (Token.Text, ' '),
- (Token.Text, 'bar'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '- foo\n- bar'
- tokens = [
- (Keyword, '-'),
- (Token.Text, ' '),
- (Token.Text, 'foo'),
- (Token.Text, '\n'),
- (Keyword, '-'),
- (Token.Text, ' '),
- (Token.Text, 'bar'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '* *foo*\n* bar'
- tokens = [
- (Keyword, '*'),
- (Token.Text, ' '),
- (Generic.Emph, '*foo*'),
- (Token.Text, '\n'),
- (Keyword, '*'),
- (Token.Text, ' '),
- (Token.Text, 'bar'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '```\ncode\n```\n* *foo*\n* bar'
- tokens = [
- (String.Backtick, '```\ncode\n```\n'),
- (Keyword, '*'),
- (Token.Text, ' '),
- (Generic.Emph, '*foo*'),
- (Token.Text, '\n'),
- (Keyword, '*'),
- (Token.Text, ' '),
- (Token.Text, 'bar'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_numbered_list(lexer):
- fragment = '1. foo\n2. bar'
- tokens = [
- (Keyword, '1.'),
- (Token.Text, ' '),
- (Token.Text, 'foo'),
- (Token.Text, '\n'),
- (Keyword, '2.'),
- (Token.Text, ' '),
- (Token.Text, 'bar'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_quote(lexer):
- fragment = '> a\n> quote'
- tokens = [
- (Keyword, '> '),
- (Generic.Emph, 'a\n'),
- (Keyword, '> '),
- (Generic.Emph, 'quote\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
def test_invalid_code_block(lexer):
fragments = (
'```code```',
@@ -296,341 +176,3 @@ def test_invalid_code_block(lexer):
for fragment in fragments:
for token, _ in lexer.get_tokens(fragment):
assert token != String.Backtick
-
-
-def test_code_block_fenced_by_backticks(lexer):
- fragments = (
- '```\ncode\n```\n',
- '```\nmulti\n`line`\ncode\n```\n',
- )
- for fragment in fragments:
- tokens = [
- (String.Backtick, fragment),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_code_block_with_language(lexer):
- fragments = (
- '```python\nimport this\n```\n',
- )
- for fragment in fragments:
- tokens = [
- (String.Backtick, '```'),
- (String.Backtick, 'python'),
- (Token.Text, '\n'),
- (Token.Keyword.Namespace, 'import'),
- (Token.Text, ' '),
- (Token.Name.Namespace, 'this'),
- (Token.Text, '\n'),
- (String.Backtick, '```\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_inline_code(lexer):
- fragment = 'code: `code`'
- tokens = [
- (Token.Text, 'code:'),
- (Token.Text, ' '),
- (String.Backtick, '`code`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = ' `**code**`'
- tokens = [
- (Token.Text, ' '),
- (String.Backtick, '`**code**`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '(`code`)'
- tokens = [
- (Token.Text, '('),
- (String.Backtick, '`code`'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '* `code`'
- tokens = [
- (Token.Keyword, '*'),
- (Token.Text, ' '),
- (String.Backtick, '`code`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '```\ncode\n```\n* nocode\n* `code`'
- tokens = [
- (String.Backtick, '```\ncode\n```\n'),
- (Token.Keyword, '*'),
- (Token.Text, ' '),
- (Token.Text, 'nocode'),
- (Token.Text, '\n'),
- (Token.Keyword, '*'),
- (Token.Text, ' '),
- (String.Backtick, '`code`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '- `code`'
- tokens = [
- (Token.Keyword, '-'),
- (Token.Text, ' '),
- (String.Backtick, '`code`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '1. `code`'
- tokens = [
- (Token.Keyword, '1.'),
- (Token.Text, ' '),
- (String.Backtick, '`code`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = 'code (`in brackets`)'
- tokens = [
- (Token.Text, 'code'),
- (Token.Text, ' '),
- (Token.Text, '('),
- (String.Backtick, '`in brackets`'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_invalid_bold(lexer):
- fragments = (
- '**no bold__',
- '__no bold**',
- '*no bold*',
- '_no bold_',
- )
-
- for fragment in fragments:
- for token, _ in lexer.get_tokens(fragment):
- assert token != Generic.Strong
-
-
-def test_bold_fenced_by_asterisk(lexer):
- fragment = '**bold**'
- tokens = [
- (Generic.Strong, '**bold**'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '(**bold**)'
- tokens = [
- (Token.Text, '('),
- (Generic.Strong, '**bold**'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_bold_fenced_by_underscore(lexer):
- fragment = '__bold__'
- tokens = [
- (Generic.Strong, '__bold__'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '(__bold__)'
- tokens = [
- (Token.Text, '('),
- (Generic.Strong, '__bold__'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_invalid_italics(lexer):
- fragments = (
- '*no italics_',
- '_no italics*',
- '**no italics**',
- '__no italics__',
- )
-
- for fragment in fragments:
- for token, _ in lexer.get_tokens(fragment):
- assert token != Generic.Emph
-
-
-def test_italics_fenced_by_asterisk(lexer):
- fragment = '*italics*'
- tokens = [
- (Generic.Emph, '*italics*'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '(*italics*)'
- tokens = [
- (Token.Text, '('),
- (Generic.Emph, '*italics*'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_italics_fenced_by_underscore(lexer):
- fragment = '_italics_'
- tokens = [
- (Generic.Emph, '_italics_'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '(_italics_)'
- tokens = [
- (Token.Text, '('),
- (Generic.Emph, '_italics_'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_escape_italics(lexer):
- fragments = (
- r'\*no italics\*',
- r'\_ no italics \_',
- )
-
- for fragment in fragments:
- for token, _ in lexer.get_tokens(fragment):
- assert token != Generic.Emph
-
-
-def test_italics_no_multiline(lexer):
- fragment = '*no\nitalics*'
-
- for token, _ in lexer.get_tokens(fragment):
- assert token != Generic.Emph
-
-
-def test_italics_and_bold(lexer):
- fragment = '**bold** and *italics*'
- tokens = [
- (Generic.Strong, '**bold**'),
- (Token.Text, ' '),
- (Token.Text, 'and'),
- (Token.Text, ' '),
- (Generic.Emph, '*italics*'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '*italics* and **bold**'
- tokens = [
- (Generic.Emph, '*italics*'),
- (Token.Text, ' '),
- (Token.Text, 'and'),
- (Token.Text, ' '),
- (Generic.Strong, '**bold**'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_strikethrough(lexer):
- fragment = '~~striked~~not striked'
- tokens = [
- (Generic.Deleted, '~~striked~~'),
- (Token.Text, 'not'),
- (Token.Text, ' '),
- (Token.Text, 'striked'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_mentions(lexer):
- fragment = 'note for @me:'
- tokens = [
- (Token.Text, 'note'),
- (Token.Text, ' '),
- (Token.Text, 'for'),
- (Token.Text, ' '),
- (Name.Entity, '@me:'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_topics(lexer):
- fragment = 'message to #you:'
- tokens = [
- (Token.Text, 'message'),
- (Token.Text, ' '),
- (Token.Text, 'to'),
- (Token.Text, ' '),
- (Name.Entity, '#you:'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_links(lexer):
- fragment = '[text](link)'
- tokens = [
- (Token.Text, '['),
- (Token.Name.Tag, 'text'),
- (Token.Text, ']'),
- (Token.Text, '('),
- (Token.Name.Attribute, 'link'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '![Image of foo](https://bar.baz)'
- tokens = [
- (Token.Text, '!['),
- (Token.Name.Tag, 'Image of foo'),
- (Token.Text, ']'),
- (Token.Text, '('),
- (Token.Name.Attribute, 'https://bar.baz'),
- (Token.Text, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_reference_style_links(lexer):
- fragment = '[an example][id]'
- tokens = [
- (Token.Text, '['),
- (Token.Name.Tag, 'an example'),
- (Token.Text, ']'),
- (Token.Text, '['),
- (Token.Name.Label, 'id'),
- (Token.Text, ']'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
- fragment = '[id]: http://example.com'
- tokens = [
- (Token.Text, '['),
- (Token.Name.Label, 'id'),
- (Token.Text, ']: '),
- (Token.Name.Attribute, 'http://example.com'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_matlab.py b/tests/test_matlab.py
deleted file mode 100644
index b375c8da..00000000
--- a/tests/test_matlab.py
+++ /dev/null
@@ -1,305 +0,0 @@
-"""
- MATLAB Tests
- ~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import MatlabLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield MatlabLexer()
-
-
-def test_single_line(lexer):
- """
- Test that a single line with strings, a method, and numbers is parsed correctly.
- """
- fragment = "set('T',300,'P',101325);\n"
- tokens = [
- (Token.Name, 'set'),
- (Token.Punctuation, '('),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "T'"),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '300'),
- (Token.Punctuation, ','),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "P'"),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '101325'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_line_continuation(lexer):
- """
- Test that line continuation by ellipses does not produce generic
- output on the second line.
- """
- fragment = "set('T',300,...\n'P',101325);\n"
- tokens = [
- (Token.Name, 'set'),
- (Token.Punctuation, '('),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "T'"),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '300'),
- (Token.Punctuation, ','),
- (Token.Keyword, '...'),
- (Token.Text.Whitespace, '\n'),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "P'"),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '101325'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_keywords_ended_by_newline(lexer):
- """Test that keywords on their own line are marked as keywords."""
- fragment = "if x > 100\n disp('x > 100')\nelse\n disp('x < 100')\nend\n"
- tokens = [
- (Token.Keyword, 'if'),
- (Token.Text.Whitespace, ' '),
- (Token.Name, 'x'),
- (Token.Text.Whitespace, ' '),
- (Token.Operator, '>'),
- (Token.Text.Whitespace, ' '),
- (Token.Literal.Number.Integer, '100'),
- (Token.Text.Whitespace, '\n '),
- (Token.Name.Builtin, 'disp'),
- (Token.Punctuation, '('),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "x > 100'"),
- (Token.Punctuation, ')'),
- (Token.Text.Whitespace, '\n'),
- (Token.Keyword, 'else'),
- (Token.Text.Whitespace, '\n '),
- (Token.Name.Builtin, 'disp'),
- (Token.Punctuation, '('),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "x < 100'"),
- (Token.Punctuation, ')'),
- (Token.Text.Whitespace, '\n'),
- (Token.Keyword, 'end'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_comment_after_continuation(lexer):
- """
- Test that text after the line continuation ellipses is marked as a comment.
- """
- fragment = "set('T',300,... a comment\n'P',101325);\n"
- tokens = [
- (Token.Name, 'set'),
- (Token.Punctuation, '('),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "T'"),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '300'),
- (Token.Punctuation, ','),
- (Token.Keyword, '...'),
- (Token.Comment, ' a comment'),
- (Token.Text.Whitespace, '\n'),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "P'"),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '101325'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_multiple_spaces_variable_assignment(lexer):
- """
- Test that multiple spaces with an equal sign doesn't get formatted to a string.
- """
- fragment = 'x = 100;\n'
- tokens = [
- (Token.Name, 'x'),
- (Token.Text.Whitespace, ' '),
- (Token.Punctuation, '='),
- (Token.Text.Whitespace, ' '),
- (Token.Literal.Number.Integer, '100'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_operator_multiple_space(lexer):
- """
- Test that multiple spaces with an operator doesn't get formatted to a string.
- """
- fragment = 'x > 100;\n'
- tokens = [
- (Token.Name, 'x'),
- (Token.Text.Whitespace, ' '),
- (Token.Operator, '>'),
- (Token.Text.Whitespace, ' '),
- (Token.Literal.Number.Integer, '100'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_one_space_assignment(lexer):
- """Test that one space before an equal sign is formatted correctly."""
- fragment = 'x = 100;\n'
- tokens = [
- (Token.Name, 'x'),
- (Token.Text.Whitespace, ' '),
- (Token.Punctuation, '='),
- (Token.Text.Whitespace, ' '),
- (Token.Literal.Number.Integer, '100'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_command_mode(lexer):
- """
- MATLAB allows char function arguments to not be enclosed by parentheses
- or contain quote characters, as long as they are space separated. Test
- that one common such function is formatted appropriately.
- """
- fragment = 'help sin\n'
- tokens = [
- (Token.Name, 'help'),
- (Token.Text.Whitespace, ' '),
- (Token.Literal.String, 'sin'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-
-MATLAB_SAMPLE_CLASS = """
-classdef Name < dynamicprops
- properties
- % i am a comment
- name1
- name2
- end
- properties (Constant = true, SetAccess = protected)
- % i too am a comment
- matrix = [0, 1, 2];
- string = 'i am a string'
- end
- methods
- % i am also a comment
- function self = Name()
- % i am a comment inside a constructor
- end
- end
-end
-""".strip()
-
-def test_classes_with_properties(lexer):
- whitespace = Token.Text.Whitespace
- tokens = [
- (Token.Keyword, 'classdef'),
- (whitespace, ' '),
- (Token.Name, 'Name'),
- (whitespace, ' '),
- (Token.Operator, '<'),
- (whitespace, ' '),
- (Token.Keyword, 'dynamicprops'),
- (whitespace, '\n '),
- (Token.Keyword, 'properties'),
- (whitespace, '\n '),
- (Token.Comment, '% i am a comment'),
- (whitespace, '\n '),
- (Token.Name, 'name1'),
- (whitespace, '\n '),
- (Token.Name, 'name2'),
- (whitespace, '\n '),
- (Token.Keyword, 'end'),
- (whitespace, '\n '),
- (Token.Keyword, 'properties'),
- (whitespace, ' '),
- (Token.Punctuation, '('),
- (Token.Name.Builtin, 'Constant'),
- (whitespace, ' '),
- (Token.Punctuation, '='),
- (whitespace, ' '),
- (Token.Keyword, 'true'),
- (Token.Punctuation, ','),
- (whitespace, ' '),
- (Token.Name.Builtin, 'SetAccess'),
- (whitespace, ' '),
- (Token.Punctuation, '='),
- (whitespace, ' '),
- (Token.Keyword, 'protected'),
- (Token.Punctuation, ')'),
- (whitespace, "\n "),
- (Token.Comment, '% i too am a comment'),
- (whitespace, '\n '),
- (Token.Name, 'matrix'),
- (whitespace, ' '),
- (Token.Punctuation, '='),
- (whitespace, ' '),
- (Token.Punctuation, '['),
- (Token.Literal.Number.Integer, '0'),
- (Token.Punctuation, ','),
- (whitespace, ' '),
- (Token.Literal.Number.Integer, '1'),
- (Token.Punctuation, ','),
- (whitespace, ' '),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ']'),
- (Token.Punctuation, ';'),
- (whitespace, '\n '),
- (Token.Name, 'string'),
- (whitespace, ' '),
- (Token.Punctuation, '='),
- (whitespace, ' '),
- (Token.Literal.String, "'"),
- (Token.Literal.String, "i am a string'"),
- (whitespace, '\n '),
- (Token.Keyword, 'end'),
- (whitespace, '\n '),
- (Token.Keyword, 'methods'),
- (whitespace, '\n '),
- (Token.Comment, '% i am also a comment'),
- (whitespace, '\n '),
- (Token.Keyword, 'function'),
- (whitespace, ' '),
- (Token.Text, 'self'),
- (whitespace, ' '),
- (Token.Punctuation, '='),
- (whitespace, ' '),
- (Token.Name.Function, 'Name'),
- (Token.Punctuation, '('),
- (Token.Punctuation, ')'),
- (whitespace, '\n '),
- (Token.Comment, '% i am a comment inside a constructor'),
- (whitespace, '\n '),
- (Token.Keyword, 'end'),
- (whitespace, '\n '),
- (Token.Keyword, 'end'),
- (whitespace, '\n'),
- (Token.Keyword, 'end'),
- (whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(MATLAB_SAMPLE_CLASS)) == tokens
-
diff --git a/tests/test_objectiveclexer.py b/tests/test_objectiveclexer.py
deleted file mode 100644
index d4ea6126..00000000
--- a/tests/test_objectiveclexer.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""
- Basic CLexer Test
- ~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import ObjectiveCLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield ObjectiveCLexer()
-
-
-def test_literal_number_int(lexer):
- fragment = '@(1);\n'
- expected = [
- (Token.Literal, '@('),
- (Token.Literal.Number.Integer, '1'),
- (Token.Literal, ')'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
-
-
-def test_literal_number_expression(lexer):
- fragment = '@(1+2);\n'
- expected = [
- (Token.Literal, '@('),
- (Token.Literal.Number.Integer, '1'),
- (Token.Operator, '+'),
- (Token.Literal.Number.Integer, '2'),
- (Token.Literal, ')'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
-
-
-def test_literal_number_nested_expression(lexer):
- fragment = '@(1+(2+3));\n'
- expected = [
- (Token.Literal, '@('),
- (Token.Literal.Number.Integer, '1'),
- (Token.Operator, '+'),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '2'),
- (Token.Operator, '+'),
- (Token.Literal.Number.Integer, '3'),
- (Token.Punctuation, ')'),
- (Token.Literal, ')'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
-
-
-def test_literal_number_bool(lexer):
- fragment = '@NO;\n'
- expected = [
- (Token.Literal.Number, '@NO'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
-
-
-def test_literal_number_bool_expression(lexer):
- fragment = '@(YES);\n'
- expected = [
- (Token.Literal, '@('),
- (Token.Name.Builtin, 'YES'),
- (Token.Literal, ')'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
-
-
-def test_module_import(lexer):
- fragment = '@import ModuleA;\n'
- expected = [
- (Token.Keyword, '@import'),
- (Token.Text, ' '),
- (Token.Name, 'ModuleA'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
diff --git a/tests/test_php.py b/tests/test_php.py
deleted file mode 100644
index fe67f4c2..00000000
--- a/tests/test_php.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
- PHP Tests
- ~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import PhpLexer
-from pygments.token import Token
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield PhpLexer()
-
-
-def test_string_escaping_run(lexer):
- fragment = '<?php $x="{\\""; ?>\n'
- tokens = [
- (Token.Comment.Preproc, '<?php'),
- (Token.Text, ' '),
- (Token.Name.Variable, '$x'),
- (Token.Operator, '='),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, '{'),
- (Token.Literal.String.Escape, '\\"'),
- (Token.Literal.String.Double, '"'),
- (Token.Punctuation, ';'),
- (Token.Text, ' '),
- (Token.Comment.Preproc, '?>'),
- (Token.Other, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_praat.py b/tests/test_praat.py
deleted file mode 100644
index cb2758be..00000000
--- a/tests/test_praat.py
+++ /dev/null
@@ -1,212 +0,0 @@
-"""
- Praat lexer tests
- ~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import PraatLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield PraatLexer()
-
-
-def test_numeric_assignment(lexer):
- fragment = 'var = -15e4\n'
- tokens = [
- (Token.Text, 'var'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Operator, '-'),
- (Token.Literal.Number, '15e4'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def testStringAssignment(lexer):
- fragment = 'var$ = "foo"\n'
- tokens = [
- (Token.Text, 'var$'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Literal.String, '"'),
- (Token.Literal.String, 'foo'),
- (Token.Literal.String, '"'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_string_escaped_quotes(lexer):
- fragment = '"it said ""foo"""\n'
- tokens = [
- (Token.Literal.String, '"'),
- (Token.Literal.String, 'it said '),
- (Token.Literal.String, '"'),
- (Token.Literal.String, '"'),
- (Token.Literal.String, 'foo'),
- (Token.Literal.String, '"'),
- (Token.Literal.String, '"'),
- (Token.Literal.String, '"'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_call(lexer):
- fragment = 'selected("Sound", i+(a*b))\n'
- tokens = [
- (Token.Name.Function, 'selected'),
- (Token.Punctuation, '('),
- (Token.Literal.String, '"'),
- (Token.Literal.String, 'Sound'),
- (Token.Literal.String, '"'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Text, 'i'),
- (Token.Operator, '+'),
- (Token.Text, '('),
- (Token.Text, 'a'),
- (Token.Operator, '*'),
- (Token.Text, 'b'),
- (Token.Text, ')'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_broken_unquoted_string(lexer):
- fragment = 'printline string\n... \'interpolated\' string\n'
- tokens = [
- (Token.Keyword, 'printline'),
- (Token.Text, ' '),
- (Token.Literal.String, 'string'),
- (Token.Text, '\n'),
- (Token.Punctuation, '...'),
- (Token.Text, ' '),
- (Token.Literal.String.Interpol, "'interpolated'"),
- (Token.Text, ' '),
- (Token.Literal.String, 'string'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_inline_if(lexer):
- fragment = 'var = if true == 1 then -1 else 0 fi'
- tokens = [
- (Token.Text, 'var'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Keyword, 'if'),
- (Token.Text, ' '),
- (Token.Text, 'true'),
- (Token.Text, ' '),
- (Token.Operator, '=='),
- (Token.Text, ' '),
- (Token.Literal.Number, '1'),
- (Token.Text, ' '),
- (Token.Keyword, 'then'),
- (Token.Text, ' '),
- (Token.Operator, '-'),
- (Token.Literal.Number, '1'),
- (Token.Text, ' '),
- (Token.Keyword, 'else'),
- (Token.Text, ' '),
- (Token.Literal.Number, '0'),
- (Token.Text, ' '),
- (Token.Keyword, 'fi'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolation_boundary(lexer):
- fragment = '"\'" + "\'"'
- tokens = [
- (Token.Literal.String, '"'),
- (Token.Literal.String, "'"),
- (Token.Literal.String, '"'),
- (Token.Text, ' '),
- (Token.Operator, '+'),
- (Token.Text, ' '),
- (Token.Literal.String, '"'),
- (Token.Literal.String, "'"),
- (Token.Literal.String, '"'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolated_numeric_indexed(lexer):
- fragment = "'a[3]'"
- tokens = [
- (Token.Literal.String.Interpol, "'a[3]'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolated_numeric_hash(lexer):
- fragment = "'a[\"b\"]'"
- tokens = [
- (Token.Literal.String.Interpol, "'a[\"b\"]'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolated_string_indexed(lexer):
- fragment = "'a$[3]'"
- tokens = [
- (Token.Literal.String.Interpol, "'a$[3]'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolated_string_hash(lexer):
- fragment = "'a$[\"b\"]'"
- tokens = [
- (Token.Literal.String.Interpol, "'a$[\"b\"]'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolated_numeric_with_precision(lexer):
- fragment = "'a:3'"
- tokens = [
- (Token.Literal.String.Interpol, "'a:3'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolated_indexed_numeric_with_precision(lexer):
- fragment = "'a[3]:3'"
- tokens = [
- (Token.Literal.String.Interpol, "'a[3]:3'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolated_local_numeric_with_precision(lexer):
- fragment = "'a.a:3'"
- tokens = [
- (Token.Literal.String.Interpol, "'a.a:3'"),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_promql.py b/tests/test_promql.py
deleted file mode 100644
index 5d04b6bb..00000000
--- a/tests/test_promql.py
+++ /dev/null
@@ -1,309 +0,0 @@
-"""
- Basic PromQLLexer Tests
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import PromQLLexer
-from pygments.token import Token
-
-
-@pytest.fixture(scope="module")
-def lexer():
- yield PromQLLexer()
-
-
-def test_metric(lexer):
- fragment = "go_gc_duration_seconds"
- tokens = [
- (Token.Name.Variable, "go_gc_duration_seconds"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_metric_one_label(lexer):
- fragment = 'go_gc_duration_seconds{instance="localhost:9090"}'
- tokens = [
- (Token.Name.Variable, "go_gc_duration_seconds"),
- (Token.Punctuation, "{"),
- (Token.Name.Label, "instance"),
- (Token.Operator, "="),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "localhost:9090"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, "}"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_metric_multiple_labels(lexer):
- fragment = 'go_gc_duration_seconds{instance="localhost:9090",job="alertmanager"}'
- tokens = [
- (Token.Name.Variable, "go_gc_duration_seconds"),
- (Token.Punctuation, "{"),
- (Token.Name.Label, "instance"),
- (Token.Operator, "="),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "localhost:9090"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, ","),
- (Token.Name.Label, "job"),
- (Token.Operator, "="),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "alertmanager"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, "}"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_metric_multiple_labels_with_spaces(lexer):
- fragment = 'go_gc_duration_seconds{ instance="localhost:9090", job="alertmanager" }'
- tokens = [
- (Token.Name.Variable, "go_gc_duration_seconds"),
- (Token.Punctuation, "{"),
- (Token.Text.Whitespace, " "),
- (Token.Name.Label, "instance"),
- (Token.Operator, "="),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "localhost:9090"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, " "),
- (Token.Name.Label, "job"),
- (Token.Operator, "="),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "alertmanager"),
- (Token.Punctuation, '"'),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, "}"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_expression_and_comment(lexer):
- fragment = 'go_gc_duration_seconds{instance="localhost:9090"} # single comment\n'
- tokens = [
- (Token.Name.Variable, "go_gc_duration_seconds"),
- (Token.Punctuation, "{"),
- (Token.Name.Label, "instance"),
- (Token.Operator, "="),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "localhost:9090"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, "}"),
- (Token.Text.Whitespace, " "),
- (Token.Comment.Single, "# single comment"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_delta(lexer):
- fragment = 'delta(cpu_temp_celsius{host="zeus"}[2h])'
- tokens = [
- (Token.Keyword.Reserved, "delta"),
- (Token.Operator, "("),
- (Token.Name.Variable, "cpu_temp_celsius"),
- (Token.Punctuation, "{"),
- (Token.Name.Label, "host"),
- (Token.Operator, "="),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "zeus"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, "}"),
- (Token.Punctuation, "["),
- (Token.Literal.String, "2h"),
- (Token.Punctuation, "]"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_sum_with_args(lexer):
- fragment = 'sum by (app, proc) (instance_memory_usage_bytes)\n'
- tokens = [
- (Token.Keyword, "sum"),
- (Token.Text.Whitespace, " "),
- (Token.Keyword, "by"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "("),
- (Token.Name.Variable, "app"),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, " "),
- (Token.Name.Variable, "proc"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "("),
- (Token.Name.Variable, "instance_memory_usage_bytes"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_multi_line(lexer):
- fragment = """label_replace(
- sum by (instance) (
- irate(node_disk_read_bytes_total[2m])
- ) / 1024 / 1024,
- "device",
- 'disk',
- "instance",
- ".*"
-)
-"""
- tokens = [
- (Token.Keyword.Reserved, "label_replace"),
- (Token.Operator, "("),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Keyword, "sum"),
- (Token.Text.Whitespace, " "),
- (Token.Keyword, "by"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "("),
- (Token.Name.Variable, "instance"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "("),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Keyword.Reserved, "irate"),
- (Token.Operator, "("),
- (Token.Name.Variable, "node_disk_read_bytes_total"),
- (Token.Punctuation, "["),
- (Token.Literal.String, "2m"),
- (Token.Punctuation, "]"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "/"),
- (Token.Text.Whitespace, " "),
- (Token.Literal.Number.Integer, "1024"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "/"),
- (Token.Text.Whitespace, " "),
- (Token.Literal.Number.Integer, "1024"),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "device"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, "'"),
- (Token.Literal.String, "disk"),
- (Token.Punctuation, "'"),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "instance"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, ".*"),
- (Token.Punctuation, '"'),
- (Token.Text.Whitespace, "\n"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_function_multi_line_with_offset(lexer):
- fragment = """label_replace(
- avg by(instance)
- (irate(node_cpu_seconds_total{mode = "idle"}[5m] offset 3s)
- ) * 100,
- "device",
- "cpu",
- "instance",
- ".*"
-)"""
- tokens = [
- (Token.Keyword.Reserved, "label_replace"),
- (Token.Operator, "("),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Keyword, "avg"),
- (Token.Text.Whitespace, " "),
- (Token.Keyword, "by"),
- (Token.Operator, "("),
- (Token.Name.Variable, "instance"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "("),
- (Token.Keyword.Reserved, "irate"),
- (Token.Operator, "("),
- (Token.Name.Variable, "node_cpu_seconds_total"),
- (Token.Punctuation, "{"),
- (Token.Name.Label, "mode"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "="),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "idle"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, "}"),
- (Token.Punctuation, "["),
- (Token.Literal.String, "5m"),
- (Token.Punctuation, "]"),
- (Token.Text.Whitespace, " "),
- (Token.Keyword, "offset"),
- (Token.Text.Whitespace, " "),
- (Token.Literal.String, "3s"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, " "),
- (Token.Operator, "*"),
- (Token.Text.Whitespace, " "),
- (Token.Literal.Number.Integer, "100"),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "device"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "cpu"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, "instance"),
- (Token.Punctuation, '"'),
- (Token.Punctuation, ","),
- (Token.Text.Whitespace, "\n"),
- (Token.Text.Whitespace, " "),
- (Token.Punctuation, '"'),
- (Token.Literal.String, ".*"),
- (Token.Punctuation, '"'),
- (Token.Text.Whitespace, "\n"),
- (Token.Operator, ")"),
- (Token.Text.Whitespace, "\n"),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_properties.py b/tests/test_properties.py
deleted file mode 100644
index 5da3fb86..00000000
--- a/tests/test_properties.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""
- Properties Tests
- ~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers.configs import PropertiesLexer
-from pygments.token import Token
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield PropertiesLexer()
-
-
-def test_comments(lexer):
- """
- Assures lines lead by either # or ! are recognized as a comment
- """
- fragment = '! a comment\n# also a comment\n'
- tokens = [
- (Token.Comment, '! a comment'),
- (Token.Text, '\n'),
- (Token.Comment, '# also a comment'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_leading_whitespace_comments(lexer):
- fragment = ' # comment\n'
- tokens = [
- (Token.Text, ' '),
- (Token.Comment, '# comment'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_escaped_space_in_key(lexer):
- fragment = 'key = value\n'
- tokens = [
- (Token.Name.Attribute, 'key'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Literal.String, 'value'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_escaped_space_in_value(lexer):
- fragment = 'key = doubleword\\ value\n'
- tokens = [
- (Token.Name.Attribute, 'key'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Literal.String, 'doubleword\\ value'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_space_delimited_kv_pair(lexer):
- fragment = 'key value\n'
- tokens = [
- (Token.Name.Attribute, 'key'),
- (Token.Text, ' '),
- (Token.Literal.String, 'value\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_just_key(lexer):
- fragment = 'justkey\n'
- tokens = [
- (Token.Name.Attribute, 'justkey'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_just_key_with_space(lexer):
- fragment = 'just\\ key\n'
- tokens = [
- (Token.Name.Attribute, 'just\\ key'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_python.py b/tests/test_python.py
deleted file mode 100644
index 12529d4a..00000000
--- a/tests/test_python.py
+++ /dev/null
@@ -1,889 +0,0 @@
-"""
- Python Tests
- ~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import PythonLexer, Python3Lexer
-from pygments.token import Token
-
-import re
-
-
-@pytest.fixture(scope='module')
-def lexer2():
- yield PythonLexer()
-
-
-@pytest.fixture(scope='module')
-def lexer3():
- yield Python3Lexer()
-
-
-def test_cls_builtin(lexer2):
- """
- Tests that a cls token gets interpreted as a Token.Name.Builtin.Pseudo
- """
- fragment = 'class TestClass():\n @classmethod\n def hello(cls):\n pass\n'
- tokens = [
- (Token.Keyword, 'class'),
- (Token.Text, ' '),
- (Token.Name.Class, 'TestClass'),
- (Token.Punctuation, '('),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Name.Decorator, '@classmethod'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'def'),
- (Token.Text, ' '),
- (Token.Name.Function, 'hello'),
- (Token.Punctuation, '('),
- (Token.Name.Builtin.Pseudo, 'cls'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ':'),
- (Token.Text, '\n'),
- (Token.Text, ' '),
- (Token.Keyword, 'pass'),
- (Token.Text, '\n'),
- ]
- assert list(lexer2.get_tokens(fragment)) == tokens
-
-
-def test_needs_name(lexer3):
- """
- Tests that '@' is recognized as an Operator
- """
- fragment = 'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
- tokens = [
- (Token.Name, 'S'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Name, 'H'),
- (Token.Text, ' '),
- (Token.Operator, '@'),
- (Token.Text, ' '),
- (Token.Name, 'beta'),
- (Token.Text, ' '),
- (Token.Operator, '-'),
- (Token.Text, ' '),
- (Token.Name, 'r'),
- (Token.Punctuation, ')'),
- (Token.Operator, '.'),
- (Token.Name, 'T'),
- (Token.Text, ' '),
- (Token.Operator, '@'),
- (Token.Text, ' '),
- (Token.Name, 'inv'),
- (Token.Punctuation, '('),
- (Token.Name, 'H'),
- (Token.Text, ' '),
- (Token.Operator, '@'),
- (Token.Text, ' '),
- (Token.Name, 'V'),
- (Token.Text, ' '),
- (Token.Operator, '@'),
- (Token.Text, ' '),
- (Token.Name, 'H'),
- (Token.Operator, '.'),
- (Token.Name, 'T'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Operator, '@'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Name, 'H'),
- (Token.Text, ' '),
- (Token.Operator, '@'),
- (Token.Text, ' '),
- (Token.Name, 'beta'),
- (Token.Text, ' '),
- (Token.Operator, '-'),
- (Token.Text, ' '),
- (Token.Name, 'r'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer3.get_tokens(fragment)) == tokens
-
-
-def test_pep_515(lexer3):
- """
- Tests that the lexer can parse numeric literals with underscores
- """
- fragments = (
- (Token.Literal.Number.Integer, '1_000_000'),
- (Token.Literal.Number.Float, '1_000.000_001'),
- (Token.Literal.Number.Float, '1_000e1_000j'),
- (Token.Literal.Number.Hex, '0xCAFE_F00D'),
- (Token.Literal.Number.Bin, '0b_0011_1111_0100_1110'),
- (Token.Literal.Number.Oct, '0o_777_123'),
- )
-
- for token, fragment in fragments:
- tokens = [
- (token, fragment),
- (Token.Text, '\n'),
- ]
- assert list(lexer3.get_tokens(fragment)) == tokens
-
-
-def test_walrus_operator(lexer3):
- """
- Tests that ':=' is recognized as an Operator
- """
- fragment = 'if (a := 2) > 4:'
- tokens = [
- (Token.Keyword, 'if'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Name, 'a'),
- (Token.Text, ' '),
- (Token.Operator, ':='),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Operator, '>'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '4'),
- (Token.Punctuation, ':'),
- (Token.Text, '\n'),
- ]
- assert list(lexer3.get_tokens(fragment)) == tokens
-
-
-def test_fstring(lexer3):
- """
- Tests that the lexer can parse f-strings
- """
- fragments_and_tokens = (
- # examples from PEP-0498
- (
- "f'My name is {name}, my age next year is {age+1}, my anniversary is {anniversary:%A, %B %d, %Y}.'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'My name is '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'name'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ', my age next year is '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'age'),
- (Token.Operator, '+'),
- (Token.Literal.Number.Integer, '1'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ', my anniversary is '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'anniversary'),
- (Token.Literal.String.Interpol, ':'),
- (Token.Literal.String.Single, '%A, %B %d, %Y'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, '.'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'He said his name is {name!r}.'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'He said his name is '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'name'),
- (Token.Literal.String.Interpol, '!r}'),
- (Token.Literal.String.Single, '.'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
-
- ), (
- "f'input={value:#06x}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'input='),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'value'),
- (Token.Literal.String.Interpol, ':'),
- (Token.Literal.String.Single, '#06x'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- """f'{"quoted string"}'\n""",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'quoted string'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- """f'{f"{inner}"}'\n""", # not in the PEP
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'inner'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- # SyntaxError: f-string expression part cannot include a backslash
- "f'{\\'quoted string\\'}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Error, '\\'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'quoted string'),
- (Token.Literal.String.Escape, "\\'"),
- (Token.Literal.String.Single, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'{{ {4*10} }}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Escape, '{{'),
- (Token.Literal.String.Single, ' '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Literal.Number.Integer, '4'),
- (Token.Operator, '*'),
- (Token.Literal.Number.Integer, '10'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ' '),
- (Token.Literal.String.Escape, '}}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'{{{4*10}}}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Escape, '{{'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Literal.Number.Integer, '4'),
- (Token.Operator, '*'),
- (Token.Literal.Number.Integer, '10'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Escape, '}}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "fr'x={4*10}'\n",
- [
- (Token.Literal.String.Affix, 'fr'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, "x="),
- (Token.Literal.String.Interpol, '{'),
- (Token.Literal.Number.Integer, '4'),
- (Token.Operator, '*'),
- (Token.Literal.Number.Integer, '10'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- """f'abc {a["x"]} def'\n""",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'abc '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'a'),
- (Token.Punctuation, '['),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'x'),
- (Token.Literal.String.Double, '"'),
- (Token.Punctuation, ']'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ' def'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'''abc {a['x']} def'''\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'''"),
- (Token.Literal.String.Single, 'abc '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'a'),
- (Token.Punctuation, '['),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'x'),
- (Token.Literal.String.Single, "'"),
- (Token.Punctuation, ']'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ' def'),
- (Token.Literal.String.Single, "'''"),
- (Token.Text, '\n')
- ]
- ), (
- """f'''{x
-+1}'''\n""",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'''"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'x'),
- (Token.Text, '\n'),
- (Token.Operator, '+'),
- (Token.Literal.Number.Integer, '1'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'''"),
- (Token.Text, '\n')
- ]
- ), (
- """f'''{d[0
-]}'''\n""",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'''"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'd'),
- (Token.Punctuation, '['),
- (Token.Literal.Number.Integer, '0'),
- (Token.Text, '\n'),
- (Token.Punctuation, ']'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'''"),
- (Token.Text, '\n')
- ]
- ), (
- "f'result: {value:{width}.{precision}}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'result: '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'value'),
- (Token.Literal.String.Interpol, ':'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'width'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, '.'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'precision'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "'a' 'b' f'{x}' '{c}' f'str<{y:^4}>' 'd' 'e'\n",
- [
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'a'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, ' '),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'b'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, ' '),
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'x'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, ' '),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{c}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, ' '),
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'str<'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'y'),
- (Token.Literal.String.Interpol, ':'),
- (Token.Literal.String.Single, '^4'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, '>'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, ' '),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'd'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, ' '),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'e'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'{i}:{d[i]}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'i'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ':'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'd'),
- (Token.Punctuation, '['),
- (Token.Name, 'i'),
- (Token.Punctuation, ']'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'x = {x:+3}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, "x = "),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'x'),
- (Token.Literal.String.Interpol, ':'),
- (Token.Literal.String.Single, '+3'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'{fn(lst,2)} {fn(lst,3)}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'fn'),
- (Token.Punctuation, '('),
- (Token.Name, 'lst'),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ')'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ' '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'fn'),
- (Token.Punctuation, '('),
- (Token.Name, 'lst'),
- (Token.Punctuation, ','),
- (Token.Literal.Number.Integer, '3'),
- (Token.Punctuation, ')'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'mapping is { {a:b for (a, b) in ((1, 2), (3, 4))} }'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'mapping is '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Text, ' '),
- (Token.Punctuation, '{'),
- (Token.Name, 'a'),
- (Token.Punctuation, ':'),
- (Token.Name, 'b'),
- (Token.Text, ' '),
- (Token.Keyword, 'for'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Name, 'a'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Name, 'b'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Operator.Word, 'in'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '1'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '3'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '4'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, '}'),
- (Token.Text, ' '),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- """f'a={d["a"]}'\n""",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'a='),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'd'),
- (Token.Punctuation, '['),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'a'),
- (Token.Literal.String.Double, '"'),
- (Token.Punctuation, ']'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'a={d[a]}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, 'a='),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'd'),
- (Token.Punctuation, '['),
- (Token.Name, 'a'),
- (Token.Punctuation, ']'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "fr'{header}:\\s+'\n",
- [
- (Token.Literal.String.Affix, 'fr'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'header'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ':'),
- (Token.Literal.String.Single, '\\'),
- (Token.Literal.String.Single, 's+'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'{a!r}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'a'),
- (Token.Literal.String.Interpol, '!r}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'{(lambda x: x*2)(3)}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Punctuation, '('),
- (Token.Keyword, 'lambda'),
- (Token.Text, ' '),
- (Token.Name, 'x'),
- (Token.Punctuation, ':'),
- (Token.Text, ' '),
- (Token.Name, 'x'),
- (Token.Operator, '*'),
- (Token.Literal.Number.Integer, '2'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '3'),
- (Token.Punctuation, ')'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "extra = f'{extra},waiters:{len(self._waiters)}'\n",
- [
- (Token.Name, 'extra'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Text, ' '),
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'extra'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, ',waiters:'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name.Builtin, 'len'),
- (Token.Punctuation, '('),
- (Token.Name.Builtin.Pseudo, 'self'),
- (Token.Operator, '.'),
- (Token.Name, '_waiters'),
- (Token.Punctuation, ')'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- 'message.append(f" [line {lineno:2d}]")\n',
- [
- (Token.Name, 'message'),
- (Token.Operator, '.'),
- (Token.Name, 'append'),
- (Token.Punctuation, '('),
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, ' [line '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'lineno'),
- (Token.Literal.String.Interpol, ':'),
- (Token.Literal.String.Double, '2d'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, ']'),
- (Token.Literal.String.Double, '"'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n')
- ]
- ),
- # Examples from https://bugs.python.org/issue36817
- (
- 'f"{foo=}"\n',
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'foo'),
- (Token.Literal.String.Interpol, '=}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n')
- ]
- ), (
- "f'{foo=!s}'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'foo'),
- (Token.Literal.String.Interpol, '=!s}'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- 'f"{math.pi=!f:.2f}"\n',
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'math'),
- (Token.Operator, '.'),
- (Token.Name, 'pi'),
- (Token.Literal.String.Interpol, '=!f:'),
- (Token.Literal.String.Double, '.2f'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n')
- ]
- ), (
- 'f"{ chr(65) =}"\n',
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Text, ' '),
- (Token.Name.Builtin, 'chr'),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '65'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Literal.String.Interpol, '=}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n')
- ]
- ), (
- 'f"{chr(65) = }"\n',
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name.Builtin, 'chr'),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '65'),
- (Token.Punctuation, ')'),
- (Token.Text, ' '),
- (Token.Literal.String.Interpol, '= }'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n')
- ]
- ), (
- "f'*{n=:30}*'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, '*'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'n'),
- (Token.Literal.String.Interpol, '=:'),
- (Token.Literal.String.Single, '30'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, '*'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- "f'*{n=!r:30}*'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, '*'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'n'),
- (Token.Literal.String.Interpol, '=!r:'),
- (Token.Literal.String.Single, '30'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, '*'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- """f"*{f'{n=}':30}*"\n""",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, '*'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'n'),
- (Token.Literal.String.Interpol, '=}'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Interpol, ':'),
- (Token.Literal.String.Double, '30'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, '*'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n')
- ]
- ), (
- "f'*{n=:+<30}*'\n",
- [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'"),
- (Token.Literal.String.Single, '*'),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'n'),
- (Token.Literal.String.Interpol, '=:'),
- (Token.Literal.String.Single, '+<30'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, '*'),
- (Token.Literal.String.Single, "'"),
- (Token.Text, '\n')
- ]
- ), (
- """
- f'''{foo
- = !s:20}'''\n""",
- [
- (Token.Text, ' '),
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Single, "'''"),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'foo'),
- (Token.Text, '\n '),
- (Token.Literal.String.Interpol, '= !s:'),
- (Token.Literal.String.Single, '20'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Single, "'''"),
- (Token.Text, '\n')
- ]
- )
-
- )
-
- for fragment,tokens in fragments_and_tokens:
- assert list(lexer3.get_tokens(fragment)) == tokens
-
- # Now switch between single and double quotes, to cover both cases equally
- rep = {"'":'"', '"':"'"}
- pattern = re.compile("|".join(rep.keys()))
- for fragment,tokens in fragments_and_tokens:
- fragment = pattern.sub(lambda m: rep[m.group(0)], fragment)
- tokens = list(tokens)
- for i,(token,match) in enumerate(tokens):
- if token == Token.Literal.String.Single:
- token = Token.Literal.String.Double
- elif token == Token.Literal.String.Double:
- token = Token.Literal.String.Single
- match = pattern.sub(lambda m: rep[m.group(0)], match)
- tokens[i] = (token, match)
- assert list(lexer3.get_tokens(fragment)) == tokens
-
-def test_raw_fstring(lexer3):
- """
- Tests that the lexer can parse raw f-strings
- """
- # Just raw
- fragment = r'rf"m_\nu = x"'
- tokens = [
- (Token.Literal.String.Affix, 'rf'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'm_'),
- (Token.Literal.String.Double, '\\'),
- (Token.Literal.String.Double, 'nu = x'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n')
- ]
- # Just f-string
- fragment = r'f"m_\nu = {x}"'
- tokens = [
- (Token.Literal.String.Affix, 'f'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'm_'),
- (Token.Literal.String.Escape, '\\n'),
- (Token.Literal.String.Double, 'u = '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'x'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n'),
- ]
- # Raw behavior inside {{...}}
- fragment = r'rf"m_{{\nu}} = {x}"'
- tokens = [
- (Token.Literal.String.Affix, 'rf'),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'm_'),
- (Token.Literal.String.Escape, '{{'),
- (Token.Literal.String.Double, '\\'),
- (Token.Literal.String.Double, 'nu'),
- (Token.Literal.String.Escape, '}}'),
- (Token.Literal.String.Double, ' = '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Name, 'x'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n'),
- ]
- assert list(lexer3.get_tokens(fragment)) == tokens
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
deleted file mode 100644
index cedeb93f..00000000
--- a/tests/test_qbasiclexer.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
- Tests for QBasic
- ~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers.basic import QBasicLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield QBasicLexer()
-
-
-def test_keywords_with_dollar(lexer):
- fragment = 'DIM x\nx = RIGHT$("abc", 1)\n'
- expected = [
- (Token.Keyword.Declaration, 'DIM'),
- (Token.Text.Whitespace, ' '),
- (Token.Name.Variable.Global, 'x'),
- (Token.Text, '\n'),
- (Token.Name.Variable.Global, 'x'),
- (Token.Text.Whitespace, ' '),
- (Token.Operator, '='),
- (Token.Text.Whitespace, ' '),
- (Token.Keyword.Reserved, 'RIGHT$'),
- (Token.Punctuation, '('),
- (Token.Literal.String.Double, '"abc"'),
- (Token.Punctuation, ','),
- (Token.Text.Whitespace, ' '),
- (Token.Literal.Number.Integer.Long, '1'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == expected
diff --git a/tests/test_r.py b/tests/test_r.py
deleted file mode 100644
index ffd3f576..00000000
--- a/tests/test_r.py
+++ /dev/null
@@ -1,111 +0,0 @@
-"""
- R Tests
- ~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import SLexer
-from pygments.token import Token, Name, Punctuation
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield SLexer()
-
-
-def test_call(lexer):
- fragment = 'f(1, a)\n'
- tokens = [
- (Name.Function, 'f'),
- (Punctuation, '('),
- (Token.Literal.Number, '1'),
- (Punctuation, ','),
- (Token.Text, ' '),
- (Token.Name, 'a'),
- (Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_name1(lexer):
- fragment = '._a_2.c'
- tokens = [
- (Name, '._a_2.c'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_name2(lexer):
- # Invalid names are valid if backticks are used
- fragment = '`.1 blah`'
- tokens = [
- (Name, '`.1 blah`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_name3(lexer):
- # Internal backticks can be escaped
- fragment = '`.1 \\` blah`'
- tokens = [
- (Name, '`.1 \\` blah`'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_custom_operator(lexer):
- fragment = '7 % and % 8'
- tokens = [
- (Token.Literal.Number, '7'),
- (Token.Text, ' '),
- (Token.Operator, '% and %'),
- (Token.Text, ' '),
- (Token.Literal.Number, '8'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_indexing(lexer):
- fragment = 'a[1]'
- tokens = [
- (Token.Name, 'a'),
- (Token.Punctuation, '['),
- (Token.Literal.Number, '1'),
- (Token.Punctuation, ']'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_dot_name(lexer):
- fragment = '. <- 1'
- tokens = [
- (Token.Name, '.'),
- (Token.Text, ' '),
- (Token.Operator, '<-'),
- (Token.Text, ' '),
- (Token.Literal.Number, '1'),
- (Token.Text, '\n')
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_dot_indexing(lexer):
- fragment = '.[1]'
- tokens = [
- (Token.Name, '.'),
- (Token.Punctuation, '['),
- (Token.Literal.Number, '1'),
- (Token.Punctuation, ']'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_rdf.py b/tests/test_rdf.py
deleted file mode 100644
index e4c9d1ec..00000000
--- a/tests/test_rdf.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
- Basic RubyLexer Test
- ~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Name, Punctuation, Text
-from pygments.lexers import TurtleLexer, ShExCLexer
-
-
-@pytest.fixture(scope='module')
-def turtle_lexer():
- yield TurtleLexer()
-
-@pytest.fixture(scope='module')
-def shexc_lexer():
- yield ShExCLexer()
-
-def test_turtle_prefixed_name_starting_with_number(turtle_lexer):
- fragment = 'alice:6f6e4241-75a2-4780-9b2a-40da53082e54\n'
- tokens = [
- (Name.Namespace, 'alice'),
- (Punctuation, ':'),
- (Name.Tag, '6f6e4241-75a2-4780-9b2a-40da53082e54'),
- (Text, '\n'),
- ]
- assert list(turtle_lexer.get_tokens(fragment)) == tokens
-
-def test_shexc_prefixed_name_starting_with_number(shexc_lexer):
- fragment = 'alice:6f6e4241-75a2-4780-9b2a-40da53082e54\n'
- tokens = [
- (Name.Namespace, 'alice'),
- (Punctuation, ':'),
- (Name.Tag, '6f6e4241-75a2-4780-9b2a-40da53082e54'),
- (Text, '\n'),
- ]
- assert list(shexc_lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_ruby.py b/tests/test_ruby.py
deleted file mode 100644
index 98654cc4..00000000
--- a/tests/test_ruby.py
+++ /dev/null
@@ -1,148 +0,0 @@
-"""
- Basic RubyLexer Test
- ~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Operator, Number, Text, Token
-from pygments.lexers import RubyLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield RubyLexer()
-
-
-def test_range_syntax1(lexer):
- fragment = '1..3\n'
- tokens = [
- (Number.Integer, '1'),
- (Operator, '..'),
- (Number.Integer, '3'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_range_syntax2(lexer):
- fragment = '1...3\n'
- tokens = [
- (Number.Integer, '1'),
- (Operator, '...'),
- (Number.Integer, '3'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_range_syntax3(lexer):
- fragment = '1 .. 3\n'
- tokens = [
- (Number.Integer, '1'),
- (Text, ' '),
- (Operator, '..'),
- (Text, ' '),
- (Number.Integer, '3'),
- (Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_interpolation_nested_curly(lexer):
- fragment = (
- '"A#{ (3..5).group_by { |x| x/2}.map '
- 'do |k,v| "#{k}" end.join }" + "Z"\n')
-
- tokens = [
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'A'),
- (Token.Literal.String.Interpol, '#{'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Literal.Number.Integer, '3'),
- (Token.Operator, '..'),
- (Token.Literal.Number.Integer, '5'),
- (Token.Punctuation, ')'),
- (Token.Operator, '.'),
- (Token.Name, 'group_by'),
- (Token.Text, ' '),
- (Token.Literal.String.Interpol, '{'),
- (Token.Text, ' '),
- (Token.Operator, '|'),
- (Token.Name, 'x'),
- (Token.Operator, '|'),
- (Token.Text, ' '),
- (Token.Name, 'x'),
- (Token.Operator, '/'),
- (Token.Literal.Number.Integer, '2'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Operator, '.'),
- (Token.Name, 'map'),
- (Token.Text, ' '),
- (Token.Keyword, 'do'),
- (Token.Text, ' '),
- (Token.Operator, '|'),
- (Token.Name, 'k'),
- (Token.Punctuation, ','),
- (Token.Name, 'v'),
- (Token.Operator, '|'),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Interpol, '#{'),
- (Token.Name, 'k'),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, ' '),
- (Token.Keyword, 'end'),
- (Token.Operator, '.'),
- (Token.Name, 'join'),
- (Token.Text, ' '),
- (Token.Literal.String.Interpol, '}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, ' '),
- (Token.Operator, '+'),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, 'Z'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_operator_methods(lexer):
- fragment = 'x.==4\n'
- tokens = [
- (Token.Name, 'x'),
- (Token.Operator, '.'),
- (Token.Name.Operator, '=='),
- (Token.Literal.Number.Integer, '4'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_escaped_bracestring(lexer):
- fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n'
- tokens = [
- (Token.Name, 'str'),
- (Token.Operator, '.'),
- (Token.Name, 'gsub'),
- (Token.Punctuation, '('),
- (Token.Literal.String.Regex, '%r{'),
- (Token.Literal.String.Regex, '\\\\'),
- (Token.Literal.String.Regex, '\\\\'),
- (Token.Literal.String.Regex, '}'),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"'),
- (Token.Literal.String.Double, '/'),
- (Token.Literal.String.Double, '"'),
- (Token.Punctuation, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_shell.py b/tests/test_shell.py
deleted file mode 100644
index 7704c7eb..00000000
--- a/tests/test_shell.py
+++ /dev/null
@@ -1,238 +0,0 @@
-"""
- Basic Shell Tests
- ~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import BashLexer, BashSessionLexer, MSDOSSessionLexer, \
- PowerShellSessionLexer
-
-
-@pytest.fixture(scope='module')
-def lexer_bash():
- yield BashLexer()
-
-
-@pytest.fixture(scope='module')
-def lexer_session():
- yield BashSessionLexer()
-
-
-@pytest.fixture(scope='module')
-def lexer_msdos():
- yield MSDOSSessionLexer()
-
-
-@pytest.fixture(scope='module')
-def lexer_powershell_session():
- yield PowerShellSessionLexer()
-
-
-def test_curly_no_escape_and_quotes(lexer_bash):
- fragment = 'echo "${a//["b"]/}"\n'
- tokens = [
- (Token.Name.Builtin, 'echo'),
- (Token.Text, ' '),
- (Token.Literal.String.Double, '"'),
- (Token.String.Interpol, '${'),
- (Token.Name.Variable, 'a'),
- (Token.Punctuation, '//['),
- (Token.Literal.String.Double, '"b"'),
- (Token.Punctuation, ']/'),
- (Token.String.Interpol, '}'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_bash.get_tokens(fragment)) == tokens
-
-
-def test_curly_with_escape(lexer_bash):
- fragment = 'echo ${a//[\\"]/}\n'
- tokens = [
- (Token.Name.Builtin, 'echo'),
- (Token.Text, ' '),
- (Token.String.Interpol, '${'),
- (Token.Name.Variable, 'a'),
- (Token.Punctuation, '//['),
- (Token.Literal.String.Escape, '\\"'),
- (Token.Punctuation, ']/'),
- (Token.String.Interpol, '}'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_bash.get_tokens(fragment)) == tokens
-
-
-def test_parsed_single(lexer_bash):
- fragment = "a=$'abc\\''\n"
- tokens = [
- (Token.Name.Variable, 'a'),
- (Token.Operator, '='),
- (Token.Literal.String.Single, "$'abc\\''"),
- (Token.Text, '\n'),
- ]
- assert list(lexer_bash.get_tokens(fragment)) == tokens
-
-
-def test_short_variable_names(lexer_bash):
- fragment = 'x="$"\ny="$_"\nz="$abc"\n'
- tokens = [
- # single lone $
- (Token.Name.Variable, 'x'),
- (Token.Operator, '='),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '$'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n'),
- # single letter shell var
- (Token.Name.Variable, 'y'),
- (Token.Operator, '='),
- (Token.Literal.String.Double, '"'),
- (Token.Name.Variable, '$_'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n'),
- # multi-letter user var
- (Token.Name.Variable, 'z'),
- (Token.Operator, '='),
- (Token.Literal.String.Double, '"'),
- (Token.Name.Variable, '$abc'),
- (Token.Literal.String.Double, '"'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_bash.get_tokens(fragment)) == tokens
-
-
-def test_array_nums(lexer_bash):
- fragment = 'a=(1 2 3)\n'
- tokens = [
- (Token.Name.Variable, 'a'),
- (Token.Operator, '='),
- (Token.Operator, '('),
- (Token.Literal.Number, '1'),
- (Token.Text, ' '),
- (Token.Literal.Number, '2'),
- (Token.Text, ' '),
- (Token.Literal.Number, '3'),
- (Token.Operator, ')'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_bash.get_tokens(fragment)) == tokens
-
-
-def test_end_of_line_nums(lexer_bash):
- fragment = 'a=1\nb=2 # comment\n'
- tokens = [
- (Token.Name.Variable, 'a'),
- (Token.Operator, '='),
- (Token.Literal.Number, '1'),
- (Token.Text, '\n'),
- (Token.Name.Variable, 'b'),
- (Token.Operator, '='),
- (Token.Literal.Number, '2'),
- (Token.Text, ' '),
- (Token.Comment.Single, '# comment\n'),
- ]
- assert list(lexer_bash.get_tokens(fragment)) == tokens
-
-
-def test_newline_in_echo(lexer_session):
- fragment = '$ echo \\\nhi\nhi\n'
- tokens = [
- (Token.Generic.Prompt, '$ '),
- (Token.Name.Builtin, 'echo'),
- (Token.Text, ' '),
- (Token.Literal.String.Escape, '\\\n'),
- (Token.Text, 'hi'),
- (Token.Text, '\n'),
- (Token.Generic.Output, 'hi\n'),
- ]
- assert list(lexer_session.get_tokens(fragment)) == tokens
-
-
-def test_newline_in_ls(lexer_session):
- fragment = '$ ls \\\nhi\nhi\n'
- tokens = [
- (Token.Generic.Prompt, '$ '),
- (Token.Text, 'ls'),
- (Token.Text, ' '),
- (Token.Literal.String.Escape, '\\\n'),
- (Token.Text, 'hi'),
- (Token.Text, '\n'),
- (Token.Generic.Output, 'hi\n'),
- ]
- assert list(lexer_session.get_tokens(fragment)) == tokens
-
-
-def test_comment_after_prompt(lexer_session):
- fragment = '$# comment'
- tokens = [
- (Token.Generic.Prompt, '$'),
- (Token.Comment.Single, '# comment\n'),
- ]
- assert list(lexer_session.get_tokens(fragment)) == tokens
-
-
-def test_msdos_gt_only(lexer_msdos):
- fragment = '> py\nhi\n'
- tokens = [
- (Token.Generic.Prompt, '>'),
- (Token.Text, ' '),
- (Token.Text, 'py'),
- (Token.Text, '\n'),
- (Token.Generic.Output, 'hi\n'),
- ]
- assert list(lexer_msdos.get_tokens(fragment)) == tokens
-
-
-def test_powershell_session(lexer_powershell_session):
- fragment = 'PS C:\\> Get-ChildItem\n'
- tokens = [
- (Token.Generic.Prompt, 'PS C:\\> '),
- (Token.Name.Builtin, 'Get-ChildItem'),
- (Token.Text, '\n')
- ]
- assert list(lexer_powershell_session.get_tokens(fragment)) == tokens
-
- fragment = 'PS> Get-ChildItem\n'
- tokens = [
- (Token.Generic.Prompt, 'PS> '),
- (Token.Name.Builtin, 'Get-ChildItem'),
- (Token.Text, '\n')
- ]
- assert list(lexer_powershell_session.get_tokens(fragment)) == tokens
-
- fragment = 'PS > Get-ChildItem\n'
- tokens = [
- (Token.Generic.Prompt, 'PS > '),
- (Token.Name.Builtin, 'Get-ChildItem'),
- (Token.Text, '\n')
- ]
- assert list(lexer_powershell_session.get_tokens(fragment)) == tokens
-
-
-def test_powershell_remoting_session(lexer_powershell_session):
- fragment = '[Long-NetBIOS-Hostname]: PS C:\\> Get-ChildItem\n'
- tokens = [
- (Token.Generic.Prompt, '[Long-NetBIOS-Hostname]: PS C:\\> '),
- (Token.Name.Builtin, 'Get-ChildItem'),
- (Token.Text, '\n')
- ]
- assert list(lexer_powershell_session.get_tokens(fragment)) == tokens
-
-
-def test_virtualenv(lexer_session):
- fragment = '(env) [~/project]$ foo -h\n'
- tokens = [
- (Token.Generic.Prompt.VirtualEnv, '(env)'),
- (Token.Text, ' '),
- (Token.Generic.Prompt, '[~/project]$ '),
- (Token.Text, 'foo'),
- (Token.Text, ' '),
- (Token.Text, '-h'),
- (Token.Text, '\n'),
- ]
- assert list(lexer_session.get_tokens(fragment)) == tokens
diff --git a/tests/test_smarty.py b/tests/test_smarty.py
deleted file mode 100644
index b7f7cc72..00000000
--- a/tests/test_smarty.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""
- Basic SmartyLexer Test
- ~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers import SmartyLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield SmartyLexer()
-
-
-def test_nested_curly(lexer):
- fragment = '{templateFunction param={anotherFunction} param2=$something}\n'
- tokens = [
- (Token.Comment.Preproc, '{'),
- (Token.Name.Function, 'templateFunction'),
- (Token.Text, ' '),
- (Token.Name.Attribute, 'param'),
- (Token.Operator, '='),
- (Token.Comment.Preproc, '{'),
- (Token.Name.Attribute, 'anotherFunction'),
- (Token.Comment.Preproc, '}'),
- (Token.Text, ' '),
- (Token.Name.Attribute, 'param2'),
- (Token.Operator, '='),
- (Token.Name.Variable, '$something'),
- (Token.Comment.Preproc, '}'),
- (Token.Other, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_templates.py b/tests/test_templates.py
index fb6a5072..f557396b 100644
--- a/tests/test_templates.py
+++ b/tests/test_templates.py
@@ -41,66 +41,3 @@ def test_mason_unnamed_block(lexerMason):
"""
res = lexerMason.analyse_text(text)
assert res == 1.0
-
-def test_mason_handles_tags_correctly(lexerMason):
- fragment = "<%class>\nhas 'foo';\nhas 'bar' => (required => 1);\nhas 'baz' => (isa => 'Int', default => 17);\n</%class>\n"
- tokens = [
- (Token.Name.Tag, '<%class>'),
- (Token.Text, '\n'),
- (Token.Name, ''),
- (Token.Name, 'has'),
- (Token.Text, ' '),
- (Token.Literal.String, "'foo'"),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Name, ''),
- (Token.Name, 'has'),
- (Token.Text, ' '),
- (Token.Literal.String, "'bar'"),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Operator, '>'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Name, ''),
- (Token.Name, 'required'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Operator, '>'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '1'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Name, ''),
- (Token.Name, 'has'),
- (Token.Text, ' '),
- (Token.Literal.String, "'baz'"),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Operator, '>'),
- (Token.Text, ' '),
- (Token.Punctuation, '('),
- (Token.Name, ''),
- (Token.Name, 'isa'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Operator, '>'),
- (Token.Text, ' '),
- (Token.Literal.String, "'Int'"),
- (Token.Punctuation, ','),
- (Token.Text, ' '),
- (Token.Name, ''),
- (Token.Name, 'default'),
- (Token.Text, ' '),
- (Token.Operator, '='),
- (Token.Operator, '>'),
- (Token.Text, ' '),
- (Token.Literal.Number.Integer, '17'),
- (Token.Punctuation, ')'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- (Token.Name.Tag, '</%class>'),
- (Token.Text, '\n'),
- ]
- assert list(lexerMason.get_tokens(fragment)) == tokens \ No newline at end of file
diff --git a/tests/test_textfmts.py b/tests/test_textfmts.py
deleted file mode 100644
index f50a302e..00000000
--- a/tests/test_textfmts.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
- Basic Tests for textfmts
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Token
-from pygments.lexers.textfmts import HttpLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield HttpLexer()
-
-
-def test_http_status_line(lexer):
- fragment = 'HTTP/1.1 200 OK\n'
- tokens = [
- (Token.Keyword.Reserved, 'HTTP'),
- (Token.Operator, '/'),
- (Token.Number, '1.1'),
- (Token.Text, ' '),
- (Token.Number, '200'),
- (Token.Text, ' '),
- (Token.Name.Exception, 'OK'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_http_status_line_without_reason_phrase(lexer):
- fragment = 'HTTP/1.1 200\n'
- tokens = [
- (Token.Keyword.Reserved, 'HTTP'),
- (Token.Operator, '/'),
- (Token.Number, '1.1'),
- (Token.Text, ' '),
- (Token.Number, '200'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_http_status_line_without_reason_phrase_rfc_7230(lexer):
- fragment = 'HTTP/1.1 200 \n'
- tokens = [
- (Token.Keyword.Reserved, 'HTTP'),
- (Token.Operator, '/'),
- (Token.Number, '1.1'),
- (Token.Text, ' '),
- (Token.Number, '200'),
- (Token.Text, ' '),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_application_xml(lexer):
- fragment = 'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
- tokens = [
- (Token.Name.Tag, '<foo'),
- (Token.Name.Tag, '>'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens
-
-
-def test_application_calendar_xml(lexer):
- fragment = 'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
- tokens = [
- (Token.Name.Tag, '<foo'),
- (Token.Name.Tag, '>'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens
diff --git a/tests/test_usd.py b/tests/test_usd.py
index ed474af1..081e37d0 100755
--- a/tests/test_usd.py
+++ b/tests/test_usd.py
@@ -63,509 +63,3 @@ class Features(_Common):
self.assertEqual(
[(Name.Namespace, code), (Whitespace, "\n")], self._get(code),
)
-
- def test_attribute(self):
- """Test different attribute syntax styles."""
- normal = "double foo = 8.0"
-
- self.assertEqual(
- [
- (Keyword.Type, "double"),
- (Whitespace, " "),
- (Name.Attribute, "foo"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Number, "8.0"),
- (Whitespace, "\n"),
- ],
- self._get(normal),
- )
-
- custom = "custom double foo = 8.0"
-
- self.assertEqual(
- [
- (Keyword.Token, "custom"),
- (Whitespace, " "),
- (Keyword.Type, "double"),
- (Whitespace, " "),
- (Name.Attribute, "foo"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Number, "8.0"),
- (Whitespace, "\n"),
- ],
- self._get(custom),
- )
-
- uniform = "uniform double foo = 8.0"
-
- self.assertEqual(
- [
- (Keyword.Token, "uniform"),
- (Whitespace, " "),
- (Keyword.Type, "double"),
- (Whitespace, " "),
- (Name.Attribute, "foo"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Number, "8.0"),
- (Whitespace, "\n"),
- ],
- self._get(uniform),
- )
-
- custom_uniform = "custom uniform double foo = 8.0"
-
- self.assertEqual(
- [
- (Keyword.Token, "custom"),
- (Whitespace, " "),
- (Keyword.Token, "uniform"),
- (Whitespace, " "),
- (Keyword.Type, "double"),
- (Whitespace, " "),
- (Name.Attribute, "foo"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Number, "8.0"),
- (Whitespace, "\n"),
- ],
- self._get(custom_uniform),
- )
-
- underscore = "custom double foo_underscore_name = 8.0"
-
- self.assertEqual(
- [
- (Keyword.Token, "custom"),
- (Text.Whitespace, " "),
- (Keyword.Type, "double"),
- (Text.Whitespace, " "),
- (Name.Attribute, "foo_underscore_name"),
- (Text.Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Number, "8.0"),
- (Whitespace, "\n"),
- ],
- self._get(underscore),
- )
-
- array = "double[] foo_underscore_name = [10.1, 12.0, 13]"
-
- self.assertEqual(
- [
- (Keyword.Type, "double[]"),
- (Text.Whitespace, " "),
- (Name.Attribute, "foo_underscore_name"),
- (Text.Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "["),
- (Number, "10.1"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "12.0"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "13"),
- (Punctuation, "]"),
- (Whitespace, "\n"),
- ],
- self._get(array),
- )
-
- namespaced = "double[] primvar:foo_thing = [10.1, 12.0, 13]"
-
- self.assertEqual(
- [
- (Keyword.Type, "double[]"),
- (Whitespace, " "),
- (Name.Attribute, "primvar:foo_thing"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "["),
- (Number, "10.1"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "12.0"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "13"),
- (Punctuation, "]"),
- (Whitespace, "\n"),
- ],
- self._get(namespaced),
- )
-
- timesamples = textwrap.dedent(
- """\
- custom int[] foo = [8, 10, 14]
- custom int[] foo.timeSamples = {
- 1: [8, 0, 14],
- 2: [-8, 0, 14],
- }
- """
- )
-
- self.assertEqual(
- [
- (Keyword.Token, "custom"),
- (Whitespace, " "),
- (Keyword.Type, "int[]"),
- (Whitespace, " "),
- (Name.Attribute, "foo"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "["),
- (Number, "8"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "10"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "14"),
- (Punctuation, "]"),
- (Whitespace, "\n"),
- (Keyword.Token, "custom"),
- (Whitespace, " "),
- (Keyword.Type, "int[]"),
- (Whitespace, " "),
- (Name.Attribute, "foo"),
- (Text, "."),
- (Name.Keyword.Tokens, "timeSamples"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "{"),
- (Whitespace, "\n "),
- (Number, "1"),
- (Punctuation, ":"),
- (Whitespace, " "),
- (Punctuation, "["),
- (Number, "8"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "0"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "14"),
- (Punctuation, "]"),
- (Punctuation, ","),
- (Whitespace, "\n "),
- (Number, "2"),
- (Punctuation, ":"),
- (Whitespace, " "),
- (Punctuation, "["),
- (Number, "-8"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "0"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "14"),
- (Punctuation, "]"),
- (Punctuation, ","),
- (Whitespace, "\n"),
- (Punctuation, "}"),
- (Whitespace, "\n"),
- ],
- self._get(timesamples),
- )
-
- def test_string_priority(self):
- """Make sure that no other rules override a string match."""
- code = textwrap.dedent(
- '''\
- """
- custom int[] foo = [8, 10, 14]
- """'''
- )
-
- self.assertEqual(
- [
- (String, '"""\ncustom int[] foo = [8, 10, 14]\n"""'),
- (Whitespace, "\n"),
- ],
- self._get(code),
- )
-
- def test_numbers(self):
- """Check that different number representations work."""
- code = "8 8.0123312132, -4 -14.123 1e10 0.1e10 10.123e+10 0.123e-14"
-
- self.assertEqual(
- [
- (Number, "8"),
- (Whitespace, " "),
- (Number, "8.0123312132"),
- (Punctuation, ","),
- (Whitespace, " "),
- (Number, "-4"),
- (Whitespace, " "),
- (Number, "-14.123"),
- (Whitespace, " "),
- (Number, "1e10"),
- (Whitespace, " "),
- (Number, "0.1e10"),
- (Whitespace, " "),
- (Number, "10.123e+10"),
- (Whitespace, " "),
- (Number, "0.123e-14"),
- (Whitespace, "\n"),
- ],
- self._get(code),
- )
-
- def test_composition_arcs(self):
- """Test composition arc syntax highlighting."""
- code = textwrap.dedent(
- """
- def Xform "BottleMedical" (
- kind = "prop"
- payload = @./BottleMedical_payload.usd@</BottleMedical>
- variants = {
- string modelingVariant = "LiquidBottleLg"
- string shadingComplexity = "full"
- }
- add variantSets = ["modelingVariant", "shadingComplexity"]
- )
- {
- variantSet "modelingVariant" = {
- "ALL_VARIANTS" {
- }
- }
- }
- """
- )
-
- self.assertEqual(
- [
- (Keyword.Tokens, "def"),
- (Whitespace, " "),
- (Text, "Xform"),
- (Whitespace, " "),
- (String, '"BottleMedical"'),
- (Whitespace, " "),
- (Punctuation, "("),
- (Whitespace, "\n "),
- (Name.Builtins, "kind"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (String, '"prop"'),
- (Whitespace, "\n "),
- (Keyword.Tokens, "payload"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (String.Interpol, "@./BottleMedical_payload.usd@"),
- (Name.Namespace, "</BottleMedical>"),
- (Whitespace, "\n "),
- (Keyword.Tokens, "variants"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "{"),
- (Whitespace, "\n "),
- (Keyword.Type, "string"),
- (Whitespace, " "),
- (Name.Attribute, "modelingVariant"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (String, '"LiquidBottleLg"'),
- (Whitespace, "\n "),
- (Keyword.Type, "string"),
- (Whitespace, " "),
- (Name.Attribute, "shadingComplexity"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (String, '"full"'),
- (Whitespace, "\n "),
- (Punctuation, "}"),
- (Whitespace, "\n "),
- (Keyword.Type, "add"),
- (Text.Whitespace, " "),
- (Name.Attribute, "variantSets"),
- (Text.Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "["),
- (String, '"modelingVariant"'),
- (Punctuation, ","),
- (Whitespace, " "),
- (String, '"shadingComplexity"'),
- (Punctuation, "]"),
- (Whitespace, "\n"),
- (Punctuation, ")"),
- (Whitespace, "\n"),
- (Punctuation, "{"),
- (Whitespace, "\n "),
- (Keyword.Tokens, "variantSet"),
- (Whitespace, " "),
- (String, '"modelingVariant"'),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "{"),
- (Whitespace, "\n "),
- (String, '"ALL_VARIANTS"'),
- (Whitespace, " "),
- (Punctuation, "{"),
- (Whitespace, "\n "),
- (Punctuation, "}"),
- (Whitespace, "\n "),
- (Punctuation, "}"),
- (Whitespace, "\n"),
- (Punctuation, "}"),
- (Whitespace, "\n"),
- ],
- self._get(code),
- )
-
- def test_string_single_line(self):
- """Check a single string for the correct highlight."""
- code = '"Some \'text"'
-
- self.assertEqual(
- [(String, code), (Whitespace, "\n")], self._get(code),
- )
-
- def test_string_multiple_line(self):
- """Check that different multi-line strings work correctly."""
- code1 = textwrap.dedent(
- '''\
- """
- Some text multiline
- """'''
- )
-
- self.assertEqual(
- [(String, code1), (Whitespace, "\n"),], self._get(code1),
- )
-
- code2 = textwrap.dedent(
- '''\
- """Some text multiline
- """'''
- )
-
- self.assertEqual(
- [(String, code2), (Whitespace, "\n"),], self._get(code2),
- )
-
- code3 = textwrap.dedent(
- '''\
- """
- Some text multiline"""'''
- )
-
- self.assertEqual(
- [(String, code3), (Whitespace, "\n"),], self._get(code3),
- )
-
-
-class EdgeCases(_Common):
- """Any bugs / weird cases that deserve special attention."""
-
- def test_metadata(self):
- """Make sure metadata [the stuff inside ()s] don't match as Attributes."""
- code = textwrap.dedent(
- """
- float[] primvars:skel:jointWeights = [1] (
- elementSize = 1
- interpolation = "constant"
- )
- """
- )
-
- self.assertEqual(
- [
- (Keyword.Type, "float[]"),
- (Whitespace, " "),
- (Name.Attribute, "primvars:skel:jointWeights"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Punctuation, "["),
- (Number, "1"),
- (Punctuation, "]"),
- (Whitespace, " "),
- (Punctuation, "("),
- (Whitespace, "\n "),
- (Name.Builtins, "elementSize"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (Number, "1"),
- (Whitespace, "\n "),
- (Name.Builtins, "interpolation"),
- (Whitespace, " "),
- (Operator, "="),
- (Whitespace, " "),
- (String, '"constant"'),
- (Whitespace, "\n"),
- (Punctuation, ")"),
- (Whitespace, "\n"),
- ],
- self._get(code),
- )
-
- def test_outer_match(self):
- """Make sure that text between located between quotes and @@s are not matched."""
- at_sign = "@firststring@ something else @secondstring@"
-
- self.assertEqual(
- [
- (String.Interpol, "@firststring@"),
- (Whitespace, " "),
- (Text, "something"),
- (Whitespace, " "),
- (Text, "else"),
- (Whitespace, " "),
- (String.Interpol, "@secondstring@"),
- (Whitespace, "\n"),
- ],
- self._get(at_sign),
- )
-
- single = "'firststring' something else 'secondstring'"
-
- self.assertEqual(
- [
- (String, "'firststring'"),
- (Whitespace, " "),
- (Text, "something"),
- (Whitespace, " "),
- (Text, "else"),
- (Whitespace, " "),
- (String, "'secondstring'"),
- (Whitespace, "\n"),
- ],
- self._get(single),
- )
-
- double = "'firststring' something else 'secondstring'"
-
- self.assertEqual(
- [
- (String, "'firststring'"),
- (Whitespace, " "),
- (Text, "something"),
- (Whitespace, " "),
- (Text, "else"),
- (Whitespace, " "),
- (String, "'secondstring'"),
- (Whitespace, "\n"),
- ],
- self._get(double),
- )
diff --git a/tests/test_whiley.py b/tests/test_whiley.py
deleted file mode 100644
index cc8248e8..00000000
--- a/tests/test_whiley.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
- Whiley Test
- ~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.lexers import WhileyLexer
-from pygments.token import Token
-
-
-@pytest.fixture(scope='module')
-def lexer():
- yield WhileyLexer()
-
-
-def test_whiley_operator(lexer):
- fragment = '123 \u2200 x\n'
- tokens = [
- (Token.Literal.Number.Integer, '123'),
- (Token.Text, ' '),
- (Token.Operator, '\u2200'),
- (Token.Text, ' '),
- (Token.Name, 'x'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_yang.py b/tests/test_yang.py
deleted file mode 100644
index 20bbaca6..00000000
--- a/tests/test_yang.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""
- Basic Yang Test
- ~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Operator, Number, Text, Token
-from pygments.lexers import YangLexer
-
-@pytest.fixture(scope='module')
-def lexer():
- yield YangLexer()
-
-def test_namespace_1(lexer):
- """
- Namespace `urn:test:std:yang` should not be explicitly highlighted
- """
- fragment = 'namespace urn:test:std:yang;\n'
- tokens = [
- (Token.Keyword, 'namespace'),
- (Token.Text.Whitespace, ' '),
- (Token.Name.Variable, 'urn:test:std:yang'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_namespace_2(lexer):
- """
- namespace-prefix `yang` should be explicitly highlighted
- """
- fragment = 'type yang:counter64;\n'
- tokens = [
- (Token.Keyword, 'type'),
- (Token.Text.Whitespace, ' '),
- (Token.Name.Namespace, 'yang'),
- (Token.Punctuation, ':'),
- (Token.Name.Variable, 'counter64'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_revision_date(lexer):
- """
- Revision-date `2020-08-03` should be explicitly highlighted
- """
- fragment = 'revision 2020-03-08{\n'
- tokens = [
- (Token.Keyword, 'revision'),
- (Token.Text.Whitespace, ' '),
- (Token.Name.Label, '2020-03-08'),
- (Token.Punctuation, '{'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_integer_value(lexer):
- """
- Integer value `5` should be explicitly highlighted
- """
- fragment = 'value 5;\n'
- tokens = [
- (Token.Keyword, 'value'),
- (Token.Text.Whitespace, ' '),
- (Token.Number.Integer, '5'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_string_value(lexer):
- """
- String value `"5"` should be not explicitly highlighted
- """
- fragment = 'value "5";\n'
- tokens = [
- (Token.Keyword, 'value'),
- (Token.Text.Whitespace, ' '),
- (Token.String.Double, '"5"'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-def test_float_value(lexer):
- """
- Float value `1.1` should be explicitly highlighted
- """
- fragment = 'yang-version 1.1;\n'
- tokens = [
- (Token.Keyword, 'yang-version'),
- (Token.Text.Whitespace, ' '),
- (Token.Number.Float, '1.1'),
- (Token.Punctuation, ';'),
- (Token.Text.Whitespace, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens