From debda34e2d4f28d6d369cdafdcba4791702f63fc Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sun, 17 Jan 2021 07:44:58 -0800 Subject: Run pyupgrade across codebase to modernize syntax and patterns (#1622) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit pyupgrade is a tool to automatically upgrade syntax for newer versions of the Python language. The project has been Python 3 only since 35544e2fc6eed0ce4a27ec7285aac71ff0ddc473, allowing for several cleanups: - Remove unnecessary "-*- coding: utf-8 -*-" cookie. Python 3 reads all source files as utf-8 by default. - Replace IOError/EnvironmentError with OSError. Python 3 unified these exceptions. The old names are aliases only. - Use the Python 3 shorter super() syntax. - Remove "utf8" argument from encode/decode. In Python 3, this value is the default. - Remove "r" from open() calls. In Python 3, this value is the default. - Remove u prefix from Unicode strings. In Python 3, all strings are Unicode. - Replace io.open() with builtin open(). In Python 3, these functions are functionally equivalent. Co-authored-by: Matthäus G. 
Chajdas --- tests/test_coffeescript.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) (limited to 'tests/test_coffeescript.py') diff --git a/tests/test_coffeescript.py b/tests/test_coffeescript.py index 60ddfcd8..5d69a8aa 100644 --- a/tests/test_coffeescript.py +++ b/tests/test_coffeescript.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ CoffeeScript tests ~~~~~~~~~~~~~~~~~~ @@ -54,17 +53,17 @@ def test_coffee_slashes(lexer, golden): def test_mixed_slashes(lexer): - fragment = u'a?/foo/:1/2;\n' + fragment = 'a?/foo/:1/2;\n' tokens = [ - (Token.Name.Other, u'a'), - (Token.Operator, u'?'), - (Token.Literal.String.Regex, u'/foo/'), - (Token.Operator, u':'), - (Token.Literal.Number.Integer, u'1'), - (Token.Operator, u'/'), - (Token.Literal.Number.Integer, u'2'), - (Token.Punctuation, u';'), - (Token.Text, u'\n'), + (Token.Name.Other, 'a'), + (Token.Operator, '?'), + (Token.Literal.String.Regex, '/foo/'), + (Token.Operator, ':'), + (Token.Literal.Number.Integer, '1'), + (Token.Operator, '/'), + (Token.Literal.Number.Integer, '2'), + (Token.Punctuation, ';'), + (Token.Text, '\n'), ] assert list(lexer.get_tokens(fragment)) == tokens -- cgit v1.2.1