-rw-r--r--  coverage/config.py        |  1 -
-rw-r--r--  coverage/phystokens.py    |  6 +++---
-rw-r--r--  tests/test_phystokens.py  |  9 ++++-----
 3 files changed, 7 insertions(+), 9 deletions(-)
diff --git a/coverage/config.py b/coverage/config.py
index 7b14267..02c4917 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -16,7 +16,6 @@ class HandyConfigParser(configparser.RawConfigParser):
"""Our specialization of ConfigParser."""
def __init__(self, section_prefix):
- # pylint: disable=super-init-not-called
configparser.RawConfigParser.__init__(self)
self.section_prefix = section_prefix
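Note: the dropped pragma was for pylint's super-init-not-called check, which is satisfied by an explicit base-class __init__ call just as it is by super(), so the suppression was presumably redundant with current pylint. A minimal illustration of the two equivalent spellings (hypothetical class names, assuming pylint's documented behavior for W0231):

    import configparser

    class WithSuper(configparser.RawConfigParser):
        def __init__(self, section_prefix):
            super(WithSuper, self).__init__()   # the usual spelling
            self.section_prefix = section_prefix

    class WithExplicitBase(configparser.RawConfigParser):
        def __init__(self, section_prefix):
            # An explicit base-class call also initializes the ancestor,
            # so super-init-not-called does not fire here either.
            configparser.RawConfigParser.__init__(self)
            self.section_prefix = section_prefix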
diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index 1bc6330..6d8e1ec 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -67,6 +67,7 @@ def phys_tokens(toks):
         last_lineno = elineno


+@contract(source='unicode')
 def source_token_lines(source):
     """Generate a series of lines, one for each line in `source`.

@@ -135,11 +136,10 @@ class CachedTokenizer(object):
         self.last_text = None
         self.last_tokens = None

+    @contract(text='unicode')
     def generate_tokens(self, text):
         """A stand-in for `tokenize.generate_tokens`."""
-        # Check the type first so we don't compare bytes to unicode and get
-        # warnings.
-        if type(text) != type(self.last_text) or text != self.last_text:
+        if text != self.last_text:
             self.last_text = text
             readline = iternext(text.splitlines(True))
             self.last_tokens = list(tokenize.generate_tokens(readline))
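The simplification works because the new @contract(text='unicode') guarantees the argument's type on entry, making the old defensive type check (added to avoid bytes-to-unicode comparison warnings on Python 2) redundant. A minimal sketch of the same single-entry caching pattern, with a plain isinstance check standing in for the PyContracts decorator (the class name DemoCachedTokenizer is hypothetical):

    import io
    import tokenize

    class DemoCachedTokenizer(object):
        """Cache the token list for the most recently tokenized text."""
        def __init__(self):
            self.last_text = None
            self.last_tokens = None

        def generate_tokens(self, text):
            # Stand-in for @contract(text='unicode'): reject non-text
            # input up front, so we never compare bytes against a cached
            # unicode string.
            if not isinstance(text, str):
                raise TypeError("text must be str, got %r" % type(text))
            if text != self.last_text:
                self.last_text = text
                # io.StringIO(...).readline plays the same role as
                # iternext(text.splitlines(True)) in the real class.
                readline = io.StringIO(text).readline
                self.last_tokens = list(tokenize.generate_tokens(readline))
            return self.last_tokens

Caching only the most recent text is enough here, presumably because reporting tokenizes the same source several times in a row.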
diff --git a/tests/test_phystokens.py b/tests/test_phystokens.py
index 19f813a..6fa4a44 100644
--- a/tests/test_phystokens.py
+++ b/tests/test_phystokens.py
@@ -6,17 +6,18 @@ import re
 from coverage import env
 from coverage.phystokens import source_token_lines, source_encoding
 from coverage.phystokens import neuter_encoding_declaration
+from coverage.python import get_python_source

 from tests.coveragetest import CoverageTest


-SIMPLE = """\
+SIMPLE = u"""\
 # yay!
 def foo():
     say('two = %d' % 2)
 """

-MIXED_WS = """\
+MIXED_WS = u"""\
 def hello():
         a="Hello world!"
 \tb="indented"
@@ -45,9 +46,7 @@ class PhysTokensTest(CoverageTest):
     def check_file_tokenization(self, fname):
         """Use the contents of `fname` for `check_tokenization`."""
-        with open(fname) as f:
-            source = f.read()
-        self.check_tokenization(source)
+        self.check_tokenization(get_python_source(fname))

     def test_simple(self):
         self.assertEqual(list(source_token_lines(SIMPLE)),
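The test change and the new u"""...""" prefixes both follow from the unicode contracts above: the fixtures must be text, and files must be decoded before tokenizing. A bare open(fname).read() returns locale-decoded (or byte) data and ignores any PEP 263 coding declaration, while get_python_source returns properly decoded unicode. A rough stand-in using only the standard library (read_python_source is a hypothetical helper; the real coverage.python.get_python_source does more, e.g. handling sources inside zip files):

    import tokenize

    def read_python_source(fname):
        # tokenize.open() detects the encoding from a BOM or a PEP 263
        # coding declaration, then opens the file in text mode.
        with tokenize.open(fname) as f:
            return f.read()

For example, read_python_source('tests/test_phystokens.py') yields unicode text suitable to pass straight to source_token_lines.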