-rw-r--r--  CHANGES                                  |   3
-rw-r--r--  pygments/lexers/functional.py            |  14
-rw-r--r--  pygments/lexers/text.py                  |   2
-rw-r--r--  tests/examplefiles/AcidStateAdvanced.hs  | 209
-rw-r--r--  tests/test_basic_api.py                  |  32
-rw-r--r--  tests/test_cmdline.py                    |  46
-rw-r--r--  tests/test_examplefiles.py               |   6
-rw-r--r--  tests/test_html_formatter.py             |  40
-rw-r--r--  tests/test_latex_formatter.py            |  18
-rw-r--r--  tests/test_regexlexer.py                 |   2
-rw-r--r--  tests/test_token.py                      |  17
-rw-r--r--  tests/test_using_api.py                  |   2
-rw-r--r--  tests/test_util.py                       |  53
13 files changed, 343 insertions(+), 101 deletions(-)
diff --git a/CHANGES b/CHANGES
index 424b827e..273ed2df 100644
--- a/CHANGES
+++ b/CHANGES
@@ -8,6 +8,9 @@ Version 1.5.1
-------------
(in development)
+- Fix Template Haskell highlighting (PR#63)
+
+
Version 1.5
-----------
(codename Zeitdilatation, released Mar 10, 2012)
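Note (not part of the diff): the "Template Haskell highlighting" entry refers to the two regex tweaks in pygments/lexers/functional.py below. A minimal sketch of what they affect, assuming the Pygments API of this release line:

    from pygments.lexers import HaskellLexer

    # Template Haskell name quotes: 'value for value-level names and
    # ''Type for type-level names, as used heavily in the new example
    # file added further down in this commit.
    code = u"$(makeAcidic ''CountState ['incCount])\n"
    tokens = list(HaskellLexer().get_tokens(code))

    # With the patched rules, ''CountState should come back as a single
    # Keyword.Type token and 'incCount as a single Name token, instead
    # of stray quote/error tokens.
    print([(tok, text) for tok, text in tokens])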
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index ee44f7ce..d4f9a32c 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -358,8 +358,8 @@ class HaskellLexer(RegexLexer):
(r'\berror\b', Name.Exception),
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'^[_a-z][\w\']*', Name.Function),
- (r'[_a-z][\w\']*', Name),
- (r'[A-Z][\w\']*', Keyword.Type),
+ (r"'?[_a-z][\w']*", Name),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
# Operators
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
@@ -1696,7 +1696,7 @@ class ElixirLexer(RegexLexer):
r'defp|def|defprotocol|defimpl|defrecord|defmacro|defdelegate|'
r'defexception|exit|raise|throw)\b(?![?!])|'
r'(?<!\.)\b(do|\-\>)\b\s*', Keyword),
- (r'\b(import|require|use|recur|quote|unquote|super)\b(?![?!])',
+ (r'\b(import|require|use|recur|quote|unquote|super|refer)\b(?![?!])',
Keyword.Namespace),
(r'(?<!\.)\b(and|not|or|when|xor|in)\b', Operator.Word),
(r'%=|\*=|\*\*=|\+=|\-=|\^=|\|\|=|'
@@ -1707,11 +1707,9 @@ class ElixirLexer(RegexLexer):
r'<=>|&&?|%\(\)|%\[\]|%\{\}|\+\+?|\-\-?|\|\|?|\!|//|[%&`/\|]|'
r'\*\*?|=?~|<\-)|([a-zA-Z_]\w*([?!])?)(:)(?!:)', String.Symbol),
(r':"', String.Symbol, 'interpoling_symbol'),
- (r'\b(nil|true|false)\b(?![?!])', Name.Constant),
- (r'\b[A-Z]\w*\b', Name.Constant),
- (r'\b(__(FILE|LINE|MODULE|STOP_ITERATOR|EXCEPTION|OP|REF|FUNCTION|'
- r'BLOCK|KVBLOCK)__)\b(?![?!])', Name.Builtin.Pseudo),
- (r'[a-zA-Z_!]\w*[!\?]?', Name),
+ (r'\b(nil|true|false)\b(?![?!])|\b[A-Z]\w*\b', Name.Constant),
+ (r'\b(__(FILE|LINE|MODULE|LOCAL|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
+ (r'[a-zA-Z_!][\w_]*[!\?]?', Name),
(r'[(){};,/\|:\\\[\]]', Punctuation),
(r'@[a-zA-Z_]\w*|&\d', Name.Variable),
(r'\b(0[xX][0-9A-Fa-f]+|\d(_?\d)*(\.(?![^\d\s])'
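Note (not part of the diff): the Elixir hunks above add 'refer' to the namespace keywords and rework the __FILE__/__LINE__-style pseudo-builtins. A quick, hedged way to see the former, assuming the era's ElixirLexer:

    from pygments.lexers import ElixirLexer

    tokens = list(ElixirLexer().get_tokens(u'refer Foo.Bar\n'))
    # 'refer' should now be tagged Keyword.Namespace, like import,
    # require and use, instead of falling through to the plain Name rule.
    print([tok for tok, _ in tokens])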
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 0796312f..733302bc 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -1718,7 +1718,7 @@ class PyPyLogLexer(RegexLexer):
r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
r"int_is_true|"
r"uint_floordiv|uint_ge|uint_lt|"
- r"float_add|float_sub|float_mul|float_truediv|"
+ r"float_add|float_sub|float_mul|float_truediv|float_neg|"
r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
r"cast_int_to_float|cast_float_to_int|"
diff --git a/tests/examplefiles/AcidStateAdvanced.hs b/tests/examplefiles/AcidStateAdvanced.hs
new file mode 100644
index 00000000..9e3e7718
--- /dev/null
+++ b/tests/examplefiles/AcidStateAdvanced.hs
@@ -0,0 +1,209 @@
+{-# LANGUAGE DeriveDataTypeable, FlexibleContexts, GeneralizedNewtypeDeriving
+ , MultiParamTypeClasses, OverloadedStrings, ScopedTypeVariables, TemplateHaskell
+ , TypeFamilies, FlexibleInstances #-}
+module Main where
+import Control.Applicative (Applicative, Alternative, (<$>))
+import Control.Exception.Lifted (bracket)
+import Control.Monad.Trans.Control (MonadBaseControl)
+import Control.Monad (MonadPlus, mplus)
+import Control.Monad.Reader (MonadReader, ReaderT(..), ask)
+import Control.Monad.Trans (MonadIO(..))
+import Data.Acid ( AcidState(..), EventState(..), EventResult(..)
+ , Query(..), QueryEvent(..), Update(..), UpdateEvent(..)
+ , IsAcidic(..), makeAcidic, openLocalState
+ )
+import Data.Acid.Local ( createCheckpointAndClose
+ , openLocalStateFrom
+ )
+import Data.Acid.Advanced (query', update')
+import Data.Maybe (fromMaybe)
+import Data.SafeCopy (SafeCopy, base, deriveSafeCopy)
+import Data.Data (Data, Typeable)
+import Data.Lens ((%=), (!=))
+import Data.Lens.Template (makeLens)
+import Data.Text.Lazy (Text)
+import Happstack.Server ( Happstack, HasRqData, Method(GET, POST), Request(rqMethod)
+ , Response
+ , ServerPartT(..), WebMonad, FilterMonad, ServerMonad
+ , askRq, decodeBody, dir, defaultBodyPolicy, lookText
+ , mapServerPartT, nullConf, nullDir, ok, simpleHTTP
+ , toResponse
+ )
+import Prelude hiding (head, id)
+import System.FilePath ((</>))
+import Text.Blaze ((!))
+import Text.Blaze.Html4.Strict (body, head, html, input, form, label, p, title, toHtml)
+import Text.Blaze.Html4.Strict.Attributes (action, enctype, for, id, method, name, type_, value)
+class HasAcidState m st where
+ getAcidState :: m (AcidState st)
+query :: forall event m.
+ ( Functor m
+ , MonadIO m
+ , QueryEvent event
+ , HasAcidState m (EventState event)
+ ) =>
+ event
+ -> m (EventResult event)
+query event =
+ do as <- getAcidState
+ query' (as :: AcidState (EventState event)) event
+update :: forall event m.
+ ( Functor m
+ , MonadIO m
+ , UpdateEvent event
+ , HasAcidState m (EventState event)
+ ) =>
+ event
+ -> m (EventResult event)
+update event =
+ do as <- getAcidState
+ update' (as :: AcidState (EventState event)) event
+-- | bracket the opening and close of the `AcidState` handle.
+
+-- automatically creates a checkpoint on close
+withLocalState :: (MonadBaseControl IO m, MonadIO m, IsAcidic st, Typeable st) =>
+ Maybe FilePath -- ^ path to state directory
+ -> st -- ^ initial state value
+ -> (AcidState st -> m a) -- ^ function which uses the `AcidState` handle
+ -> m a
+withLocalState mPath initialState =
+ bracket (liftIO $ (maybe openLocalState openLocalStateFrom mPath) initialState)
+ (liftIO . createCheckpointAndClose)
+-- State that stores a hit count
+
+data CountState = CountState { _count :: Integer }
+ deriving (Eq, Ord, Data, Typeable, Show)
+
+$(deriveSafeCopy 0 'base ''CountState)
+$(makeLens ''CountState)
+
+initialCountState :: CountState
+initialCountState = CountState { _count = 0 }
+
+incCount :: Update CountState Integer
+incCount = count %= succ
+
+$(makeAcidic ''CountState ['incCount])
+-- State that stores a greeting
+data GreetingState = GreetingState { _greeting :: Text }
+ deriving (Eq, Ord, Data, Typeable, Show)
+
+$(deriveSafeCopy 0 'base ''GreetingState)
+$(makeLens ''GreetingState)
+
+initialGreetingState :: GreetingState
+initialGreetingState = GreetingState { _greeting = "Hello" }
+
+getGreeting :: Query GreetingState Text
+getGreeting = _greeting <$> ask
+
+setGreeting :: Text -> Update GreetingState Text
+setGreeting txt = greeting != txt
+
+$(makeAcidic ''GreetingState ['getGreeting, 'setGreeting])
+data Acid = Acid { acidCountState :: AcidState CountState
+ , acidGreetingState :: AcidState GreetingState
+ }
+
+withAcid :: Maybe FilePath -> (Acid -> IO a) -> IO a
+withAcid mBasePath action =
+ let basePath = fromMaybe "_state" mBasePath
+ in withLocalState (Just $ basePath </> "count") initialCountState $ \c ->
+ withLocalState (Just $ basePath </> "greeting") initialGreetingState $ \g ->
+ action (Acid c g)
+newtype App a = App { unApp :: ServerPartT (ReaderT Acid IO) a }
+ deriving ( Functor, Alternative, Applicative, Monad, MonadPlus, MonadIO
+ , HasRqData, ServerMonad ,WebMonad Response, FilterMonad Response
+ , Happstack, MonadReader Acid)
+
+runApp :: Acid -> App a -> ServerPartT IO a
+runApp acid (App sp) = mapServerPartT (flip runReaderT acid) sp
+instance HasAcidState App CountState where
+ getAcidState = acidCountState <$> ask
+
+instance HasAcidState App GreetingState where
+ getAcidState = acidGreetingState <$> ask
+page :: App Response
+page =
+ do nullDir
+ g <- greet
+ c <- update IncCount -- ^ a CountState event
+ ok $ toResponse $
+ html $ do
+ head $ do
+ title "acid-state demo"
+ body $ do
+ form ! action "/" ! method "POST" ! enctype "multipart/form-data" $ do
+ label "new message: " ! for "msg"
+ input ! type_ "text" ! id "msg" ! name "greeting"
+ input ! type_ "submit" ! value "update message"
+ p $ toHtml g
+ p $ do "This page has been loaded "
+ toHtml c
+ " time(s)."
+ where
+ greet =
+ do m <- rqMethod <$> askRq
+ case m of
+ POST ->
+ do decodeBody (defaultBodyPolicy "/tmp/" 0 1000 1000)
+ newGreeting <- lookText "greeting"
+ update (SetGreeting newGreeting) -- ^ a GreetingState event
+ return newGreeting
+ GET ->
+ do query GetGreeting -- ^ a GreetingState event
+main :: IO ()
+main =
+ withAcid Nothing $ \acid ->
+ simpleHTTP nullConf $ runApp acid page
+newtype FooState = FooState { foo :: Text }
+ deriving (Eq, Ord, Data, Typeable, SafeCopy)
+
+initialFooState :: FooState
+initialFooState = FooState { foo = "foo" }
+
+askFoo :: Query FooState Text
+askFoo = foo <$> ask
+
+$(makeAcidic ''FooState ['askFoo])
+fooPlugin :: (Happstack m, HasAcidState m FooState) => m Response
+fooPlugin =
+ dir "foo" $ do
+ txt <- query AskFoo
+ ok $ toResponse txt
+data Acid' = Acid' { acidCountState' :: AcidState CountState
+ , acidGreetingState' :: AcidState GreetingState
+ , acidFooState' :: AcidState FooState
+ }
+withAcid' :: Maybe FilePath -> (Acid' -> IO a) -> IO a
+withAcid' mBasePath action =
+ let basePath = fromMaybe "_state" mBasePath
+ in withLocalState (Just $ basePath </> "count") initialCountState $ \c ->
+ withLocalState (Just $ basePath </> "greeting") initialGreetingState $ \g ->
+ withLocalState (Just $ basePath </> "foo") initialFooState $ \f ->
+ action (Acid' c g f)
+newtype App' a = App' { unApp' :: ServerPartT (ReaderT Acid' IO) a }
+ deriving ( Functor, Alternative, Applicative, Monad, MonadPlus, MonadIO
+ , HasRqData, ServerMonad ,WebMonad Response, FilterMonad Response
+ , Happstack, MonadReader Acid')
+
+instance HasAcidState App' FooState where
+ getAcidState = acidFooState' <$> ask
+fooAppPlugin :: App' Response
+fooAppPlugin = fooPlugin
+fooReaderPlugin :: ReaderT (AcidState FooState) (ServerPartT IO) Response
+fooReaderPlugin = fooPlugin
+instance HasAcidState (ReaderT (AcidState FooState) (ServerPartT IO)) FooState where
+ getAcidState = ask
+withFooPlugin :: (MonadIO m, MonadBaseControl IO m) =>
+ FilePath -- ^ path to state directory
+ -> (ServerPartT IO Response -> m a) -- ^ function that uses fooPlugin
+ -> m a
+withFooPlugin basePath f =
+ do withLocalState (Just $ basePath </> "foo") initialFooState $ \fooState ->
+ f $ runReaderT fooReaderPlugin fooState
+main' :: IO ()
+main' =
+ withFooPlugin "_state" $ \fooPlugin' ->
+ withAcid Nothing $ \acid ->
+ simpleHTTP nullConf $ fooPlugin' `mplus` runApp acid page
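Note (not part of the diff): once the file above exists, tests/test_examplefiles.py picks it up automatically; conceptually its check_lexer() boils down to something like this sketch (path and lexer choice taken from the commit, the rest is illustrative):

    import io
    from pygments.lexers import HaskellLexer
    from pygments.token import Error

    # Lexing the new example must not produce any Error tokens.
    with io.open('tests/examplefiles/AcidStateAdvanced.hs',
                 encoding='utf-8') as fp:
        text = fp.read()

    for toktype, value in HaskellLexer().get_tokens(text):
        assert toktype is not Error, 'could not lex %r' % value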
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index b0fc2ce0..426d02c8 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -238,13 +238,17 @@ class FiltersTest(unittest.TestCase):
for x in filters.FILTERS.keys():
lx = lexers.PythonLexer()
lx.add_filter(x, **filter_args.get(x, {}))
- text = open(TESTFILE, 'rb').read().decode('utf-8')
+ fp = open(TESTFILE, 'rb')
+ try:
+ text = fp.read().decode('utf-8')
+ finally:
+ fp.close()
tokens = list(lx.get_tokens(text))
roundtext = ''.join([t[1] for t in tokens])
if x not in ('whitespace', 'keywordcase'):
# these filters change the text
- self.assertEquals(roundtext, text,
- "lexer roundtrip with %s filter failed" % x)
+ self.assertEqual(roundtext, text,
+ "lexer roundtrip with %s filter failed" % x)
def test_raiseonerror(self):
lx = lexers.PythonLexer()
@@ -254,24 +258,32 @@ class FiltersTest(unittest.TestCase):
def test_whitespace(self):
lx = lexers.PythonLexer()
lx.add_filter('whitespace', spaces='%')
- text = open(TESTFILE, 'rb').read().decode('utf-8')
+ fp = open(TESTFILE, 'rb')
+ try:
+ text = fp.read().decode('utf-8')
+ finally:
+ fp.close()
lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
- self.failIf(' ' in lxtext)
+ self.assertFalse(' ' in lxtext)
def test_keywordcase(self):
lx = lexers.PythonLexer()
lx.add_filter('keywordcase', case='capitalize')
- text = open(TESTFILE, 'rb').read().decode('utf-8')
+ fp = open(TESTFILE, 'rb')
+ try:
+ text = fp.read().decode('utf-8')
+ finally:
+ fp.close()
lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
- self.assert_('Def' in lxtext and 'Class' in lxtext)
+ self.assertTrue('Def' in lxtext and 'Class' in lxtext)
def test_codetag(self):
lx = lexers.PythonLexer()
lx.add_filter('codetagify')
text = u'# BUG: text'
tokens = list(lx.get_tokens(text))
- self.assertEquals('# ', tokens[0][1])
- self.assertEquals('BUG', tokens[1][1])
+ self.assertEqual('# ', tokens[0][1])
+ self.assertEqual('BUG', tokens[1][1])
def test_codetag_boundary(self):
# ticket #368
@@ -279,4 +291,4 @@ class FiltersTest(unittest.TestCase):
lx.add_filter('codetagify')
text = u'# DEBUG: text'
tokens = list(lx.get_tokens(text))
- self.assertEquals('# DEBUG: text', tokens[0][1])
+ self.assertEqual('# DEBUG: text', tokens[0][1])
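Note (not part of the diff): the try/finally blocks introduced above are simply the pre-with-statement way to guarantee the handle is closed even when read() or decode() raises. Equivalent forms, for comparison only:

    # Helper equivalent to the repeated try/finally pattern; the commit
    # presumably keeps try/finally so the suite still runs on very old
    # interpreters.
    def read_bytes(path):
        fp = open(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def read_text(path, encoding='utf-8'):
        # with-statement form, available out of the box from Python 2.6
        with open(path, 'rb') as fp:
            return fp.read().decode(encoding)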
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index 6a285fcc..56036183 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -38,64 +38,68 @@ class CmdLineTest(unittest.TestCase):
def test_L_opt(self):
c, o, e = run_cmdline("-L")
- self.assertEquals(c, 0)
- self.assert_("Lexers" in o and "Formatters" in o and
- "Filters" in o and "Styles" in o)
+ self.assertEqual(c, 0)
+ self.assertTrue("Lexers" in o and "Formatters" in o and
+ "Filters" in o and "Styles" in o)
c, o, e = run_cmdline("-L", "lexer")
- self.assertEquals(c, 0)
- self.assert_("Lexers" in o and "Formatters" not in o)
+ self.assertEqual(c, 0)
+ self.assertTrue("Lexers" in o and "Formatters" not in o)
c, o, e = run_cmdline("-L", "lexers")
- self.assertEquals(c, 0)
+ self.assertEqual(c, 0)
def test_O_opt(self):
filename = TESTFILE
c, o, e = run_cmdline("-Ofull=1,linenos=true,foo=bar",
"-fhtml", filename)
- self.assertEquals(c, 0)
- self.assert_("<html" in o)
- self.assert_('class="linenos"' in o)
+ self.assertEqual(c, 0)
+ self.assertTrue("<html" in o)
+ self.assertTrue('class="linenos"' in o)
def test_P_opt(self):
filename = TESTFILE
c, o, e = run_cmdline("-Pfull", "-Ptitle=foo, bar=baz=,",
"-fhtml", filename)
- self.assertEquals(c, 0)
- self.assert_("<title>foo, bar=baz=,</title>" in o)
+ self.assertEqual(c, 0)
+ self.assertTrue("<title>foo, bar=baz=,</title>" in o)
def test_F_opt(self):
filename = TESTFILE
c, o, e = run_cmdline("-Fhighlight:tokentype=Name.Blubb,"
"names=TESTFILE filename",
"-fhtml", filename)
- self.assertEquals(c, 0)
- self.assert_('<span class="n-Blubb' in o)
+ self.assertEqual(c, 0)
+ self.assertTrue('<span class="n-Blubb' in o)
def test_H_opt(self):
c, o, e = run_cmdline("-H", "formatter", "html")
- self.assertEquals(c, 0)
- self.assert_('HTML' in o)
+ self.assertEqual(c, 0)
+ self.assertTrue('HTML' in o)
def test_S_opt(self):
c, o, e = run_cmdline("-S", "default", "-f", "html", "-O", "linenos=1")
- self.assertEquals(c, 0)
+ self.assertEqual(c, 0)
def test_invalid_opts(self):
for opts in [("-L", "-lpy"), ("-L", "-fhtml"), ("-L", "-Ox"),
("-a",), ("-Sst", "-lpy"), ("-H",),
("-H", "formatter"),]:
- self.assert_(run_cmdline(*opts)[0] == 2)
+ self.assertTrue(run_cmdline(*opts)[0] == 2)
def test_normal(self):
# test that cmdline gives the same output as library api
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
filename = TESTFILE
- code = open(filename, 'rb').read()
+ fp = open(filename, 'rb')
+ try:
+ code = fp.read()
+ finally:
+ fp.close()
output = highlight(code, PythonLexer(), HtmlFormatter())
c, o, e = run_cmdline("-lpython", "-fhtml", filename)
- self.assertEquals(o, output)
- self.assertEquals(e, "")
- self.assertEquals(c, 0)
+ self.assertEqual(o, output)
+ self.assertEqual(e, "")
+ self.assertEqual(c, 0)
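Note (not part of the diff): across all test files this commit replaces the deprecated unittest aliases assertEquals, assert_ and failIf with their canonical names. For reference, the mapping in one self-contained example:

    import unittest

    class AliasExample(unittest.TestCase):
        def test_renamed_assertions(self):
            # assertEquals -> assertEqual, assert_ -> assertTrue,
            # failIf -> assertFalse; the old spellings still work but
            # are deprecated aliases.
            self.assertEqual(2 + 2, 4)
            self.assertTrue('Lexers' in 'Lexers and Formatters')
            self.assertFalse('Styles' in 'Lexers')

    if __name__ == '__main__':
        unittest.main()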
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index 4ab2912e..41acf4ef 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -49,7 +49,11 @@ def test_example_files():
yield check_lexer, lx, absfn, outfn
def check_lexer(lx, absfn, outfn):
- text = open(absfn, 'rb').read()
+ fp = open(absfn, 'rb')
+ try:
+ text = fp.read()
+ finally:
+ fp.close()
text = text.replace(b('\r\n'), b('\n'))
text = text.strip(b('\n')) + b('\n')
try:
diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py
index b0b36c4d..284a6c75 100644
--- a/tests/test_html_formatter.py
+++ b/tests/test_html_formatter.py
@@ -23,8 +23,11 @@ import support
TESTFILE, TESTDIR = support.location(__file__)
-tokensource = list(PythonLexer().get_tokens(
- uni_open(TESTFILE, encoding='utf-8').read()))
+fp = uni_open(TESTFILE, encoding='utf-8')
+try:
+ tokensource = list(PythonLexer().get_tokens(fp.read()))
+finally:
+ fp.close()
class HtmlFormatterTest(unittest.TestCase):
@@ -39,7 +42,7 @@ class HtmlFormatterTest(unittest.TestCase):
stripped_html = re.sub('<.*?>', '', houtfile.getvalue())
escaped_text = escape_html(noutfile.getvalue())
- self.assertEquals(stripped_html, escaped_text)
+ self.assertEqual(stripped_html, escaped_text)
def test_external_css(self):
# test correct behavior
@@ -52,13 +55,13 @@ class HtmlFormatterTest(unittest.TestCase):
fmt1.format(tokensource, tfile)
try:
fmt2.format(tokensource, tfile)
- self.assert_(isfile(join(TESTDIR, 'fmt2.css')))
+ self.assertTrue(isfile(join(TESTDIR, 'fmt2.css')))
except IOError:
# test directory not writable
pass
tfile.close()
- self.assert_(isfile(join(dirname(tfile.name), 'fmt1.css')))
+ self.assertTrue(isfile(join(dirname(tfile.name), 'fmt1.css')))
os.unlink(join(dirname(tfile.name), 'fmt1.css'))
try:
os.unlink(join(TESTDIR, 'fmt2.css'))
@@ -81,7 +84,7 @@ class HtmlFormatterTest(unittest.TestCase):
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
- self.assert_(re.search("<pre>\s+1\s+2\s+3", html))
+ self.assertTrue(re.search("<pre>\s+1\s+2\s+3", html))
def test_linenos_with_startnum(self):
optdict = dict(linenos=True, linenostart=5)
@@ -89,7 +92,7 @@ class HtmlFormatterTest(unittest.TestCase):
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
- self.assert_(re.search("<pre>\s+5\s+6\s+7", html))
+ self.assertTrue(re.search("<pre>\s+5\s+6\s+7", html))
def test_lineanchors(self):
optdict = dict(lineanchors="foo")
@@ -97,7 +100,7 @@ class HtmlFormatterTest(unittest.TestCase):
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
- self.assert_(re.search("<pre><a name=\"foo-1\">", html))
+ self.assertTrue(re.search("<pre><a name=\"foo-1\">", html))
def test_lineanchors_with_startnum(self):
optdict = dict(lineanchors="foo", linenostart=5)
@@ -105,7 +108,7 @@ class HtmlFormatterTest(unittest.TestCase):
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
- self.assert_(re.search("<pre><a name=\"foo-5\">", html))
+ self.assertTrue(re.search("<pre><a name=\"foo-5\">", html))
def test_valid_output(self):
# test all available wrappers
@@ -119,29 +122,34 @@ class HtmlFormatterTest(unittest.TestCase):
catname = os.path.join(TESTDIR, 'dtds', 'HTML4.soc')
try:
import subprocess
- ret = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
- stdout=subprocess.PIPE).wait()
+ po = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
+ stdout=subprocess.PIPE)
+ ret = po.wait()
+ output = po.stdout.read()
+ po.stdout.close()
except OSError:
# nsgmls not available
pass
else:
- self.failIf(ret, 'nsgmls run reported errors')
+ if ret:
+ print output
+ self.assertFalse(ret, 'nsgmls run reported errors')
os.unlink(pathname)
def test_get_style_defs(self):
fmt = HtmlFormatter()
sd = fmt.get_style_defs()
- self.assert_(sd.startswith('.'))
+ self.assertTrue(sd.startswith('.'))
fmt = HtmlFormatter(cssclass='foo')
sd = fmt.get_style_defs()
- self.assert_(sd.startswith('.foo'))
+ self.assertTrue(sd.startswith('.foo'))
sd = fmt.get_style_defs('.bar')
- self.assert_(sd.startswith('.bar'))
+ self.assertTrue(sd.startswith('.bar'))
sd = fmt.get_style_defs(['.bar', '.baz'])
fl = sd.splitlines()[0]
- self.assert_('.bar' in fl and '.baz' in fl)
+ self.assertTrue('.bar' in fl and '.baz' in fl)
def test_unicode_options(self):
fmt = HtmlFormatter(title=u'Föö',
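Note (not part of the diff): the Popen change in test_valid_output reads stdout only after wait(), which can in principle deadlock if the child writes more than the OS pipe buffer holds. communicate() does both steps in one call; a hedged sketch using the same nsgmls invocation as the test:

    import subprocess

    def run_nsgmls(catname, pathname):
        # communicate() waits for the process and drains its pipes,
        # avoiding the wait()-then-read() pipe-buffer deadlock.
        po = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
                              stdout=subprocess.PIPE)
        output, _ = po.communicate()
        return po.returncode, output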
diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py
index 0c9c9122..8412ec41 100644
--- a/tests/test_latex_formatter.py
+++ b/tests/test_latex_formatter.py
@@ -22,7 +22,11 @@ TESTFILE, TESTDIR = support.location(__file__)
class LatexFormatterTest(unittest.TestCase):
def test_valid_output(self):
- tokensource = list(PythonLexer().get_tokens(open(TESTFILE).read()))
+ fp = open(TESTFILE)
+ try:
+ tokensource = list(PythonLexer().get_tokens(fp.read()))
+ finally:
+ fp.close()
fmt = LatexFormatter(full=True, encoding='latin1')
handle, pathname = tempfile.mkstemp('.tex')
@@ -34,14 +38,18 @@ class LatexFormatterTest(unittest.TestCase):
tfile.close()
try:
import subprocess
- ret = subprocess.Popen(['latex', '-interaction=nonstopmode',
- pathname],
- stdout=subprocess.PIPE).wait()
+ po = subprocess.Popen(['latex', '-interaction=nonstopmode',
+ pathname], stdout=subprocess.PIPE)
+ ret = po.wait()
+ output = po.stdout.read()
+ po.stdout.close()
except OSError:
# latex not available
pass
else:
- self.failIf(ret, 'latex run reported errors')
+ if ret:
+ print output
+ self.assertFalse(ret, 'latex run reported errors')
os.unlink(pathname)
os.chdir(old_wd)
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
index fbb71ad6..74b64d9b 100644
--- a/tests/test_regexlexer.py
+++ b/tests/test_regexlexer.py
@@ -34,6 +34,6 @@ class TupleTransTest(unittest.TestCase):
def test(self):
lx = TestLexer()
toks = list(lx.get_tokens_unprocessed('abcde'))
- self.assertEquals(toks,
+ self.assertEqual(toks,
[(0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
(3, Text.Beer, 'd'), (4, Text.Root, 'e')])
diff --git a/tests/test_token.py b/tests/test_token.py
index 490c966c..a9d1edeb 100644
--- a/tests/test_token.py
+++ b/tests/test_token.py
@@ -8,8 +8,6 @@
"""
import unittest
-import StringIO
-import sys
from pygments import token
@@ -17,8 +15,7 @@ from pygments import token
class TokenTest(unittest.TestCase):
def test_tokentype(self):
- e = self.assertEquals
- r = self.assertRaises
+ e = self.assertEqual
t = token.String
@@ -27,13 +24,13 @@ class TokenTest(unittest.TestCase):
e(t.__class__, token._TokenType)
def test_functions(self):
- self.assert_(token.is_token_subtype(token.String, token.String))
- self.assert_(token.is_token_subtype(token.String, token.Literal))
- self.failIf(token.is_token_subtype(token.Literal, token.String))
+ self.assertTrue(token.is_token_subtype(token.String, token.String))
+ self.assertTrue(token.is_token_subtype(token.String, token.Literal))
+ self.assertFalse(token.is_token_subtype(token.Literal, token.String))
- self.assert_(token.string_to_tokentype(token.String) is token.String)
- self.assert_(token.string_to_tokentype('') is token.Token)
- self.assert_(token.string_to_tokentype('String') is token.String)
+ self.assertTrue(token.string_to_tokentype(token.String) is token.String)
+ self.assertTrue(token.string_to_tokentype('') is token.Token)
+ self.assertTrue(token.string_to_tokentype('String') is token.String)
def test_sanity_check(self):
stp = token.STANDARD_TYPES.copy()
diff --git a/tests/test_using_api.py b/tests/test_using_api.py
index e645a881..83d3f18e 100644
--- a/tests/test_using_api.py
+++ b/tests/test_using_api.py
@@ -32,7 +32,7 @@ class UsingStateTest(unittest.TestCase):
expected = [(Text, 'a'), (String, '"'), (Keyword, 'bcd'),
(String, '"'), (Text, 'e\n')]
t = list(TestLexer().get_tokens('a"bcd"e'))
- self.assertEquals(t, expected)
+ self.assertEqual(t, expected)
def test_error(self):
def gen():
diff --git a/tests/test_util.py b/tests/test_util.py
index d994e5fa..6b931eb2 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -8,7 +8,6 @@
"""
import unittest
-import os
from pygments import util
@@ -23,7 +22,7 @@ class UtilTest(unittest.TestCase):
def test_getoptions(self):
raises = self.assertRaises
- equals = self.assertEquals
+ equals = self.assertEqual
equals(util.get_bool_opt({}, 'a', True), True)
equals(util.get_bool_opt({}, 'a', 1), True)
@@ -56,20 +55,20 @@ class UtilTest(unittest.TestCase):
other text
"""
- self.assertEquals(util.docstring_headline(f1), "docstring headline")
- self.assertEquals(util.docstring_headline(f2), "docstring headline")
+ self.assertEqual(util.docstring_headline(f1), "docstring headline")
+ self.assertEqual(util.docstring_headline(f2), "docstring headline")
def test_analysator_returns_float(self):
# If an analysator wrapped by make_analysator returns a floating point
# number, then that number will be returned by the wrapper.
- self.assertEquals(FakeLexer.analyse('0.5'), 0.5)
+ self.assertEqual(FakeLexer.analyse('0.5'), 0.5)
def test_analysator_returns_boolean(self):
# If an analysator wrapped by make_analysator returns a boolean value,
# then the wrapper will return 1.0 if the boolean was True or 0.0 if
# it was False.
- self.assertEquals(FakeLexer.analyse(True), 1.0)
- self.assertEquals(FakeLexer.analyse(False), 0.0)
+ self.assertEqual(FakeLexer.analyse(True), 1.0)
+ self.assertEqual(FakeLexer.analyse(False), 0.0)
def test_analysator_raises_exception(self):
# If an analysator wrapped by make_analysator raises an exception,
@@ -78,40 +77,40 @@ class UtilTest(unittest.TestCase):
def analyse(text):
raise RuntimeError('something bad happened')
analyse = util.make_analysator(analyse)
- self.assertEquals(ErrorLexer.analyse(''), 0.0)
+ self.assertEqual(ErrorLexer.analyse(''), 0.0)
def test_analysator_value_error(self):
# When converting the analysator's return value to a float a
# ValueError may occur. If that happens 0.0 is returned instead.
- self.assertEquals(FakeLexer.analyse('bad input'), 0.0)
+ self.assertEqual(FakeLexer.analyse('bad input'), 0.0)
def test_analysator_type_error(self):
# When converting the analysator's return value to a float a
# TypeError may occur. If that happens 0.0 is returned instead.
- self.assertEquals(FakeLexer.analyse(None), 0.0)
+ self.assertEqual(FakeLexer.analyse(None), 0.0)
def test_shebang_matches(self):
- self.assert_(util.shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?'))
- self.assert_(util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?'))
- self.assert_(util.shebang_matches('#!/usr/bin/startsomethingwith python',
- r'python(2\.\d)?'))
- self.assert_(util.shebang_matches('#!C:\\Python2.4\\Python.exe',
- r'python(2\.\d)?'))
+ self.assertTrue(util.shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?'))
+ self.assertTrue(util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?'))
+ self.assertTrue(util.shebang_matches('#!/usr/bin/startsomethingwith python',
+ r'python(2\.\d)?'))
+ self.assertTrue(util.shebang_matches('#!C:\\Python2.4\\Python.exe',
+ r'python(2\.\d)?'))
- self.failIf(util.shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?'))
- self.failIf(util.shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?'))
- self.failIf(util.shebang_matches('#!', r'python'))
+ self.assertFalse(util.shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?'))
+ self.assertFalse(util.shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?'))
+ self.assertFalse(util.shebang_matches('#!', r'python'))
def test_doctype_matches(self):
- self.assert_(util.doctype_matches('<!DOCTYPE html PUBLIC "a"> <html>',
- 'html.*'))
- self.failIf(util.doctype_matches('<?xml ?> <DOCTYPE html PUBLIC "a"> <html>',
- 'html.*'))
- self.assert_(util.html_doctype_matches(
+ self.assertTrue(util.doctype_matches(
+ '<!DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
+ self.assertFalse(util.doctype_matches(
+ '<?xml ?> <DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
+ self.assertTrue(util.html_doctype_matches(
'<?xml ?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN">'))
def test_xml(self):
- self.assert_(util.looks_like_xml(
+ self.assertTrue(util.looks_like_xml(
'<?xml ?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN">'))
- self.assert_(util.looks_like_xml('<html xmlns>abc</html>'))
- self.failIf(util.looks_like_xml('<html>'))
+ self.assertTrue(util.looks_like_xml('<html xmlns>abc</html>'))
+ self.assertFalse(util.looks_like_xml('<html>'))