summaryrefslogtreecommitdiffstats
path: root/tests
diff options
context:
space:
mode:
authorJonathan Slenders <jonathan@slenders.be>2014-10-03 23:40:19 +0200
committerJonathan Slenders <jonathan@slenders.be>2014-10-03 23:40:19 +0200
commit0795c47142e1ae1f147973def10db46ab493fc4d (patch)
tree81498e6b1a83128a3b089051a7593490dd7106a7 /tests
parent37388b437a12ff40d828d7e25100dfc2524a199e (diff)
Token.{Menu,Toolbar}.Completer renamed to Token.{Menu,Toolbar}.Completions.
Diffstat (limited to 'tests')
-rw-r--r--tests/old_tests.py38
1 file changed, 4 insertions, 34 deletions
diff --git a/tests/old_tests.py b/tests/old_tests.py
index e59ed404..8a6bcf2c 100644
--- a/tests/old_tests.py
+++ b/tests/old_tests.py
@@ -8,43 +8,13 @@ from __future__ import unicode_literals
import unittest
-
-#class PromptTest(unittest.TestCase):
-# def setUp(self):
-# self.line = Line()
-# self.line.insert_text('some text')
-#
-# self.code = Code(self.line.document)
-# self.prompt = Prompt(self.line, self.code)
-#
-# def _test_token_text_list(self, data):
-# # Test whether data is list of (Token, text) tuples.
-# for token, text in data:
-# self.assertIsInstance(token, pygments.token._TokenType)
-# self.assertIsInstance(text, six.text_type)
-#
-# def test_get_prompt(self):
-# result = list(self.prompt.get_prompt())
-# self._test_token_text_list(result)
-#
-# def test_second_line_prefix(self):
-# result = list(self.prompt.get_second_line_prefix())
-# self._test_token_text_list(result)
-#
-# def test_get_help_tokens(self):
-# result = list(self.prompt.get_second_line_prefix())
-# self._test_token_text_list(result)
-#
-
-#--
-
-
-from prompt_toolkit.contrib.shell.lexer import ParametersLexer, TextToken
+from prompt_toolkit.contrib.shell.lexer import ShellLexer, TextToken
from pygments.token import Token
-class ParameterLexerTest(unittest.TestCase):
+
+class ShellLexerTest(unittest.TestCase):
def setUp(self):
- self.lexer = ParametersLexer(stripnl=False, stripall=False, ensurenl=False)
+ self.lexer = ShellLexer(stripnl=False, stripall=False, ensurenl=False)
def test_simple(self):
t = list(self.lexer.get_tokens('aaa bbb ccc'))