@@ -1068,7 +1068,7 @@ class TestTokenize(TestCase):
         encoding = object()
         encoding_used = None
         def mock_detect_encoding(readline):
-            return encoding, ['first', 'second']
+            return encoding, [b'first', b'second']
 
         def mock__tokenize(readline, encoding):
             nonlocal encoding_used
@@ -1087,7 +1087,7 @@ class TestTokenize(TestCase):
             counter += 1
             if counter == 5:
                 return b''
-            return counter
+            return str(counter).encode()
 
         orig_detect_encoding = tokenize_module.detect_encoding
         orig__tokenize = tokenize_module._tokenize
@@ -1095,7 +1095,8 @@ class TestTokenize(TestCase):
         tokenize_module._tokenize = mock__tokenize
         try:
             results = tokenize(mock_readline)
-            self.assertEqual(list(results), ['first', 'second', 1, 2, 3, 4])
+            self.assertEqual(list(results),
+                             [b'first', b'second', b'1', b'2', b'3', b'4'])
         finally:
             tokenize_module.detect_encoding = orig_detect_encoding
             tokenize_module._tokenize = orig__tokenize