| author | Anthony Sottile <asottile@umich.edu> | 2018-10-23 21:44:22 +0000 |
|---|---|---|
| committer | Anthony Sottile <asottile@umich.edu> | 2018-10-23 21:44:22 +0000 |
| commit | 63b91c95ea7795e3c3c90f2d643f685bfff312e9 (patch) | |
| tree | 1fc4622d96dc346c1440fd46b323d8c8fb4de091 /src | |
| parent | f5d6cf907ba5eb8b5dd55f5101b2678f1def3283 (diff) | |
| parent | a42bfdf6d2a9479648831dda619e179516827a93 (diff) | |
| download | flake8-63b91c95ea7795e3c3c90f2d643f685bfff312e9.tar.gz | |
Merge branch 'match_newlines_py3' into 'master'
Fix inconsistent newlines read from a file in python3
Closes #457
See merge request pycqa/flake8!253
Diffstat (limited to 'src')
| mode | path | changes |
|---|---|---|
| -rw-r--r-- | src/flake8/processor.py | 9 |

1 file changed, 3 insertions, 6 deletions
```diff
diff --git a/src/flake8/processor.py b/src/flake8/processor.py
index fdf0a98..18f9f1d 100644
--- a/src/flake8/processor.py
+++ b/src/flake8/processor.py
@@ -1,6 +1,5 @@
 """Module containing our file processor that tokenizes a file for checks."""
 import contextlib
-import io
 import logging
 import sys
 import tokenize
@@ -308,11 +307,9 @@ class FileProcessor(object):
     def _readlines_py3(self):
         # type: () -> List[str]
         try:
-            with open(self.filename, "rb") as fd:
-                (coding, lines) = tokenize.detect_encoding(fd.readline)
-                textfd = io.TextIOWrapper(fd, coding, line_buffering=True)
-                return [l.decode(coding) for l in lines] + textfd.readlines()
-        except (LookupError, SyntaxError, UnicodeError):
+            with tokenize.open(self.filename) as fd:
+                return fd.readlines()
+            except (SyntaxError, UnicodeError):
             # If we can't detect the codec with tokenize.detect_encoding, or
             # the detected encoding is incorrect, just fallback to latin-1.
             with open(self.filename, encoding="latin-1") as fd:
```
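
The core of the bug: `tokenize.detect_encoding()` consumes the first line (or first two lines) of the file as raw bytes while looking for a coding cookie. The old `_readlines_py3` decoded those bytes directly, preserving their original `\r\n` endings, and then read the rest of the file through an `io.TextIOWrapper`, which translates newlines to `\n`, so a CRLF file came back with mixed line endings. `tokenize.open()` instead seeks back to the start after detecting the encoding and reads every line through a single text wrapper. Below is a minimal sketch of the difference, assuming CPython's `tokenize` module; the file path and helper names are invented for the demo and are not part of flake8:

```python
import io
import tokenize

# Demo-only CRLF-terminated file; PATH and the helpers below are hypothetical.
PATH = "crlf_example.py"
with open(PATH, "wb") as fd:
    fd.write(b"# coding: utf-8\r\nx = 1\r\ny = 2\r\n")


def readlines_old(filename):
    # Pre-fix approach: the line consumed by detect_encoding() is decoded
    # from raw bytes and keeps "\r\n", while TextIOWrapper translates the
    # remaining lines to "\n" -- hence the inconsistent newlines.
    with open(filename, "rb") as fd:
        coding, lines = tokenize.detect_encoding(fd.readline)
        textfd = io.TextIOWrapper(fd, coding, line_buffering=True)
        return [l.decode(coding) for l in lines] + textfd.readlines()


def readlines_new(filename):
    # Post-fix approach: tokenize.open() detects the encoding, seeks back
    # to the start, and wraps the whole file in one TextIOWrapper, so all
    # lines get the same universal-newline translation.
    with tokenize.open(filename) as fd:
        return fd.readlines()


print(readlines_old(PATH))  # ['# coding: utf-8\r\n', 'x = 1\n', 'y = 2\n']
print(readlines_new(PATH))  # ['# coding: utf-8\n', 'x = 1\n', 'y = 2\n']
```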