Diffstat (limited to 'Python')
-rw-r--r--  Python/Python-tokenize.c  6
-rw-r--r--  Python/import.c           1
-rw-r--r--  Python/traceback.c        4
3 files changed, 5 insertions, 6 deletions
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index fa71328255..d3ebbe1331 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -47,7 +47,7 @@ tokenizeriter_new_impl(PyTypeObject *type, const char *source)
     if (filename == NULL) {
         return NULL;
     }
-    self->tok = PyTokenizer_FromUTF8(source, 1);
+    self->tok = _PyTokenizer_FromUTF8(source, 1);
     if (self->tok == NULL) {
         Py_DECREF(filename);
         return NULL;
@@ -61,7 +61,7 @@ tokenizeriter_next(tokenizeriterobject *it)
 {
     const char *start;
     const char *end;
-    int type = PyTokenizer_Get(it->tok, &start, &end);
+    int type = _PyTokenizer_Get(it->tok, &start, &end);
     if (type == ERRORTOKEN && PyErr_Occurred()) {
         return NULL;
     }
@@ -105,7 +105,7 @@ static void
 tokenizeriter_dealloc(tokenizeriterobject *it)
 {
     PyTypeObject *tp = Py_TYPE(it);
-    PyTokenizer_Free(it->tok);
+    _PyTokenizer_Free(it->tok);
     tp->tp_free(it);
     Py_DECREF(tp);
 }
diff --git a/Python/import.c b/Python/import.c
index 731f0f5911..4bc1e518bf 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -11,7 +11,6 @@
#include "pycore_interp.h" // _PyInterpreterState_ClearModules()
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_sysmodule.h"
-#include "errcode.h"
#include "marshal.h"
#include "code.h"
#include "importdl.h"
diff --git a/Python/traceback.c b/Python/traceback.c
index ffa7c3494e..b18cbb91ce 100644
--- a/Python/traceback.c
+++ b/Python/traceback.c
@@ -29,7 +29,7 @@
 #define MAX_NTHREADS 100
 
 /* Function from Parser/tokenizer.c */
-extern char * PyTokenizer_FindEncodingFilename(int, PyObject *);
+extern char* _PyTokenizer_FindEncodingFilename(int, PyObject *);
 
 _Py_IDENTIFIER(TextIOWrapper);
 _Py_IDENTIFIER(close);
@@ -431,7 +431,7 @@ _Py_DisplaySourceLine(PyObject *f, PyObject *filename, int lineno, int indent, i
         Py_DECREF(binary);
         return 0;
     }
-    found_encoding = PyTokenizer_FindEncodingFilename(fd, filename);
+    found_encoding = _PyTokenizer_FindEncodingFilename(fd, filename);
     if (found_encoding == NULL)
         PyErr_Clear();
     encoding = (found_encoding != NULL) ? found_encoding : "utf-8";
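
The renamed functions keep the call pattern that Python-tokenize.c already follows above: create a tokenizer state from a UTF-8 buffer, pull tokens until ENDMARKER (or an ERRORTOKEN), then free the state. A minimal sketch of that lifecycle, assuming compilation inside the CPython source tree, since Parser/tokenizer.h and the _PyTokenizer_* functions are internal-only and not part of the public C API:

/* Sketch only, not part of this change: tokenizer lifecycle as used by
 * Python-tokenize.c above.  Requires the in-tree internal headers. */
#include <stdio.h>
#include "Python.h"
#include "token.h"        // ENDMARKER, ERRORTOKEN
#include "tokenizer.h"    // struct tok_state, _PyTokenizer_* (Parser/)

static int
dump_tokens(const char *source)
{
    struct tok_state *tok = _PyTokenizer_FromUTF8(source, 1);
    if (tok == NULL) {
        return -1;                   // allocation or decoding failure
    }
    const char *start;
    const char *end;
    int type;
    while ((type = _PyTokenizer_Get(tok, &start, &end)) != ENDMARKER) {
        if (type == ERRORTOKEN) {    // error details live in tok / PyErr
            break;
        }
        if (start != NULL && end != NULL) {
            printf("%d: %.*s\n", type, (int)(end - start), start);
        }
    }
    _PyTokenizer_Free(tok);
    return 0;
}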