#include "Python.h"
#include "errcode.h"
#include "internal/pycore_critical_section.h"
#include "../Parser/lexer/state.h"
#include "../Parser/lexer/lexer.h"
#include "../Parser/tokenizer/tokenizer.h"
#include "../Parser/pegen.h"
/* Forward declaration: helpers below need the module definition that is
   fully initialized near the bottom of this file. */
static struct PyModuleDef _tokenizemodule;
/* NOTE(review): truncated in this view — presumably the trailing name of a
   `typedef struct { ... } tokenize_state;` whose field list was elided.
   Verify against the original file. */
tokenize_state;
/* Return this module's per-module state for `module` (body elided in this
   view). Presumably a thin wrapper over PyModule_GetState() with a cast to
   tokenize_state* — TODO confirm against the full source. */
static tokenize_state *
get_tokenize_state(PyObject *module) { … }
/* NOTE(review): macro body elided — presumably resolves module state from a
   heap type (e.g. via PyType_GetModuleState / a *_get_state_by_type helper);
   verify against the original file. */
#define _tokenize_get_state_by_type(type) …
#include "pycore_runtime.h"
#include "clinic/Python-tokenize.c.h"
/* NOTE(review): truncated in this view — presumably the trailing name of a
   `typedef struct { ... } tokenizeriterobject;` holding the iterator's
   per-instance fields. Verify against the original file. */
tokenizeriterobject;
/* Impl half of the Argument Clinic-generated constructor: build a new
   tokenizer-iterator instance of `type` that pulls source lines from the
   `readline` callable (body elided in this view).
   `extra_tokens` presumably toggles emission of extra token kinds and
   `encoding` names the source encoding — TODO confirm against the body.
   Returns a new reference, or NULL with an exception set on failure
   (standard CPython constructor convention — assumed, not visible here). */
static PyObject *
tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
int extra_tokens, const char *encoding)
{ … }
/* Translate a low-level tokenizer failure recorded on `it` into a Python
   exception (body elided in this view). Returns an int status; the exact
   return convention (0/-1) is not visible here — verify before relying
   on it. */
static int
_tokenizer_error(tokenizeriterobject *it)
{ … }
/* Build the Python object for the current source line, starting at
   `line_start` and spanning `size` units (body elided in this view).
   Presumably sets *line_changed to report whether the line differs from a
   previously cached one so callers can reuse the old object — TODO confirm.
   Returns a PyObject* (NULL on error assumed per CPython convention). */
static PyObject *
_get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t size,
int *line_changed)
{ … }
/* Compute the start/end column offsets of `token` and store them through
   *col_offset / *end_col_offset (body elided in this view). Presumably
   converts byte positions relative to `line_start` into character offsets
   within `line`, using `lineno`/`end_lineno` and the `line_changed` flag
   from _get_current_line() — TODO confirm against the body. */
static void
_get_col_offsets(tokenizeriterobject *it, struct token token, const char *line_start,
PyObject *line, int line_changed, Py_ssize_t lineno, Py_ssize_t end_lineno,
Py_ssize_t *col_offset, Py_ssize_t *end_col_offset)
{ … }
/* tp_iternext slot: produce the next token from the iterator (body elided
   in this view). Presumably returns a new reference to a token tuple, or
   NULL with StopIteration/another exception set when exhausted or on
   error — standard iterator convention, assumed rather than visible. */
static PyObject *
tokenizeriter_next(tokenizeriterobject *it)
{ … }
/* tp_dealloc slot for the iterator (body elided in this view). Presumably
   releases the tokenizer state and owned references, then frees the
   instance via its heap type — TODO confirm the type is DECREF'ed as
   heap-type dealloc requires. */
static void
tokenizeriter_dealloc(tokenizeriterobject *it)
{ … }
/* Slot table and spec used to create the iterator heap type (initializers
   elided in this view); presumably wired to the _new/_next/_dealloc
   functions above and instantiated in tokenizemodule_exec(). */
static PyType_Slot tokenizeriter_slots[] = …;
static PyType_Spec tokenizeriter_spec = …;
/* Py_mod_exec slot: finish initializing module `m` (body elided in this
   view). Presumably creates the iterator type from tokenizeriter_spec and
   stores it in the module state — TODO confirm. Returns 0 on success,
   -1 on error per the exec-slot convention (assumed). */
static int
tokenizemodule_exec(PyObject *m)
{ … }
/* Module-level method table and multi-phase-init slot list (initializers
   elided in this view); the slot list presumably references
   tokenizemodule_exec above. */
static PyMethodDef tokenize_methods[] = …;
static PyModuleDef_Slot tokenizemodule_slots[] = …;
/* GC traverse hook: visit PyObject references held in the module state
   (body elided in this view; presumably Py_VISITs state members). */
static int
tokenizemodule_traverse(PyObject *m, visitproc visit, void *arg)
{ … }
/* GC clear hook: drop PyObject references held in the module state
   (body elided in this view; presumably Py_CLEARs state members). */
static int
tokenizemodule_clear(PyObject *m)
{ … }
/* m_free hook invoked when the module object is deallocated (body elided
   in this view); presumably delegates to tokenizemodule_clear — TODO
   confirm. */
static void
tokenizemodule_free(void *m)
{ … }
/* Module definition (initializer elided in this view); matches the forward
   declaration near the top and presumably wires in the method table,
   slots, state size, and traverse/clear/free hooks above. */
static struct PyModuleDef _tokenizemodule = …;
/* Module entry point for `import _tokenize` (body elided in this view).
   Given the slot table above, presumably returns
   PyModuleDef_Init(&_tokenizemodule) for multi-phase initialization
   (PEP 489) — TODO confirm. */
PyMODINIT_FUNC
PyInit__tokenize(void)
{ … }