/* cpython/Python/Python-tokenize.c — C implementation of the "_tokenize" module. */

#include "Python.h"
#include "errcode.h"
#include "internal/pycore_critical_section.h"   // Py_BEGIN_CRITICAL_SECTION
#include "../Parser/lexer/state.h"
#include "../Parser/lexer/lexer.h"
#include "../Parser/tokenizer/tokenizer.h"
#include "../Parser/pegen.h"                    // _PyPegen_byte_offset_to_character_offset()

/* Forward declaration; the full module definition appears near the bottom of
 * the file (it is needed earlier by _tokenize_get_state_by_type). */
static struct PyModuleDef _tokenizemodule;

tokenize_state;

/* Return this module's tokenize_state.
 *
 * Standard CPython module-state accessor: PyModule_GetState() yields the
 * m_size-byte buffer attached to `module`.  The original body was elided
 * (an empty body in a pointer-returning function is undefined behavior);
 * this is the canonical implementation.  Returns NULL with an exception
 * set if `module` is not a module object. */
static tokenize_state *
get_tokenize_state(PyObject *module) {
    return (tokenize_state *)PyModule_GetState(module);
}

/* Map a heap type back to the state of the module that defined it.
 *
 * NOTE(review): the expansion was elided in this extraction.  The clinic
 * class directive below expands this macro and dereferences ->TokenizerIter,
 * so it must produce a tokenize_state*; this is the canonical form using
 * PyType_GetModuleByDef — confirm against upstream CPython. */
#define _tokenize_get_state_by_type(type) \
    get_tokenize_state(PyType_GetModuleByDef(type, &_tokenizemodule))

#include "pycore_runtime.h"
#include "clinic/Python-tokenize.c.h"

/*[clinic input]
module _tokenizer
class _tokenizer.tokenizeriter "tokenizeriterobject *" "_tokenize_get_state_by_type(type)->TokenizerIter"
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=96d98ee2fef7a8bc]*/

tokenizeriterobject;

/*[clinic input]
@classmethod
_tokenizer.tokenizeriter.__new__ as tokenizeriter_new

    readline: object
    /
    *
    extra_tokens: bool
    encoding: str(c_default="NULL") = 'utf-8'
[clinic start generated code]*/

static PyObject *
tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
                       int extra_tokens, const char *encoding)
/*[clinic end generated code: output=7501a1211683ce16 input=f7dddf8a613ae8bd]*/
{
    /* Clinic impl for TokenizerIter.__new__: builds an iterator that pulls
     * source text through the `readline` callable.  `extra_tokens` and
     * `encoding` mirror the clinic parameters declared above.
     * NOTE(review): body elided in this extraction — presumably allocates a
     * tokenizeriterobject and initializes its tokenizer from `readline`;
     * confirm against upstream CPython. */
}

/* Convert a tokenizer-level failure on `it` into a Python exception.
 * Returns an int status; by CPython convention -1 means an exception was
 * set — NOTE(review): body elided in this extraction, confirm against
 * upstream. */
static int
_tokenizer_error(tokenizeriterobject *it)
{
    /* Body elided in this extraction. */
}

/* Return the current source line as a Python object, given the raw buffer
 * [line_start, line_start + size).  Sets *line_changed to report whether
 * the line differs from the previously cached one.
 * NOTE(review): body elided in this extraction — presumably serves the
 * last_line cache on the iterator; confirm against upstream. */
static PyObject *
_get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t size,
                  int *line_changed)
{
    /* Body elided in this extraction. */
}

/* Compute the start/end column offsets for `token` (passed by value) into
 * *col_offset and *end_col_offset, relative to `line`/`line_start` for the
 * given lineno/end_lineno.  `line_changed` tells the helper whether the
 * cached per-line bookkeeping on `it` is still valid.
 * NOTE(review): body elided in this extraction — the offsets are presumably
 * character (not byte) columns, cf. the pegen byte→character helper included
 * at the top of the file; confirm against upstream. */
static void
_get_col_offsets(tokenizeriterobject *it, struct token token, const char *line_start,
                 PyObject *line, int line_changed, Py_ssize_t lineno, Py_ssize_t end_lineno,
                 Py_ssize_t *col_offset, Py_ssize_t *end_col_offset)
{
    /* Body elided in this extraction. */
}

/* tp_iternext handler for TokenizerIter (wired into the slot table below):
 * produce the next token object, or return NULL to signal exhaustion or an
 * error.  NOTE(review): body elided in this extraction — confirm the shape
 * of the yielded token tuple against upstream. */
static PyObject *
tokenizeriter_next(tokenizeriterobject *it)
{
    /* Body elided in this extraction. */
}

/* tp_dealloc handler for TokenizerIter (wired into the slot table below).
 * NOTE(review): body elided in this extraction — presumably frees the
 * tokenizer state and releases cached objects before freeing the instance;
 * heap types must also DECREF their type here — confirm against upstream. */
static void
tokenizeriter_dealloc(tokenizeriterobject *it)
{
    /* Body elided in this extraction. */
}

static PyType_Slot tokenizeriter_slots[] =;

static PyType_Spec tokenizeriter_spec =;

/* Py_mod_exec handler: per-interpreter module initialization.
 * Returns 0 on success, -1 with an exception set on failure.
 * NOTE(review): body elided in this extraction — presumably creates the
 * TokenizerIter heap type from tokenizeriter_spec, stores it in the module
 * state, and adds it to the module; confirm against upstream. */
static int
tokenizemodule_exec(PyObject *m)
{
    /* Body elided in this extraction. */
}

static PyMethodDef tokenize_methods[] =;

static PyModuleDef_Slot tokenizemodule_slots[] =;

/* m_traverse GC handler: visit the PyObject references held in the module
 * state.  Returns 0, or the nonzero result of a Py_VISIT.
 * NOTE(review): body elided in this extraction — presumably visits
 * state->TokenizerIter; confirm against upstream. */
static int
tokenizemodule_traverse(PyObject *m, visitproc visit, void *arg)
{
    /* Body elided in this extraction. */
}

/* m_clear GC handler: drop the PyObject references held in the module
 * state.  Returns 0 on success.
 * NOTE(review): body elided in this extraction — presumably Py_CLEARs
 * state->TokenizerIter; confirm against upstream. */
static int
tokenizemodule_clear(PyObject *m)
{
    /* Body elided in this extraction. */
}

static void
tokenizemodule_free(void *m)
{}

static struct PyModuleDef _tokenizemodule =;

PyMODINIT_FUNC
PyInit__tokenize(void)
{}