cpython/Objects/listobject.c

/* List object implementation */

#include "Python.h"
#include "pycore_abstract.h"      // _PyIndex_Check()
#include "pycore_ceval.h"         // _PyEval_GetBuiltin()
#include "pycore_dict.h"          // _PyDictViewObject
#include "pycore_freelist.h"      // _Py_FREELIST_FREE(), _Py_FREELIST_POP()
#include "pycore_pyatomic_ft_wrappers.h"
#include "pycore_interp.h"        // PyInterpreterState.list
#include "pycore_list.h"          // struct _Py_list_freelist, _PyListIterObject
#include "pycore_long.h"          // _PyLong_DigitCount
#include "pycore_modsupport.h"    // _PyArg_NoKwnames()
#include "pycore_object.h"        // _PyObject_GC_TRACK(), _PyDebugAllocatorStats()
#include "pycore_tuple.h"         // _PyTuple_FromArray()
#include "pycore_setobject.h"     // _PySet_NextEntry()
#include <stddef.h>

/*[clinic input]
class list "PyListObject *" "&PyList_Type"
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f9b222678f9f71e0]*/

#include "clinic/listobject.c.h"

_Py_DECLARE_STR(list_err, "list index out of range");

#ifdef Py_GIL_DISABLED
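/* In the free-threaded build the item array is allocated with a header
 * recording its capacity:
 *
 *     [ allocated | ob_item[0] | ob_item[1] | ... ]
 *                   ^-- PyListObject.ob_item points here
 *
 * so a reader holding only the ob_item pointer can recover the allocated
 * size (see list_capacity() below). */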
typedef struct {
    Py_ssize_t allocated;
    PyObject *ob_item[];
} _PyListArray;

static _PyListArray *
list_allocate_array(size_t capacity)
{
    /* The "- 1" reserves room for the _PyListArray header itself. */
    if (capacity > PY_SSIZE_T_MAX/sizeof(PyObject*) - 1) {
        return NULL;
    }
    _PyListArray *array = PyMem_Malloc(sizeof(_PyListArray) + capacity * sizeof(PyObject *));
    if (array == NULL) {
        return NULL;
    }
    array->allocated = capacity;
    return array;
}

static Py_ssize_t
list_capacity(PyObject **items)
{
    _PyListArray *array = _Py_CONTAINER_OF(items, _PyListArray, ob_item);
    return array->allocated;
}
#endif

static void
free_list_items(PyObject** items, bool use_qsbr)
{}

static void
ensure_shared_on_resize(PyListObject *self)
{}

/* Ensure ob_item has room for at least newsize elements, and set
 * ob_size to newsize.  If newsize > ob_size on entry, the content
 * of the new slots at exit is undefined heap trash; it's the caller's
 * responsibility to overwrite them with sane values.
 * The number of allocated elements may grow, shrink, or stay the same.
 * Failure is impossible if newsize <= self.allocated on entry, although
 * that partly relies on an assumption that the system realloc() never
 * fails when passed a number of bytes <= the number of bytes last
 * allocated (the C standard doesn't guarantee this, but it's hard to
 * imagine a realloc implementation where it wouldn't be true).
 * Note that self->ob_item may change, even if newsize is less than
 * ob_size on entry.
 */
static int
list_resize(PyListObject *self, Py_ssize_t newsize)
{}
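
/* Illustrative sketch of the over-allocation policy implied above: grow
 * proportionally (about 12.5% extra, plus a small constant, rounded down to
 * a multiple of 4) so that a long run of appends costs amortized O(1).
 * The formula matches recent CPython releases but is an internal detail and
 * may change; list_growth_sketch is a hypothetical helper, not part of this
 * file's API. Growth pattern: 0, 4, 8, 16, 24, 32, 40, 52, 64, 76, ...
 */
static inline Py_ssize_t
list_growth_sketch(Py_ssize_t newsize)
{
    return (Py_ssize_t)(((size_t)newsize + (newsize >> 3) + 6) & ~(size_t)3);
}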

static int
list_preallocate_exact(PyListObject *self, Py_ssize_t size)
{}

/* Print summary info about the state of the optimized allocator */
void
_PyList_DebugMallocStats(FILE *out)
{}

PyObject *
PyList_New(Py_ssize_t size)
{}

static PyObject *
list_new_prealloc(Py_ssize_t size)
{}

Py_ssize_t
PyList_Size(PyObject *op)
{}

static inline int
valid_index(Py_ssize_t i, Py_ssize_t limit)
{
    /* One unsigned comparison checks 0 <= i < limit: a negative i wraps
     * around to a huge size_t value. */
    return (size_t) i < (size_t) limit;
}

#ifdef Py_GIL_DISABLED

static PyObject *
list_item_impl(PyListObject *self, Py_ssize_t idx)
{
    PyObject *item = NULL;
    Py_BEGIN_CRITICAL_SECTION(self);
    if (!_PyObject_GC_IS_SHARED(self)) {
        _PyObject_GC_SET_SHARED(self);
    }
    Py_ssize_t size = Py_SIZE(self);
    if (!valid_index(idx, size)) {
        goto exit;
    }
    item = _Py_NewRefWithLock(self->ob_item[idx]);
exit:
    Py_END_CRITICAL_SECTION();
    return item;
}

static inline PyObject*
list_get_item_ref(PyListObject *op, Py_ssize_t i)
{
    /* A list owned by another thread that has not yet been marked shared
     * cannot be read lock-free; take the locked path, which also marks
     * the list as shared. */
    if (!_Py_IsOwnedByCurrentThread((PyObject *)op) && !_PyObject_GC_IS_SHARED(op)) {
        return list_item_impl(op, i);
    }
    // Need an atomic operation to read the size.
    Py_ssize_t size = PyList_GET_SIZE(op);
    if (!valid_index(i, size)) {
        return NULL;
    }
    PyObject **ob_item = _Py_atomic_load_ptr(&op->ob_item);
    if (ob_item == NULL) {
        return NULL;
    }
    Py_ssize_t cap = list_capacity(ob_item);
    assert(cap != -1 && cap >= size);
    if (!valid_index(i, cap)) {
        return NULL;
    }
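    /* Optimistically try to acquire a strong reference without holding the
     * per-object lock; this can fail if the slot is concurrently being
     * overwritten or its item deallocated, in which case we retry under
     * the lock. */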
    PyObject *item = _Py_TryXGetRef(&ob_item[i]);
    if (item == NULL) {
        return list_item_impl(op, i);
    }
    return item;
}
#else
static inline PyObject*
list_get_item_ref(PyListObject *op, Py_ssize_t i)
{
    /* With the GIL there is no concurrent mutation to worry about: a
     * bounds check plus a plain new reference suffices. */
    if (!valid_index(i, Py_SIZE(op))) {
        return NULL;
    }
    return Py_NewRef(op->ob_item[i]);
}
#endif

PyObject *
PyList_GetItem(PyObject *op, Py_ssize_t i)
{}

PyObject *
PyList_GetItemRef(PyObject *op, Py_ssize_t i)
{}

PyObject *
_PyList_GetItemRef(PyListObject *list, Py_ssize_t i)
{}

int
PyList_SetItem(PyObject *op, Py_ssize_t i,
               PyObject *newitem)
{}

static int
ins1(PyListObject *self, Py_ssize_t where, PyObject *v)
{}

int
PyList_Insert(PyObject *op, Py_ssize_t where, PyObject *newitem)
{}

/* internal, used by _PyList_AppendTakeRef */
int
_PyList_AppendTakeRefListResize(PyListObject *self, PyObject *newitem)
{}

int
PyList_Append(PyObject *op, PyObject *newitem)
{}

/* Methods */

static void
list_dealloc(PyObject *self)
{}

static PyObject *
list_repr_impl(PyListObject *v)
{}

static PyObject *
list_repr(PyObject *self)
{}

static Py_ssize_t
list_length(PyObject *a)
{}

static int
list_contains(PyObject *aa, PyObject *el)
{}

static PyObject *
list_item(PyObject *aa, Py_ssize_t i)
{}

static PyObject *
list_slice_lock_held(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh)
{}

PyObject *
PyList_GetSlice(PyObject *a, Py_ssize_t ilow, Py_ssize_t ihigh)
{}

static PyObject *
list_concat_lock_held(PyListObject *a, PyListObject *b)
{}

static PyObject *
list_concat(PyObject *aa, PyObject *bb)
{}

static PyObject *
list_repeat_lock_held(PyListObject *a, Py_ssize_t n)
{}

static PyObject *
list_repeat(PyObject *aa, Py_ssize_t n)
{}

static void
list_clear_impl(PyListObject *a, bool is_resize)
{}

static void
list_clear(PyListObject *a)
{}

static int
list_clear_slot(PyObject *self)
{}

/* a[ilow:ihigh] = v if v != NULL.
 * del a[ilow:ihigh] if v == NULL.
 *
 * Special speed gimmick:  when v is NULL and ihigh - ilow <= 8, it's
 * guaranteed the call cannot fail.
 */
static int
list_ass_slice_lock_held(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *v)
{}
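
/* Sketch of why the "v == NULL, ihigh - ilow <= 8" case above cannot fail:
 * the doomed items are copied into a stack buffer before the tail is slid
 * down, so no heap allocation is needed, and the DECREFs are performed only
 * after the list is consistent again (a DECREF can run arbitrary code that
 * re-enters the list). delete_small_slice_sketch is a hypothetical helper,
 * not the elided implementation. */
static void
delete_small_slice_sketch(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh)
{
    PyObject *recycle[8];
    Py_ssize_t n = ihigh - ilow;
    assert(0 < n && n <= 8);
    memcpy(recycle, &a->ob_item[ilow], n * sizeof(PyObject *));
    memmove(&a->ob_item[ilow], &a->ob_item[ihigh],
            (Py_SIZE(a) - ihigh) * sizeof(PyObject *));
    Py_SET_SIZE(a, Py_SIZE(a) - n);
    while (--n >= 0) {
        Py_DECREF(recycle[n]);
    }
}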

static int
list_ass_slice(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *v)
{}

int
PyList_SetSlice(PyObject *a, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *v)
{}

static int
list_inplace_repeat_lock_held(PyListObject *self, Py_ssize_t n)
{}

static PyObject *
list_inplace_repeat(PyObject *_self, Py_ssize_t n)
{}

static int
list_ass_item_lock_held(PyListObject *a, Py_ssize_t i, PyObject *v)
{}

static int
list_ass_item(PyObject *aa, Py_ssize_t i, PyObject *v)
{}

/*[clinic input]
@critical_section
list.insert

    index: Py_ssize_t
    object: object
    /

Insert object before index.
[clinic start generated code]*/

static PyObject *
list_insert_impl(PyListObject *self, Py_ssize_t index, PyObject *object)
/*[clinic end generated code: output=7f35e32f60c8cb78 input=b1987ca998a4ae2d]*/
{}

/*[clinic input]
@critical_section
list.clear as py_list_clear

Remove all items from list.
[clinic start generated code]*/

static PyObject *
py_list_clear_impl(PyListObject *self)
/*[clinic end generated code: output=83726743807e3518 input=e285b7f09051a9ba]*/
{}

/*[clinic input]
@critical_section
list.copy

Return a shallow copy of the list.
[clinic start generated code]*/

static PyObject *
list_copy_impl(PyListObject *self)
/*[clinic end generated code: output=ec6b72d6209d418e input=81c54b0c7bb4f73d]*/
{}

/*[clinic input]
@critical_section
list.append

     object: object
     /

Append object to the end of the list.
[clinic start generated code]*/

static PyObject *
list_append_impl(PyListObject *self, PyObject *object)
/*[clinic end generated code: output=78423561d92ed405 input=122b0853de54004f]*/
{}

static int
list_extend_fast(PyListObject *self, PyObject *iterable)
{}

static int
list_extend_iter_lock_held(PyListObject *self, PyObject *iterable)
{}

static int
list_extend_lock_held(PyListObject *self, PyObject *iterable)
{}

static int
list_extend_set(PyListObject *self, PySetObject *other)
{}

static int
list_extend_dict(PyListObject *self, PyDictObject *dict, int which_item)
{}

static int
list_extend_dictitems(PyListObject *self, PyDictObject *dict)
{}

static int
_list_extend(PyListObject *self, PyObject *iterable)
{}

/*[clinic input]
list.extend as list_extend

     iterable: object
     /

Extend list by appending elements from the iterable.
[clinic start generated code]*/

static PyObject *
list_extend(PyListObject *self, PyObject *iterable)
/*[clinic end generated code: output=630fb3bca0c8e789 input=979da7597a515791]*/
{}

PyObject *
_PyList_Extend(PyListObject *self, PyObject *iterable)
{}

int
PyList_Extend(PyObject *self, PyObject *iterable)
{}


int
PyList_Clear(PyObject *self)
{}


static PyObject *
list_inplace_concat(PyObject *_self, PyObject *other)
{}

/*[clinic input]
@critical_section
list.pop

    index: Py_ssize_t = -1
    /

Remove and return item at index (default last).

Raises IndexError if list is empty or index is out of range.
[clinic start generated code]*/

static PyObject *
list_pop_impl(PyListObject *self, Py_ssize_t index)
/*[clinic end generated code: output=6bd69dcb3f17eca8 input=c269141068ae4b8f]*/
{}

/* Reverse a slice of a list in place, from lo up to (exclusive) hi. */
static void
reverse_slice(PyObject **lo, PyObject **hi)
{}

/* Lots of code for an adaptive, stable, natural mergesort.  There are many
 * pieces to this algorithm; read listsort.txt for overviews and details.
 */

/* A sortslice contains a pointer to an array of keys and a pointer to
 * an array of corresponding values.  In other words, keys[i]
 * corresponds with values[i].  If values == NULL, then the keys are
 * also the values.
 *
 * Several convenience routines are provided here, so that keys and
 * values are always moved in sync.
 */

typedef struct {
    PyObject **keys;
    PyObject **values;
} sortslice;

Py_LOCAL_INLINE(void)
sortslice_copy(sortslice *s1, Py_ssize_t i, sortslice *s2, Py_ssize_t j)
{}

Py_LOCAL_INLINE(void)
sortslice_copy_incr(sortslice *dst, sortslice *src)
{}

Py_LOCAL_INLINE(void)
sortslice_copy_decr(sortslice *dst, sortslice *src)
{}


Py_LOCAL_INLINE(void)
sortslice_memcpy(sortslice *s1, Py_ssize_t i, sortslice *s2, Py_ssize_t j,
                 Py_ssize_t n)
{}

Py_LOCAL_INLINE(void)
sortslice_memmove(sortslice *s1, Py_ssize_t i, sortslice *s2, Py_ssize_t j,
                  Py_ssize_t n)
{}

Py_LOCAL_INLINE(void)
sortslice_advance(sortslice *slice, Py_ssize_t n)
{}
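
/* What "moved in sync" means above, as a minimal sketch (the elided
 * routines apply the same rule to memcpy, memmove and advancing): every
 * move updates keys[], and updates values[] only when a separate values
 * array exists. sortslice_copy_sketch is illustrative, not the elided
 * body. */
Py_LOCAL_INLINE(void)
sortslice_copy_sketch(sortslice *s1, Py_ssize_t i, sortslice *s2, Py_ssize_t j)
{
    s1->keys[i] = s2->keys[j];
    if (s1->values != NULL) {
        s1->values[i] = s2->values[j];
    }
}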

/* Comparison function: ms->key_compare, which is set at run-time in
 * listsort_impl to optimize for various special cases.
 * Returns -1 on error, 1 if x < y, 0 if x >= y.
 */

#define ISLT(X, Y) (*(ms->key_compare))(X, Y, ms)

/* Compare X to Y via "<".  Goto "fail" if the comparison raises an
   error.  Else "k" is set to true iff X<Y, and an "if (k)" block is
   started.  It makes more sense in context <wink>.  X and Y are PyObject*s.
*/
#define IFLT(X, Y) if ((k = ISLT(X, Y)) < 0) goto fail;  \
           if (k)
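
/* Typical call-site shape, as a sketch: each user declares "int k" and a
 * "fail" label, so IFLT both checks for a raised exception and opens the
 * "less than" branch:
 *
 *     IFLT(b, a)
 *         ... runs only when b < a ...
 *     ...
 *   fail:
 *     return -1;
 */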

/* The maximum number of entries in a MergeState's pending-runs stack.
 * For a list with n elements, this needs at most floor(log2(n)) + 1 entries
 * even if we didn't force runs to a minimal length.  So the number of bits
 * in a Py_ssize_t is plenty large enough for all cases.
 */
#define MAX_MERGE_PENDING (SIZEOF_SIZE_T * 8)

/* When we get into galloping mode, we stay there until both runs win less
 * often than MIN_GALLOP consecutive times.  See listsort.txt for more info.
 */
#define MIN_GALLOP 7

/* Avoid malloc for small temp arrays. */
#define MERGESTATE_TEMP_SIZE 256

/* The largest value of minrun. This must be a power of 2, and >= 1, so that
 * the merge_compute_minrun() algorithm is guaranteed to return a result no
 * larger than this.
 */
#define MAX_MINRUN 64
#if ((MAX_MINRUN) < 1) || ((MAX_MINRUN) & ((MAX_MINRUN) - 1))
#error "MAX_MINRUN must be a power of 2, and >= 1"
#endif

/* One MergeState exists on the stack per invocation of mergesort.  It's just
 * a convenient way to pass state around among the helper functions.
 */
struct s_slice {};

typedef struct s_MergeState MergeState;
struct s_MergeState {};

/* binarysort is the best method for sorting small arrays: it does few
   compares, but can do data movement quadratic in the number of elements.
   ss->keys is viewed as an array of n keys, a[:n]. a[:ok] is already sorted.
   Pass ok = 0 (or 1) if you don't know.
   It's sorted in-place, by a stable binary insertion sort. If ss->values
   isn't NULL, it's permuted in lockstep with ss->keys.
   On entry, must have n >= 1, and 0 <= ok <= n <= MAX_MINRUN.
   Return -1 if comparison raises an exception, else 0.
   Even in case of error, the output slice will be some permutation of
   the input (nothing is lost or duplicated).
*/
static int
binarysort(MergeState *ms, const sortslice *ss, Py_ssize_t n, Py_ssize_t ok)
{}
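
/* Minimal sketch of the stable binary insertion described above, on keys
 * only (the elided body also permutes ss->values in lockstep and is
 * organized differently). binary_insertion_sketch is a hypothetical
 * helper. */
static int
binary_insertion_sketch(MergeState *ms, PyObject **a, Py_ssize_t n,
                        Py_ssize_t ok)
{
    int k;      /* set by IFLT */

    for (Py_ssize_t i = ok ? ok : 1; i < n; i++) {
        PyObject *pivot = a[i];
        Py_ssize_t lo = 0, hi = i;
        /* Invariant: pivot belongs at some index in [lo, hi]. Moving
         * right on ties (only "<" is used) keeps the sort stable. */
        while (lo < hi) {
            Py_ssize_t mid = (lo + hi) >> 1;
            IFLT(pivot, a[mid])
                hi = mid;
            else
                lo = mid + 1;
        }
        /* Shift a[lo:i] right one slot and drop the pivot into place. */
        for (Py_ssize_t j = i; j > lo; j--) {
            a[j] = a[j - 1];
        }
        a[lo] = pivot;
    }
    return 0;
fail:
    return -1;
}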

static void
sortslice_reverse(sortslice *s, Py_ssize_t n)
{}

/*
Return the length of the run beginning at slo->keys, spanning no more than
nremaining elements. The run beginning there may be ascending or descending,
but the function permutes it in place, if needed, so that it's always ascending
upon return.

Returns -1 in case of error.
*/
static Py_ssize_t
count_run(MergeState *ms, sortslice *slo, Py_ssize_t nremaining)
{}

/*
Locate the proper position of key in a sorted vector; if the vector contains
an element equal to key, return the position immediately to the left of
the leftmost equal element.  [gallop_right() does the same except returns
the position to the right of the rightmost equal element (if any).]

"a" is a sorted vector with n elements, starting at a[0].  n must be > 0.

"hint" is an index at which to begin the search, 0 <= hint < n.  The closer
hint is to the final result, the faster this runs.

The return value is the int k in 0..n such that

    a[k-1] < key <= a[k]

pretending that *(a-1) is minus infinity and a[n] is plus infinity.  IOW,
key belongs at index k; or, IOW, the first k elements of a should precede
key, and the last n-k should follow key.

Returns -1 on error.  See listsort.txt for info on the method.
*/
static Py_ssize_t
gallop_left(MergeState *ms, PyObject *key, PyObject **a, Py_ssize_t n, Py_ssize_t hint)
{}

/*
Exactly like gallop_left(), except that if key already exists in a[0:n],
finds the position immediately to the right of the rightmost equal value.

The return value is the int k in 0..n such that

    a[k-1] <= key < a[k]

or -1 if error.

The code duplication is massive, but this is enough different given that
we're sticking to "<" comparisons that it's much harder to follow if
written as one routine with yet another "left or right?" flag.
*/
static Py_ssize_t
gallop_right(MergeState *ms, PyObject *key, PyObject **a, Py_ssize_t n, Py_ssize_t hint)
{}
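
/* Worked example of the pair: with a = {1, 3, 3, 5} (n = 4) and key = 3,
 * gallop_left returns 1 (a[0] < key <= a[1]) while gallop_right returns 3
 * (a[2] <= key < a[3]). This asymmetry is what keeps merges stable for
 * runs of equal keys. */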

/* Conceptually a MergeState's constructor. */
static void
merge_init(MergeState *ms, Py_ssize_t list_size, int has_keyfunc,
           sortslice *lo)
{}

/* Free all the temp memory owned by the MergeState.  This must be called
 * when you're done with a MergeState, and may be called before then if
 * you want to free the temp memory early.
 */
static void
merge_freemem(MergeState *ms)
{}

/* Ensure enough temp memory for 'need' array slots is available.
 * Returns 0 on success and -1 if the memory can't be gotten.
 */
static int
merge_getmem(MergeState *ms, Py_ssize_t need)
{}
#define MERGE_GETMEM(MS, NEED) ((NEED) <= (MS)->alloced ? 0 :   \
                                merge_getmem(MS, NEED))

/* Merge the na elements starting at ssa with the nb elements starting at
 * ssb.keys = ssa.keys + na in a stable way, in-place.  na and nb must be > 0.
 * Must also have that ssa.keys[na-1] belongs at the end of the merge, and
 * should have na <= nb.  See listsort.txt for more info.  Return 0 if
 * successful, -1 if error.
 */
static Py_ssize_t
merge_lo(MergeState *ms, sortslice ssa, Py_ssize_t na,
         sortslice ssb, Py_ssize_t nb)
{}

/* Merge the na elements starting at ssa with the nb elements starting at
 * ssb.keys = ssa.keys + na in a stable way, in-place.  na and nb must be > 0.
 * Must also have that ssa.keys[na-1] belongs at the end of the merge, and
 * should have na >= nb.  See listsort.txt for more info.  Return 0 if
 * successful, -1 if error.
 */
static Py_ssize_t
merge_hi(MergeState *ms, sortslice ssa, Py_ssize_t na,
         sortslice ssb, Py_ssize_t nb)
{}

/* Merge the two runs at stack indices i and i+1.
 * Returns 0 on success, -1 on error.
 */
static Py_ssize_t
merge_at(MergeState *ms, Py_ssize_t i)
{}

/* Two adjacent runs begin at index s1. The first run has length n1, and
 * the second run (starting at index s1+n1) has length n2. The list has total
 * length n.
 * Compute the "power" of the first run. See listsort.txt for details.
 */
static int
powerloop(Py_ssize_t s1, Py_ssize_t n1, Py_ssize_t n2, Py_ssize_t n)
{}

/* The next run has been identified, of length n2.
 * If there's already a run on the stack, apply the "powersort" merge strategy:
 * compute the topmost run's "power" (depth in a conceptual binary merge tree)
 * and merge adjacent runs on the stack with greater power. See listsort.txt
 * for more info.
 *
 * It's the caller's responsibility to push the new run on the stack when this
 * returns.
 *
 * Returns 0 on success, -1 on error.
 */
static int
found_new_run(MergeState *ms, Py_ssize_t n2)
{}

/* Regardless of invariants, merge all runs on the stack until only one
 * remains.  This is used at the end of the mergesort.
 *
 * Returns 0 on success, -1 on error.
 */
static int
merge_force_collapse(MergeState *ms)
{}

/* Compute a good value for the minimum run length; natural runs shorter
 * than this are boosted artificially via binary insertion.
 *
 * If n < MAX_MINRUN return n (it's too small to bother with fancy stuff).
 * Else if n is an exact power of 2, return MAX_MINRUN / 2.
 * Else return an int k, MAX_MINRUN / 2 <= k <= MAX_MINRUN, such that n/k is
 * close to, but strictly less than, an exact power of 2.
 *
 * See listsort.txt for more info.
 */
static Py_ssize_t
merge_compute_minrun(Py_ssize_t n)
{}
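
/* The computation described above, as a sketch (this is the algorithm from
 * listsort.txt; the elided body may differ cosmetically): shift n right
 * until it is below MAX_MINRUN, OR-ing any shifted-off 1 bits into r, then
 * add r so that n/minrun lands just under a power of 2. */
static Py_ssize_t
merge_compute_minrun_sketch(Py_ssize_t n)
{
    Py_ssize_t r = 0;           /* becomes 1 if any 1 bits are shifted off */

    assert(n >= 0);
    while (n >= MAX_MINRUN) {
        r |= n & 1;
        n >>= 1;
    }
    return n + r;
}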

/* Here we define custom comparison functions to optimize for the cases one commonly
 * encounters in practice: homogeneous lists, often of one of the basic types. */

/* This struct holds the comparison function and helper functions
 * selected in the pre-sort check. */

/* These are the special case compare functions.
 * ms->key_compare will always point to one of these: */

/* Heterogeneous compare: default, always safe to fall back on. */
static int
safe_object_compare(PyObject *v, PyObject *w, MergeState *ms)
{}
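
/* A minimal sketch of the fallback above, assuming nothing about the
 * operands: defer to the generic rich comparison with Py_LT, which returns
 * -1 on error and the boolean result otherwise. Illustrative only; the
 * elided body may differ in details. */
static int
safe_object_compare_sketch(PyObject *v, PyObject *w, MergeState *ms)
{
    (void)ms;   /* the generic case needs no per-sort state */
    return PyObject_RichCompareBool(v, w, Py_LT);
}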

/* Homogeneous compare: safe for any two comparable objects of the same type.
 * (ms->key_richcompare is set to ob_type->tp_richcompare in the
 *  pre-sort check.)
 */
static int
unsafe_object_compare(PyObject *v, PyObject *w, MergeState *ms)
{}

/* Latin string compare: safe for any two latin (one byte per char) strings. */
static int
unsafe_latin_compare(PyObject *v, PyObject *w, MergeState *ms)
{}

/* Bounded int compare: compare any two longs that fit in a single machine word. */
static int
unsafe_long_compare(PyObject *v, PyObject *w, MergeState *ms)
{}

/* Float compare: compare any two floats. */
static int
unsafe_float_compare(PyObject *v, PyObject *w, MergeState *ms)
{}

/* Tuple compare: compare *any* two tuples, using
 * ms->tuple_elem_compare to compare the first elements, which is set
 * using the same pre-sort check as we use for ms->key_compare,
 * but run on the list [x[0] for x in L]. This allows us to optimize compares
 * on two levels (as long as [x[0] for x in L] is type-homogeneous). The idea is
 * that most tuple compares don't involve x[1:]. */
static int
unsafe_tuple_compare(PyObject *v, PyObject *w, MergeState *ms)
{}
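
/* Example of the two-level setup above: for L = [(1, 'b'), (1, 'a'), (2, 'c')],
 * the pre-sort check runs on [1, 1, 2], so ms->tuple_elem_compare becomes the
 * machine-word int compare; only ties on x[0] (here, the first two tuples)
 * ever need to look at x[1:]. */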

/* An adaptive, stable, natural mergesort.  See listsort.txt.
 * Returns Py_None on success, NULL on error.  Even in case of error, the
 * list will be some permutation of its input state (nothing is lost or
 * duplicated).
 */
/*[clinic input]
@critical_section
list.sort

    *
    key as keyfunc: object = None
    reverse: bool = False

Sort the list in ascending order and return None.

The sort is in-place (i.e. the list itself is modified) and stable (i.e. the
order of two equal elements is maintained).

If a key function is given, apply it once to each list item and sort them,
ascending or descending, according to their function values.

The reverse flag can be set to sort in descending order.
[clinic start generated code]*/

static PyObject *
list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse)
/*[clinic end generated code: output=57b9f9c5e23fbe42 input=667bf25d0e3a3676]*/
{}
#undef IFLT
#undef ISLT

int
PyList_Sort(PyObject *v)
{}

/*[clinic input]
@critical_section
list.reverse

Reverse *IN PLACE*.
[clinic start generated code]*/

static PyObject *
list_reverse_impl(PyListObject *self)
/*[clinic end generated code: output=482544fc451abea9 input=04ac8e0c6a66e4d9]*/
{}

int
PyList_Reverse(PyObject *v)
{}

PyObject *
PyList_AsTuple(PyObject *v)
{}

PyObject *
_PyList_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n)
{}

/*[clinic input]
list.index

    value: object
    start: slice_index(accept={int}) = 0
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
    /

Return first index of value.

Raises ValueError if the value is not present.
[clinic start generated code]*/

static PyObject *
list_index_impl(PyListObject *self, PyObject *value, Py_ssize_t start,
                Py_ssize_t stop)
/*[clinic end generated code: output=ec51b88787e4e481 input=40ec5826303a0eb1]*/
{}

/*[clinic input]
list.count

     value: object
     /

Return number of occurrences of value.
[clinic start generated code]*/

static PyObject *
list_count(PyListObject *self, PyObject *value)
/*[clinic end generated code: output=b1f5d284205ae714 input=3bdc3a5e6f749565]*/
{}

/*[clinic input]
@critical_section
list.remove

     value: object
     /

Remove first occurrence of value.

Raises ValueError if the value is not present.
[clinic start generated code]*/

static PyObject *
list_remove_impl(PyListObject *self, PyObject *value)
/*[clinic end generated code: output=b9b76a6633b18778 input=26c813dbb95aa93b]*/
{}

static int
list_traverse(PyObject *self, visitproc visit, void *arg)
{}

static PyObject *
list_richcompare_impl(PyObject *v, PyObject *w, int op)
{}

static PyObject *
list_richcompare(PyObject *v, PyObject *w, int op)
{}

/*[clinic input]
list.__init__

    iterable: object(c_default="NULL") = ()
    /

Built-in mutable sequence.

If no argument is given, the constructor creates a new empty list.
The argument must be an iterable if specified.
[clinic start generated code]*/

static int
list___init___impl(PyListObject *self, PyObject *iterable)
/*[clinic end generated code: output=0f3c21379d01de48 input=b3f3fe7206af8f6b]*/
{}

static PyObject *
list_vectorcall(PyObject *type, PyObject * const*args,
                size_t nargsf, PyObject *kwnames)
{}


/*[clinic input]
list.__sizeof__

Return the size of the list in memory, in bytes.
[clinic start generated code]*/

static PyObject *
list___sizeof___impl(PyListObject *self)
/*[clinic end generated code: output=3417541f95f9a53e input=b8030a5d5ce8a187]*/
{}

static PyObject *list_iter(PyObject *seq);
static PyObject *list_subscript(PyObject*, PyObject*);

static PyMethodDef list_methods[] =;

static PySequenceMethods list_as_sequence =;

static inline PyObject *
list_slice_step_lock_held(PyListObject *a, Py_ssize_t start, Py_ssize_t step, Py_ssize_t len)
{}

static PyObject *
list_slice_wrap(PyListObject *aa, Py_ssize_t start, Py_ssize_t stop, Py_ssize_t step)
{}

static PyObject *
list_subscript(PyObject* _self, PyObject* item)
{}

static Py_ssize_t
adjust_slice_indexes(PyListObject *lst,
                     Py_ssize_t *start, Py_ssize_t *stop,
                     Py_ssize_t step)
{}

static int
list_ass_subscript(PyObject* _self, PyObject* item, PyObject* value)
{}

static PyMappingMethods list_as_mapping =;

PyTypeObject PyList_Type =;

/*********************** List Iterator **************************/

static void listiter_dealloc(PyObject *);
static int listiter_traverse(PyObject *, visitproc, void *);
static PyObject *listiter_next(PyObject *);
static PyObject *listiter_len(PyObject *, PyObject *);
static PyObject *listiter_reduce_general(void *_it, int forward);
static PyObject *listiter_reduce(PyObject *, PyObject *);
static PyObject *listiter_setstate(PyObject *, PyObject *state);

PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");

static PyMethodDef listiter_methods[] =;

PyTypeObject PyListIter_Type =;


static PyObject *
list_iter(PyObject *seq)
{}

static void
listiter_dealloc(PyObject *self)
{}

static int
listiter_traverse(PyObject *it, visitproc visit, void *arg)
{}

static PyObject *
listiter_next(PyObject *self)
{}

static PyObject *
listiter_len(PyObject *self, PyObject *Py_UNUSED(ignored))
{}

static PyObject *
listiter_reduce(PyObject *it, PyObject *Py_UNUSED(ignored))
{}

static PyObject *
listiter_setstate(PyObject *self, PyObject *state)
{}

/*********************** List Reverse Iterator **************************/

typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;
    PyListObject *it_seq; /* Set to NULL when iterator is exhausted */
} listreviterobject;

static void listreviter_dealloc(PyObject *);
static int listreviter_traverse(PyObject *, visitproc, void *);
static PyObject *listreviter_next(PyObject *);
static PyObject *listreviter_len(PyObject *, PyObject *);
static PyObject *listreviter_reduce(PyObject *, PyObject *);
static PyObject *listreviter_setstate(PyObject *, PyObject *);

static PyMethodDef listreviter_methods[] =;

PyTypeObject PyListRevIter_Type =;

/*[clinic input]
list.__reversed__

Return a reverse iterator over the list.
[clinic start generated code]*/

static PyObject *
list___reversed___impl(PyListObject *self)
/*[clinic end generated code: output=b166f073208c888c input=eadb6e17f8a6a280]*/
{}

static void
listreviter_dealloc(PyObject *self)
{}

static int
listreviter_traverse(PyObject *it, visitproc visit, void *arg)
{}

static PyObject *
listreviter_next(PyObject *self)
{}

static PyObject *
listreviter_len(PyObject *self, PyObject *Py_UNUSED(ignored))
{}

static PyObject *
listreviter_reduce(PyObject *it, PyObject *Py_UNUSED(ignored))
{}

static PyObject *
listreviter_setstate(PyObject *self, PyObject *state)
{}

/* common pickling support */

static PyObject *
listiter_reduce_general(void *_it, int forward)
{}