kubernetes/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go

// CommonTokenStream is a token stream implementation that pulls tokens from a
// Lexer on a single channel. Field list is not shown in this view; see the
// full source for the struct body.
type CommonTokenStream

// NewCommonTokenStream creates a new CommonTokenStream instance using the supplied lexer to produce
// tokens and will pull tokens from the given lexer channel. Body not visible
// in this view.
func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream {}

// GetAllTokens returns all tokens currently pulled from the token source.
func (c *CommonTokenStream) GetAllTokens() []Token {}

// Mark satisfies the TokenStream interface. Implementation not visible here;
// in buffered streams this is typically a no-op marker — TODO confirm against
// the full source.
func (c *CommonTokenStream) Mark() int {}

// Release satisfies the TokenStream interface. The marker argument is ignored
// (blank identifier), which suggests a no-op — confirm in the full source.
func (c *CommonTokenStream) Release(_ int) {}

// Reset rewinds the stream; implementation not visible in this view.
func (c *CommonTokenStream) Reset() {}

// Seek moves the stream's current position to the given token index.
// Implementation not visible in this view.
func (c *CommonTokenStream) Seek(index int) {}

// Get returns the token at the given index. Implementation not visible in
// this view; presumably indexes into the buffered token slice.
func (c *CommonTokenStream) Get(index int) Token {}

// Consume advances the stream past the current token. Implementation not
// visible in this view.
func (c *CommonTokenStream) Consume() {}

// Sync makes sure index i in tokens has a token and returns true if a token is
// located at index i and otherwise false.
func (c *CommonTokenStream) Sync(i int) bool {}

// fetch adds up to n elements to the token buffer and returns the actual
// number of elements added (may be fewer than n, e.g. at EOF).
func (c *CommonTokenStream) fetch(n int) int {}

// GetTokens gets all tokens from start to stop inclusive, optionally filtered
// by the given token types (types may restrict the result — confirm nil
// handling in the full source).
func (c *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {}

// LA returns the token type of the token i positions ahead of (or, for
// negative i, behind) the current position — standard lookahead accessor.
// Implementation not visible in this view.
func (c *CommonTokenStream) LA(i int) int {}

// lazyInit performs deferred initialization of the stream on first use.
// Implementation not visible in this view.
func (c *CommonTokenStream) lazyInit() {}

// setup initializes internal stream state; called from lazyInit — confirm in
// the full source.
func (c *CommonTokenStream) setup() {}

// GetTokenSource returns the TokenSource this stream pulls tokens from.
func (c *CommonTokenStream) GetTokenSource() TokenSource {}

// SetTokenSource resets the token stream by setting its token source to the
// supplied TokenSource.
func (c *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {}

// NextTokenOnChannel returns the index of the next token on channel given a
// starting index. Returns i if tokens[i] is on channel. Returns -1 if there are
// no tokens on channel between 'i' and [TokenEOF]. Note the channel argument
// is ignored (blank identifier) in this signature — confirm intent in the
// full source.
func (c *CommonTokenStream) NextTokenOnChannel(i, _ int) int {}

// previousTokenOnChannel returns the index of the previous token on channel
// given a starting index. Returns i if tokens[i] is on channel. Returns -1 if
// there are no tokens on channel between i and 0.
func (c *CommonTokenStream) previousTokenOnChannel(i, channel int) int {}

// GetHiddenTokensToRight collects all tokens on a specified channel to the
// right of the current token up until we see a token on DEFAULT_TOKEN_CHANNEL
// or EOF. If channel is -1, it finds any non-default channel token.
func (c *CommonTokenStream) GetHiddenTokensToRight(tokenIndex, channel int) []Token {}

// GetHiddenTokensToLeft collects all tokens on channel to the left of the
// current token until we see a token on DEFAULT_TOKEN_CHANNEL. If channel is
// -1, it finds any non-default channel token.
func (c *CommonTokenStream) GetHiddenTokensToLeft(tokenIndex, channel int) []Token {}

// filterForChannel returns the tokens in the index range [left, right] that
// match channel; shared helper for GetHiddenTokensToRight/Left — confirm
// exact range semantics in the full source.
func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {}

// GetSourceName returns the name of the underlying input source.
// Implementation not visible in this view.
func (c *CommonTokenStream) GetSourceName() string {}

// Size returns the number of tokens currently buffered — confirm against the
// full source.
func (c *CommonTokenStream) Size() int {}

// Index returns the current position in the token stream.
func (c *CommonTokenStream) Index() int {}

// GetAllText returns the text of all tokens in the stream concatenated.
// Implementation not visible in this view.
func (c *CommonTokenStream) GetAllText() string {}

// GetTextFromTokens returns the source text spanning from the start token to
// the end token — confirm inclusivity and nil handling in the full source.
func (c *CommonTokenStream) GetTextFromTokens(start, end Token) string {}

// GetTextFromRuleContext returns the source text covered by the given rule
// context. Note the parameter is named "interval" despite being a
// RuleContext — naming kept as-is from the original.
func (c *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string {}

// GetTextFromInterval returns the source text for the tokens whose indices
// fall within the given interval.
func (c *CommonTokenStream) GetTextFromInterval(interval Interval) string {}

// Fill gets all tokens from the lexer until EOF, buffering the entire stream.
func (c *CommonTokenStream) Fill() {}

// adjustSeekIndex maps a requested seek index to a valid stream position
// (presumably the next on-channel token at or after i — confirm in the full
// source).
func (c *CommonTokenStream) adjustSeekIndex(i int) int {}

// LB returns the token k positions behind the current position (look-back).
// Implementation not visible in this view.
func (c *CommonTokenStream) LB(k int) Token {}

// LT returns the token k positions ahead of the current position (lookahead);
// negative k presumably delegates to LB — confirm in the full source.
func (c *CommonTokenStream) LT(k int) Token {}

// getNumberOfOnChannelTokens returns how many tokens are on this stream's
// channel, counting EOF once.
func (c *CommonTokenStream) getNumberOfOnChannelTokens() int {}