kubernetes/vendor/github.com/google/shlex/shlex.go

// TokenType is a top-level token classification: a word, space, comment, or unknown.
type TokenType int

// runeTokenClass is the type of a UTF-8 character classification: a quote, space, escape.
type runeTokenClass int

// lexerState is the internal state used by the scanner state machine.
type lexerState int

// Token is a (type, value) pair representing a lexicographical token.
type Token struct {
	tokenType TokenType
	value     string
}

// Equal reports whether tokens a, and b, are equal.
// Two tokens are equal if both their types and values are equal. A nil token can
// never be equal to another token.
func (a *Token) Equal(b *Token) bool {
	if a == nil || b == nil {
		return false
	}
	if a.tokenType != b.tokenType {
		return false
	}
	return a.value == b.value
}

// Named classes of UTF-8 runes.
const (
	spaceRunes            = " \t\r\n"
	escapingQuoteRunes    = `"`
	nonEscapingQuoteRunes = "'"
	escapeRunes           = `\`
	commentRunes          = "#"
)

// Classes of rune tokens.
const (
	unknownRuneClass runeTokenClass = iota
	spaceRuneClass
	escapingQuoteRuneClass
	nonEscapingQuoteRuneClass
	escapeRuneClass
	commentRuneClass
	eofRuneClass
)

// Classes of lexicographic tokens.
const (
	UnknownToken TokenType = iota
	WordToken
	SpaceToken
	CommentToken
)

// Lexer state machine states.
const (
	startState           lexerState = iota // no runes have been seen
	inWordState                            // processing regular runes in a word
	escapingState                          // we have just consumed an escape rune; the next rune is literal
	escapingQuotedState                    // we have just consumed an escape rune within a quoted string
	quotingEscapingState                   // we are within a quoted string that supports escaping
	quotingState                           // we are within a string that does not support escaping
	commentState                           // we are within a comment (everything following an unquoted, unescaped #)
)

// tokenClassifier is used for classifying rune characters.
type tokenClassifier map[rune]runeTokenClass

// addRuneClass registers every rune in runes as belonging to tokenType.
func (typeMap tokenClassifier) addRuneClass(runes string, tokenType runeTokenClass) {
	for _, runeChar := range runes {
		typeMap[runeChar] = tokenType
	}
}

// newDefaultClassifier creates a new classifier for ASCII characters.
func newDefaultClassifier() tokenClassifier {
	t := tokenClassifier{}
	t.addRuneClass(spaceRunes, spaceRuneClass)
	t.addRuneClass(escapingQuoteRunes, escapingQuoteRuneClass)
	t.addRuneClass(nonEscapingQuoteRunes, nonEscapingQuoteRuneClass)
	t.addRuneClass(escapeRunes, escapeRuneClass)
	t.addRuneClass(commentRunes, commentRuneClass)
	return t
}

// ClassifyRune classifies a rune. Runes with no registered class map to
// unknownRuneClass (the map zero value), which the scanner treats as a
// plain word character.
func (t tokenClassifier) ClassifyRune(runeVal rune) runeTokenClass {
	return t[runeVal]
}

// Lexer turns an input stream into a sequence of words. Whitespace and
// comments are skipped.
type Lexer Tokenizer

// NewLexer creates a new lexer from an input stream.
func NewLexer(r io.Reader) *Lexer {
	return (*Lexer)(NewTokenizer(r))
}

// Next returns the next word, or an error. If there are no more words,
// the error will be io.EOF.
func (l *Lexer) Next() (string, error) {
	for {
		token, err := (*Tokenizer)(l).Next()
		if err != nil {
			return "", err
		}
		switch token.tokenType {
		case WordToken:
			return token.value, nil
		case CommentToken:
			// skip comments
		default:
			return "", fmt.Errorf("Unknown token type: %v", token.tokenType)
		}
	}
}

// Tokenizer turns an input stream into a sequence of typed tokens.
type Tokenizer struct {
	input      bufio.Reader
	classifier tokenClassifier
}

// NewTokenizer creates a new tokenizer from an input stream.
func NewTokenizer(r io.Reader) *Tokenizer {
	input := bufio.NewReader(r)
	classifier := newDefaultClassifier()
	return &Tokenizer{
		input:      *input,
		classifier: classifier}
}

// scanStream scans the stream for the next token using the internal state machine.
// It returns io.EOF when the stream is exhausted; an unterminated quote or a
// trailing escape character yields the partial token together with a non-nil error.
func (t *Tokenizer) scanStream() (*Token, error) {
	state := startState
	var tokenType TokenType
	var value []rune

	for {
		nextRune, _, err := t.input.ReadRune()
		nextRuneType := t.classifier.ClassifyRune(nextRune)

		// Fold EOF into the state machine as a pseudo rune class so every
		// state handles end-of-input uniformly.
		if err == io.EOF {
			nextRuneType = eofRuneClass
			err = nil
		} else if err != nil {
			return nil, err
		}

		switch state {
		case startState: // no runes have been seen yet; skip leading space
			switch nextRuneType {
			case eofRuneClass:
				return nil, io.EOF
			case spaceRuneClass:
				// ignore leading whitespace
			case escapingQuoteRuneClass:
				tokenType = WordToken
				state = quotingEscapingState
			case nonEscapingQuoteRuneClass:
				tokenType = WordToken
				state = quotingState
			case escapeRuneClass:
				tokenType = WordToken
				state = escapingState
			case commentRuneClass:
				tokenType = CommentToken
				state = commentState
			default:
				tokenType = WordToken
				value = append(value, nextRune)
				state = inWordState
			}
		case inWordState: // inside a regular (unquoted) word
			switch nextRuneType {
			case eofRuneClass, spaceRuneClass:
				return &Token{tokenType: tokenType, value: string(value)}, err
			case escapingQuoteRuneClass:
				state = quotingEscapingState
			case nonEscapingQuoteRuneClass:
				state = quotingState
			case escapeRuneClass:
				state = escapingState
			default:
				value = append(value, nextRune)
			}
		case escapingState: // the rune after an escape character, outside quotes
			switch nextRuneType {
			case eofRuneClass:
				err = fmt.Errorf("EOF found after escape character")
				return &Token{tokenType: tokenType, value: string(value)}, err
			default:
				state = inWordState
				value = append(value, nextRune)
			}
		case escapingQuotedState: // the rune after an escape character, inside double quotes
			switch nextRuneType {
			case eofRuneClass:
				err = fmt.Errorf("EOF found after escape character")
				return &Token{tokenType: tokenType, value: string(value)}, err
			default:
				state = quotingEscapingState
				value = append(value, nextRune)
			}
		case quotingEscapingState: // inside double quotes (escapes honored)
			switch nextRuneType {
			case eofRuneClass:
				err = fmt.Errorf("EOF found when expecting closing quote")
				return &Token{tokenType: tokenType, value: string(value)}, err
			case escapingQuoteRuneClass:
				state = inWordState
			case escapeRuneClass:
				state = escapingQuotedState
			default:
				value = append(value, nextRune)
			}
		case quotingState: // inside single quotes (no escapes)
			switch nextRuneType {
			case eofRuneClass:
				err = fmt.Errorf("EOF found when expecting closing quote")
				return &Token{tokenType: tokenType, value: string(value)}, err
			case nonEscapingQuoteRuneClass:
				state = inWordState
			default:
				value = append(value, nextRune)
			}
		case commentState: // inside a comment; runs to newline or EOF
			switch nextRuneType {
			case eofRuneClass:
				return &Token{tokenType: tokenType, value: string(value)}, err
			case spaceRuneClass:
				if nextRune == '\n' {
					state = startState
					return &Token{tokenType: tokenType, value: string(value)}, err
				}
				value = append(value, nextRune)
			default:
				value = append(value, nextRune)
			}
		default:
			return nil, fmt.Errorf("Unexpected state: %v", state)
		}
	}
}

// Next returns the next token in the stream.
func (t *Tokenizer) Next() (*Token, error) {
	return t.scanStream()
}

// Split partitions a string into a slice of strings.
func Split(s string) ([]string, error) {
	l := NewLexer(strings.NewReader(s))
	subStrings := make([]string, 0)
	for {
		word, err := l.Next()
		if err != nil {
			if err == io.EOF {
				return subStrings, nil
			}
			return subStrings, err
		}
		subStrings = append(subStrings, word)
	}
}