Tokenizer.cc
Source listing excerpts (file line numbers match the Definition references below):

    65:  const SBuf::size_type tokenLen = buf_.findFirstOf(delimiters); // not found = npos => consume to end
    79:  Parser::Tokenizer::prefix(SBuf &returnedToken, const CharacterSet &tokenChars, const SBuf::size_type limit)
    100: Parser::Tokenizer::prefix(const char *description, const CharacterSet &tokenChars, const SBuf::size_type limit)
    117: Parser::Tokenizer::suffix(SBuf &returnedToken, const CharacterSet &tokenChars, const SBuf::size_type limit)
    238: Parser::Tokenizer::int64(int64_t &result, int base, bool allowSign, const SBuf::size_type limit)
    293: if (any < 0 || static_cast<uint64_t>(acc) > cutoff || (static_cast<uint64_t>(acc) == cutoff && c > cutlim))
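The guard excerpted at line 293 is the classic strtoll(3) overflow check: cutoff and cutlim are precomputed so the next digit can be rejected before the multiply-add wraps. A minimal standalone sketch of the same idea (simplified bookkeeping and illustrative names, not the Squid code):

    #include <cctype>
    #include <cstdint>
    #include <string>

    // Accumulate decimal digits into an int64_t, refusing to overflow.
    // cutoff/cutlim mirror the strtoll-style guard excerpted above.
    static bool parseDec64(const std::string &s, int64_t &result)
    {
        const uint64_t cutoff = static_cast<uint64_t>(INT64_MAX) / 10;
        const int cutlim = static_cast<int>(static_cast<uint64_t>(INT64_MAX) % 10);
        uint64_t acc = 0;
        bool any = false;
        for (const unsigned char ch : s) {
            if (!std::isdigit(ch))
                return false;
            const int c = ch - '0';
            if (acc > cutoff || (acc == cutoff && c > cutlim))
                return false; // the next multiply-add would overflow
            acc = acc * 10 + c;
            any = true;
        }
        if (!any)
            return false; // no digits seen
        result = static_cast<int64_t>(acc);
        return true;
    }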
#define TexcHere(msg)
legacy convenience macro; it is not difficult to type Here() now
Definition: TextException.h:63
optimized set of C chars, with quick membership test and merge support
Definition: CharacterSet.h:18
const char * name
optional set label for debugging (default: "anonymous")
Definition: CharacterSet.h:72
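CharacterSet is the membership-test type that every Tokenizer operation below takes for its tokenChars, delimiters, or discardables argument. A minimal sketch, assuming the two-argument CharacterSet(label, chars) constructor and the operator[] membership test from CharacterSet.h (neither is shown in this listing) and Squid source-tree include paths:

    #include "base/CharacterSet.h"

    // a labelled set keeps debugging output readable (label defaults to "anonymous")
    static const CharacterSet FieldDelims("field-delimiters", ";, \t");

    static bool isFieldDelim(const unsigned char c)
    {
        return FieldDelims[c]; // the quick membership test mentioned in the brief above
    }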
Parser::InsufficientInput
thrown by modern "incremental" parsers when they need more data
Definition: forward.h:18
Parser::Tokenizer
Definition: Tokenizer.h:30
SBuf::size_type skipAllTrailing(const CharacterSet &discardables)
Definition: Tokenizer.cc:222
bool prefix(SBuf &returnedToken, const CharacterSet &tokenChars, SBuf::size_type limit=SBuf::npos)
Definition: Tokenizer.cc:79
bool suffix(SBuf &returnedToken, const CharacterSet &tokenChars, SBuf::size_type limit=SBuf::npos)
Definition: Tokenizer.cc:117
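prefix() extracts the longest leading run of tokenChars into returnedToken and fails without consuming anything if the buffer does not start with such a byte; suffix() is the mirror image at the end of the buffer. A usage sketch, assuming Parser::Tokenizer is constructed from an SBuf and that the predefined CharacterSet::DIGIT set exists (neither appears in this listing):

    #include "base/CharacterSet.h"
    #include "parser/Tokenizer.h"
    #include "sbuf/SBuf.h"

    // extract the leading decimal field from input such as "8080/tcp"
    static bool parsePortPrefix(const SBuf &input, SBuf &port)
    {
        Parser::Tokenizer tok(input); // assumed SBuf constructor
        // succeeds only if the buffer starts with at least one digit;
        // on success the digits are consumed and copied into port ("8080" here)
        return tok.prefix(port, CharacterSet::DIGIT);
    }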
SBuf::size_type successTrailing(const SBuf::size_type n)
convenience method: consumes up to n last bytes and returns their count
Definition: Tokenizer.cc:55
bool skipOne(const CharacterSet &discardables)
Definition: Tokenizer.cc:161
bool token(SBuf &returnedToken, const CharacterSet &delimiters)
Definition: Tokenizer.cc:61
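token() behaves like strtok(3): it skips delimiters around the extracted token and returns false once no further token can be found, as suggested by the findFirstOf() excerpt at line 65. A sketch under the same assumptions as above (the Blanks set and splitWords() name are illustrative):

    #include <vector>
    #include "base/CharacterSet.h"
    #include "parser/Tokenizer.h"
    #include "sbuf/SBuf.h"

    // split a blank-separated list into words, strtok(3)-style
    static std::vector<SBuf> splitWords(const SBuf &input)
    {
        static const CharacterSet Blanks("blanks", " \t");
        Parser::Tokenizer tok(input);
        std::vector<SBuf> words;
        SBuf word;
        while (tok.token(word, Blanks)) // false once no further token is found
            words.push_back(word);
        return words;
    }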
SBuf consumeTrailing(const SBuf::size_type n)
convenience method: consumes up to n last bytes and returns them
Definition: Tokenizer.cc:40
SBuf::size_type success(const SBuf::size_type n)
convenience method: consume()s up to n bytes and returns their count
Definition: Tokenizer.cc:33
SBuf::size_type skipAll(const CharacterSet &discardables)
Definition: Tokenizer.cc:137
bool int64(int64_t &result, int base=0, bool allowSign=true, SBuf::size_type limit=SBuf::npos)
Definition: Tokenizer.cc:238
SBuf consume(const SBuf::size_type n)
convenience method: consumes up to n bytes, counts, and returns them
Definition: Tokenizer.cc:22
bool skipOneTrailing(const CharacterSet &discardables)
Definition: Tokenizer.cc:211
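suffix(), skipAllTrailing(), skipOneTrailing(), and the other *Trailing helpers mirror the head-of-buffer operations but work from the end of the buffer, which is convenient for stripping line terminators before parsing the remaining fields. A hedged sketch (the CRLF set and chompLine() name are illustrative):

    #include "base/CharacterSet.h"
    #include "parser/Tokenizer.h"

    // strip any trailing CR/LF bytes from a protocol line before field parsing
    static void chompLine(Parser::Tokenizer &tok)
    {
        static const CharacterSet Crlf("CRLF", "\r\n");
        tok.skipAllTrailing(Crlf); // removes zero or more trailing CR/LF bytes
    }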
void skipRequired(const char *description, const SBuf &tokenToSkip)
Definition: Tokenizer.cc:149
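The skip* family discards matching input instead of returning it: skipOne() drops a single leading byte from the set, skipAll() drops the longest run, and skipRequired() insists that tokenToSkip is present. Judging by the cross-references in this listing, skipRequired() throws Parser::InsufficientInput when the buffer is merely a truncated prefix of the expected token (via SBuf::startsWith) and a TexcHere/ToSBuf-built exception otherwise. A sketch under the same assumptions:

    #include "base/CharacterSet.h"
    #include "parser/Tokenizer.h"
    #include "sbuf/SBuf.h"

    // consume an "HTTP/" protocol prefix, tolerating optional leading blanks
    static void skipHttpPrefix(Parser::Tokenizer &tok)
    {
        static const CharacterSet Blanks("blanks", " \t");
        tok.skipAll(Blanks);                              // zero or more blanks
        tok.skipRequired("HTTP/ prefix", SBuf("HTTP/"));  // throws if absent
    }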
int64_t udec64(const char *description, SBuf::size_type limit=SBuf::npos)
int64() wrapper but limited to unsigned decimal integers (for now)
Definition: Tokenizer.cc:316
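int64() extracts an optionally signed integer honouring base and limit, using the overflow guard excerpted at line 293; udec64() is documented above as its wrapper for unsigned decimal values and, given its int64_t return type and description parameter, presumably reports failures by throwing rather than returning false. A sketch of a length-field parser (name and limit are illustrative):

    #include <cstdint>
    #include "parser/Tokenizer.h"
    #include "sbuf/SBuf.h"

    // parse a Content-Length style field: decimal digits only, no sign
    static bool parseLength(const SBuf &raw, int64_t &length)
    {
        Parser::Tokenizer tok(raw); // assumed SBuf constructor
        // base 10, allowSign=false, and a 20-byte limit keep the accepted syntax tight
        return tok.int64(length, 10, false, 20);
    }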
bool startsWith(const SBuf &S, const SBufCaseSensitive isCaseSensitive=caseSensitive) const
Definition: SBuf.cc:442
SBuf ToSBuf(Args &&... args)
slowly stream-prints all arguments into a freshly allocated SBuf
Definition: Stream.h:63
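SBuf::startsWith() and ToSBuf() appear in this listing because of skipRequired()'s failure path: startsWith() presumably distinguishes a truncated-but-matching buffer from a genuine mismatch, while ToSBuf() assembles the exception text. A tiny ToSBuf() sketch (the message content is illustrative):

    #include "sbuf/SBuf.h"
    #include "sbuf/Stream.h"

    // each argument is stream-printed, in order, into a freshly allocated SBuf
    static const SBuf message = ToSBuf("cannot skip ", "HTTP/ prefix", " at byte ", 12);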