
Searched refs:tokenize (Results 1 – 23 of 23) sorted by relevance

/freebsd/tools/tools/vt/keymaps/
convert-keymap.pl:89  sub tokenize { # split on white space and parentheses (but not within token)  subroutine
convert-keymap.pl:117  my @KEYTOKEN = tokenize($_);
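
The keymap converter's tokenize subroutine is described above as splitting on white space and on parentheses without splitting inside a token. A hedged C++ sketch of one plausible reading of that rule, with parentheses emitted as stand-alone tokens (the real script is Perl and may treat quoting and parentheses differently):

    #include <cctype>
    #include <string>
    #include <vector>

    // Sketch only: split on whitespace and emit '(' and ')' as their own
    // tokens. This illustrates the splitting rule described in the comment
    // above, not the actual convert-keymap.pl implementation.
    std::vector<std::string> tokenize(const std::string &line) {
        std::vector<std::string> tokens;
        std::string cur;
        auto flush = [&] { if (!cur.empty()) { tokens.push_back(cur); cur.clear(); } };
        for (char c : line) {
            if (std::isspace(static_cast<unsigned char>(c))) {
                flush();
            } else if (c == '(' || c == ')') {
                flush();
                tokens.push_back(std::string(1, c));
            } else {
                cur += c;
            }
        }
        flush();
        return tokens;
    }
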
/freebsd/sbin/devfs/
extern.h:52  void tokenize(const char *, int *, char ***);
devfs.c:183  tokenize(const char *line, int *acp, char ***avp)  in tokenize() function
rule.c:297  tokenize(str, &ac, &av);  in rulespec_instr()
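
The devfs(8) hits show a tokenize() that takes a rule line and fills argc/argv-style out-parameters, which rulespec_instr() then consumes. A minimal sketch of that calling convention, assuming plain whitespace splitting (the real sbin/devfs implementation and its quoting rules are not shown in these results):

    #include <cstdlib>
    #include <cstring>
    #include <string>
    #include <vector>

    // Sketch: split line on whitespace and hand back a heap-allocated,
    // NULL-terminated argv-style array plus a token count. Illustrative only;
    // the caller frees each element and the array.
    void tokenize(const char *line, int *acp, char ***avp) {
        std::vector<std::string> words;
        std::string cur;
        for (const char *p = line; *p != '\0'; ++p) {
            if (*p == ' ' || *p == '\t' || *p == '\n') {
                if (!cur.empty()) { words.push_back(cur); cur.clear(); }
            } else {
                cur += *p;
            }
        }
        if (!cur.empty()) words.push_back(cur);

        char **av = static_cast<char **>(calloc(words.size() + 1, sizeof(char *)));
        for (size_t i = 0; i < words.size(); ++i)
            av[i] = strdup(words[i].c_str());
        *acp = static_cast<int>(words.size());
        *avp = av;
    }
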
/freebsd/contrib/llvm-project/clang/lib/Tooling/Syntax/
TokenBufferTokenManager.cpp:19  auto It = ExtraTokens.try_emplace(FID, tokenize(FID, SM, LangOpts));  in lexBuffer()
Tokens.cpp:575  std::vector<syntax::Token> syntax::tokenize(const FileRange &FR,  in tokenize() function in syntax
Tokens.cpp:608  std::vector<syntax::Token> syntax::tokenize(FileID FID, const SourceManager &SM,  in tokenize() function in syntax
Tokens.cpp:610  return tokenize(syntax::FileRange(FID, 0, SM.getFileIDSize(FID)), SM, LO);  in tokenize()
Tokens.cpp:883  File.SpelledTokens = tokenize(FID, SM, LangOpts);  in buildSpelledTokens()
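
These hits show the two clang syntax::tokenize overloads: one lexes an explicit FileRange, the other lexes a whole FileID and is what buildSpelledTokens() and TokenBufferTokenManager use. A hedged usage sketch, assuming a SourceManager, LangOptions, and valid FileID already exist (the clang frontend setup needed to obtain them is omitted):

    #include "clang/Basic/LangOptions.h"
    #include "clang/Basic/SourceManager.h"
    #include "clang/Tooling/Syntax/Tokens.h"

    using namespace clang;

    // Lex the whole file into its spelled tokens, mirroring the
    // buildSpelledTokens() call site above. Per Tokens.cpp, the FileID
    // overload forwards to tokenize(FileRange(FID, 0, size), SM, LO).
    std::vector<syntax::Token> spelledTokensOf(FileID FID,
                                               const SourceManager &SM,
                                               const LangOptions &LangOpts) {
      return syntax::tokenize(FID, SM, LangOpts);
    }
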
/freebsd/contrib/llvm-project/lld/ELF/
ScriptLexer.h:24  void tokenize(MemoryBufferRef mb);
ScriptLexer.cpp:88  ScriptLexer::ScriptLexer(MemoryBufferRef mb) { tokenize(mb); }  in ScriptLexer()
ScriptLexer.cpp:103  void ScriptLexer::tokenize(MemoryBufferRef mb) {  in tokenize() function in ScriptLexer
ScriptParser.cpp:410  tokenize(*mb);  in readInclude()
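
In the lld linker-script lexer, the constructor tokenizes the whole input buffer up front, and readInclude() later calls tokenize() again for each buffer pulled in by an INCLUDE directive. A generic sketch of that eager-tokenize-in-constructor pattern (not lld's actual token representation or MemoryBufferRef handling):

    #include <sstream>
    #include <string>
    #include <vector>

    // Sketch of the pattern the ScriptLexer hits show: tokenize once in the
    // constructor, and let further buffers (e.g. included files) append
    // their tokens later through the same member function.
    class Lexer {
    public:
      explicit Lexer(const std::string &buf) { tokenize(buf); }

      void tokenize(const std::string &buf) {
        std::istringstream in(buf);
        std::string tok;
        while (in >> tok)
          tokens.push_back(tok);
      }

      std::vector<std::string> tokens;  // the real lexer keeps richer token state
    };
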
/freebsd/contrib/llvm-project/lld/COFF/
Driver.h:61  llvm::opt::InputArgList parse(StringRef s) { return parse(tokenize(s)); }  in parse()
Driver.h:72  std::vector<const char *> tokenize(StringRef s);
DriverUtils.cpp:1006  std::vector<const char *> v = tokenize(*s);  in addLINK()
DriverUtils.cpp:1010  std::vector<const char *> v = tokenize(*s);  in addLINK()
DriverUtils.cpp:1015  std::vector<const char *> ArgParser::tokenize(StringRef s) {  in tokenize() function in lld::coff::ArgParser
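
The COFF driver's ArgParser splits a flat string (such as the value consumed in addLINK()) into an argv-style vector with tokenize(StringRef), and Driver.h adds a parse(StringRef) convenience overload that simply forwards to parse(tokenize(s)). A generic sketch of that split-then-parse overload pattern, with placeholder types instead of llvm::opt::InputArgList and without lld's quoting rules:

    #include <sstream>
    #include <string>
    #include <vector>

    // Placeholder for llvm::opt::InputArgList in this sketch.
    struct ParsedArgs {
      std::vector<std::string> args;
    };

    // Split a flat command-line string on whitespace; real Windows-style
    // tokenization also handles quoting, which is omitted here.
    std::vector<std::string> tokenize(const std::string &s) {
      std::istringstream in(s);
      std::vector<std::string> argv;
      for (std::string arg; in >> arg; )
        argv.push_back(arg);
      return argv;
    }

    ParsedArgs parse(const std::vector<std::string> &argv) {
      return ParsedArgs{argv};  // a real parser matches entries against an option table
    }

    // Convenience overload mirroring the Driver.h hit: parse(tokenize(s)).
    ParsedArgs parse(const std::string &s) { return parse(tokenize(s)); }
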
/freebsd/contrib/llvm-project/clang/include/clang/Tooling/Syntax/
Tokens.h:405  std::vector<syntax::Token> tokenize(FileID FID, const SourceManager &SM,
Tokens.h:412  tokenize(const FileRange &FR, const SourceManager &SM, const LangOptions &LO);
/freebsd/tools/tools/notescheck/
notescheck.py:217  def tokenize(line):  function
notescheck.py:248  words = tokenize(line)
/freebsd/contrib/ntp/sntp/libopts/
Makefile.am:63  time.c tokenize.c usage.c \
Makefile.in:484  streqvcmp.c text_mmap.c time.c tokenize.c usage.c version.c
/freebsd/libexec/nuageinit/
yaml.lua:146  exports.tokenize = function (str)  function
yaml.lua:582  return Parser:new(exports.tokenize(str)):parse()
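
The nuageinit YAML helper exposes a module-level tokenize(str) whose output is fed directly into a parser object: Parser:new(exports.tokenize(str)):parse(). A small hedged C++ sketch of that tokenize-then-parse flow, using placeholder token and document types (the real module is Lua and its YAML tokenizer is far richer):

    #include <sstream>
    #include <string>
    #include <vector>

    using Token = std::string;                 // placeholder token type
    using Document = std::vector<Token>;       // placeholder parse result

    // Sketch: break the raw text into line tokens; a real YAML tokenizer
    // also tracks indentation, scalars, and flow syntax.
    std::vector<Token> tokenize(const std::string &str) {
      std::vector<Token> toks;
      std::istringstream in(str);
      for (std::string line; std::getline(in, line); )
        toks.push_back(line);
      return toks;
    }

    class Parser {
    public:
      explicit Parser(std::vector<Token> toks) : tokens(std::move(toks)) {}
      Document parse() { return tokens; }      // placeholder: a real parser builds a tree
    private:
      std::vector<Token> tokens;
    };

    // Mirrors yaml.lua's "return Parser:new(exports.tokenize(str)):parse()".
    Document load(const std::string &str) { return Parser(tokenize(str)).parse(); }
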
/freebsd/contrib/tcsh/
src.desc:60  tw.parse.c: All the listing and completion. Tries to tokenize the line
/freebsd/contrib/ntp/ntpdc/
ntpdc.c:83  static void tokenize (const char *, char **, int *);
ntpdc.c:1182  tokenize(cmdline, tokens, &ntok);  in docmd()
ntpdc.c:1291  tokenize(  in tokenize() function
/freebsd/contrib/ntp/ntpq/
ntpq.c:196  static void tokenize (const char *, char **, int *);
ntpq.c:1645  tokenize(cmdline, tokens, &ntok);  in docmd()
ntpq.c:1750  * tokenize - turn a command line into tokens  in docmd()
ntpq.c:1759  tokenize(
ntpq.c:1763  tokenize(  in tokenize() function
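
ntpdc and ntpq share the same helper shape: a static tokenize(const char *, char **, int *) that docmd() uses to split an interactive command line into a token array plus a count (the ChangeLog entry further down records a buffer-overrun fix in this routine). A hedged sketch of that calling convention with an explicit bound on the token count; the array sizes are hypothetical, not the values used in the NTP sources:

    #include <cctype>
    #include <cstddef>

    #define MAXTOKENS  20    /* hypothetical bound on tokens per command line */
    #define MAXLINELEN 512   /* hypothetical scratch-buffer size */

    // Sketch: copy each whitespace-separated word into a private buffer and
    // record a pointer to it in tokens[], never writing past MAXTOKENS
    // entries. *ntok receives the number of tokens found.
    static void tokenize(const char *cmdline, char **tokens, int *ntok) {
        static char buf[MAXLINELEN];
        size_t used = 0;
        const char *p = cmdline;

        *ntok = 0;
        while (*ntok < MAXTOKENS) {
            while (*p != '\0' && std::isspace(static_cast<unsigned char>(*p)))
                ++p;                              /* skip leading whitespace */
            if (*p == '\0' || used + 1 >= sizeof(buf))
                break;                            /* input or scratch space exhausted */
            tokens[*ntok] = &buf[used];
            while (*p != '\0' && !std::isspace(static_cast<unsigned char>(*p)) &&
                   used + 1 < sizeof(buf))
                buf[used++] = *p++;
            buf[used++] = '\0';
            ++*ntok;
            while (*p != '\0' && !std::isspace(static_cast<unsigned char>(*p)))
                ++p;                              /* drop the tail of an over-long token */
        }
    }
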
/freebsd/sys/dev/ocs_fc/
ocs_hw_queues.c:1195  tokenize(const char *s, tok_t *tok)  in tokenize() function
ocs_hw_queues.c:1616  ((s = tokenize(s, &tokarray.tokens[tokarray.inuse_count]))) != NULL; ) {  in ocs_hw_qtop_parse()
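
The ocs_fc queue-topology parser uses a pull-style tokenizer: tokenize(s, &tok) fills in one token and returns the position just past it, or NULL at end of input, so ocs_hw_qtop_parse() can drive it from a for loop as shown above. A hedged sketch of that interface, with a placeholder tok_t and plain whitespace delimiters (the driver's real token type and delimiter set are not shown in these results):

    #include <cctype>
    #include <cstddef>

    // Placeholder token descriptor; the real ocs_fc tok_t carries more detail.
    struct tok_t {
        const char *start;   // first character of the token
        size_t      len;     // token length
    };

    // Pull-style tokenizer: extract the next whitespace-separated token
    // starting at s, fill *tok, and return where to resume scanning,
    // or nullptr once the input is exhausted.
    static const char *tokenize(const char *s, tok_t *tok) {
        while (*s != '\0' && std::isspace(static_cast<unsigned char>(*s)))
            ++s;
        if (*s == '\0')
            return nullptr;
        tok->start = s;
        while (*s != '\0' && !std::isspace(static_cast<unsigned char>(*s)))
            ++s;
        tok->len = static_cast<size_t>(s - tok->start);
        return s;
    }

    // Caller shape, mirroring ocs_hw_qtop_parse():
    //   tok_t tok;
    //   for (const char *p = input; (p = tokenize(p, &tok)) != nullptr; )
    //       /* consume tok */;
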
/freebsd/contrib/ntp/
CommitLog:5750  sntp/libopts/tokenize.c@1.15 +2 -19
CommitLog:25964  sntp/libopts/tokenize.c@1.14 +1 -1
[all...]
CommitLog-4.1.0:4623  * ntpdc/ntpdc.c (tokenize): Define cp as const char *, remove
ChangeLog:1364  * [Sec 2630] buffer overrun in ntpq tokenize().