Project homepage Mailing List  Warmcat.com  API Docs  Github Mirror 
{"schema":"libjg2-1", "vpath":"/git/", "avatar":"/git/avatar/", "alang":"en-US,en;q\u003d0.5", "gen_ut":1571200288, "reponame":"libwebsockets", "desc":"libwebsockets lightweight C networking library", "owner": { "name": "Andy Green", "email": "andy@warmcat.com", "md5": "c50933ca2aa61e0fe2c43d46bb6b59cb" },"url":"https://libwebsockets.org/repo/libwebsockets", "f":3, "items": [ {"schema":"libjg2-1", "cid":"6b1a09200616f647589b7a523d36bc47", "oid":{ "oid": "868eea1c2e0191ba74e557ea4ed278047ca8f2be", "alias": [ "refs/heads/v3.1-stable"]},"blobname": "include/libwebsockets/lws-tokenize.h", "blob": "/*\n * libwebsockets - small server side websockets and web server implementation\n *\n * Copyright (C) 2010-2018 Andy Green \u003candy@warmcat.com\u003e\n *\n * This library is free software; you can redistribute it and/or\n * modify it under the terms of the GNU Lesser General Public\n * License as published by the Free Software Foundation:\n * version 2.1 of the License.\n *\n * This library is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n * Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public\n * License along with this library; if not, write to the Free Software\n * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,\n * MA 02110-1301 USA\n *\n * included from libwebsockets.h\n */\n\n/* Do not treat - as a terminal character, so \u0022my-token\u0022 is one token */\n#define LWS_TOKENIZE_F_MINUS_NONTERM\t(1 \u003c\u003c 0)\n/* Separately report aggregate colon-delimited tokens */\n#define LWS_TOKENIZE_F_AGG_COLON\t(1 \u003c\u003c 1)\n/* Enforce sequencing for a simple token , token , token ... list */\n#define LWS_TOKENIZE_F_COMMA_SEP_LIST\t(1 \u003c\u003c 2)\n/* Allow more characters in the tokens and less delimiters... default is\n * only alphanumeric + underscore in tokens */\n#define LWS_TOKENIZE_F_RFC7230_DELIMS\t(1 \u003c\u003c 3)\n/* Do not treat . as a terminal character, so \u0022warmcat.com\u0022 is one token */\n#define LWS_TOKENIZE_F_DOT_NONTERM\t(1 \u003c\u003c 4)\n/* If something starts looking like a float, like 1.2, force to be string token.\n * This lets you receive dotted-quads like 192.168.0.1 as string tokens, and\n * avoids illegal float format detection like 1.myserver.com */\n#define LWS_TOKENIZE_F_NO_FLOATS\t(1 \u003c\u003c 5)\n\ntypedef enum {\n\n\tLWS_TOKZE_ERRS\t\t\t\u003d 5, /* the number of errors defined */\n\n\tLWS_TOKZE_ERR_BROKEN_UTF8\t\u003d -5,\t/* malformed or partial utf8 */\n\tLWS_TOKZE_ERR_UNTERM_STRING\t\u003d -4,\t/* ended while we were in \u0022\u0022 */\n\tLWS_TOKZE_ERR_MALFORMED_FLOAT\t\u003d -3,\t/* like 0..1 or 0.1.1 */\n\tLWS_TOKZE_ERR_NUM_ON_LHS\t\u003d -2,\t/* like 123\u003d or 0.1\u003d */\n\tLWS_TOKZE_ERR_COMMA_LIST\t\u003d -1,\t/* like \u0022,tok\u0022, or, \u0022tok,,\u0022 */\n\n\tLWS_TOKZE_ENDED \u003d 0,\t\t/* no more content */\n\n\t/* Note: results have ordinal 1+, EOT is 0 and errors are \u003c 0 */\n\n\tLWS_TOKZE_DELIMITER,\t\t/* a delimiter appeared */\n\tLWS_TOKZE_TOKEN,\t\t/* a token appeared */\n\tLWS_TOKZE_INTEGER,\t\t/* an integer appeared */\n\tLWS_TOKZE_FLOAT,\t\t/* a float appeared */\n\tLWS_TOKZE_TOKEN_NAME_EQUALS,\t/* token [whitespace] \u003d */\n\tLWS_TOKZE_TOKEN_NAME_COLON,\t/* token [whitespace] : (only with\n\t\t\t\t\t LWS_TOKENIZE_F_AGG_COLON flag) */\n\tLWS_TOKZE_QUOTED_STRING,\t/* \u0022*\u0022, where * may have any char */\n\n} lws_tokenize_elem;\n\n/*\n * helper enums to allow caller to enforce legal delimiter sequencing, eg\n * disallow \u0022token,,token\u0022, \u0022token,\u0022, and \u0022,token\u0022\n */\n\nenum lws_tokenize_delimiter_tracking {\n\tLWSTZ_DT_NEED_FIRST_CONTENT,\n\tLWSTZ_DT_NEED_DELIM,\n\tLWSTZ_DT_NEED_NEXT_CONTENT,\n};\n\nstruct lws_tokenize {\n\tconst char *start; /**\u003c set to the start of the string to tokenize */\n\tconst char *token; /**\u003c the start of an identified token or delimiter */\n\tint len;\t/**\u003c set to the length of the string to tokenize */\n\tint token_len;\t/**\u003c the length of the identied token or delimiter */\n\n\tint flags;\t/**\u003c optional LWS_TOKENIZE_F_ flags, or 0 */\n\tint delim;\n};\n\n/**\n * lws_tokenize() - breaks down a string into tokens and delimiters in-place\n *\n * \u005cparam ts: the lws_tokenize struct to init\n * \u005cparam start: the string to tokenize\n * \u005cparam flags: LWS_TOKENIZE_F_ option flags\n *\n * This initializes the tokenize struct to point to the given string, and\n * sets the length to 2GiB - 1 (so there must be a terminating NUL)... you can\n * override this requirement by setting ts.len yourself before using it.\n *\n * .delim is also initialized to LWSTZ_DT_NEED_FIRST_CONTENT.\n */\n\nLWS_VISIBLE LWS_EXTERN void\nlws_tokenize_init(struct lws_tokenize *ts, const char *start, int flags);\n\n/**\n * lws_tokenize() - breaks down a string into tokens and delimiters in-place\n *\n * \u005cparam ts: the lws_tokenize struct with information and state on what to do\n *\n * The \u005cp ts struct should have its start, len and flags members initialized to\n * reflect the string to be tokenized and any options.\n *\n * Then `lws_tokenize()` may be called repeatedly on the struct, returning one\n * of `lws_tokenize_elem` each time, and with the struct's `token` and\n * `token_len` members set to describe the content of the delimiter or token\n * payload each time.\n *\n * There are no allocations during the process.\n *\n * returns lws_tokenize_elem that was identified (LWS_TOKZE_ENDED means reached\n * the end of the string).\n */\n\nLWS_VISIBLE LWS_EXTERN lws_tokenize_elem\nlws_tokenize(struct lws_tokenize *ts);\n\n/**\n * lws_tokenize_cstr() - copy token string to NUL-terminated buffer\n *\n * \u005cparam ts: pointer to lws_tokenize struct to operate on\n * \u005cparam str: destination buffer\n * \u005cpparam max: bytes in destination buffer\n *\n * returns 0 if OK or nonzero if the string + NUL won't fit.\n */\n\nLWS_VISIBLE LWS_EXTERN int\nlws_tokenize_cstr(struct lws_tokenize *ts, char *str, int max);\n","s":{"c":1571200288,"u": 407}} ],"g": 1620,"chitpc": 0,"ehitpc": 0, "indexed":0 }