Lines Matching +full:0 +full:- +full:9 (QEMU, qobject/json-lexer.c)

10  * See the COPYING.LIB file in the top-level directory.
15 #include "json-parser-int.h"
28 *    begin-array     = ws %x5B ws  ; [ left square bracket
29 *    begin-object    = ws %x7B ws  ; { left curly bracket
30 *    end-array       = ws %x5D ws  ; ] right square bracket
31 *    end-object      = ws %x7D ws  ; } right curly bracket
32 *    name-separator  = ws %x3A ws  ; : colon
33 *    value-separator = ws %x2C ws  ; , comma
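
The six structural characters above each form a complete one-byte token on their own. In the start-state row of the transition table (not among the matched lines) each is expected to map straight to its token value; a sketch of such entries, with the token names JSON_LCURLY, JSON_RCURLY, JSON_LSQUARE, JSON_RSQUARE, JSON_COLON and JSON_COMMA assumed here:

    [IN_START] = {
        /* sketch only: single-byte structural tokens; the byte itself
         * completes the token, so no LOOKAHEAD bit is needed */
        ['{'] = JSON_LCURLY,
        ['}'] = JSON_RCURLY,
        ['['] = JSON_LSQUARE,
        [']'] = JSON_RSQUARE,
        [':'] = JSON_COLON,
        [','] = JSON_COMMA,
        /* ... quotes, digits, '-', keywords, whitespace ... */
    },
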
49 * [This lexer accepts [a-z]+, and leaves rejecting unknown literal
55 * decimal-point = %x2E ; .
56 * digit1-9 = %x31-39 ; 1-9
59 * frac = decimal-point 1*DIGIT
60 * int = zero / ( digit1-9 *DIGIT )
61 * minus = %x2D ; -
63 * zero = %x30 ; 0
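
Putting the number rules together (the exp rule is not among the matched lines but follows the same RFC 8259 grammar), a standalone checker for the quoted grammar could look like the sketch below. It is an illustration only, not code from the lexer, which drives the same rules through its state table instead:

    #include <ctype.h>
    #include <stdbool.h>

    /* Sketch: validate a NUL-terminated string against
     * number = [ minus ] int [ frac ] [ exp ] from the grammar above. */
    static bool is_json_number(const char *s)
    {
        if (*s == '-') {                          /* minus = %x2D */
            s++;
        }
        if (*s == '0') {                          /* zero: no leading zeros */
            s++;
        } else if (*s >= '1' && *s <= '9') {      /* digit1-9 *DIGIT */
            do {
                s++;
            } while (isdigit((unsigned char)*s));
        } else {
            return false;
        }
        if (*s == '.') {                          /* frac = decimal-point 1*DIGIT */
            s++;
            if (!isdigit((unsigned char)*s)) {
                return false;
            }
            while (isdigit((unsigned char)*s)) {
                s++;
            }
        }
        if (*s == 'e' || *s == 'E') {             /* exp = e [ minus / plus ] 1*DIGIT */
            s++;
            if (*s == '+' || *s == '-') {
                s++;
            }
            if (!isdigit((unsigned char)*s)) {
                return false;
            }
            while (isdigit((unsigned char)*s)) {
                s++;
            }
        }
        return *s == '\0';
    }

With this sketch, "-0.5e+3" and "0" pass while "01" and ".5" are rejected, which matches the lexer's leading-zero error entry shown at line 179 below.
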
66 * string = quotation-mark *char quotation-mark
80 * quotation-mark = %x22 ; "
81 * unescaped = %x20-21 / %x23-5B / %x5D-10FFFF
82 * [This lexer accepts any non-control character after escape, and
87 * - Extra escape sequence in strings:
88 * 0x27 (apostrophe) is recognized after escape, too
89 * - Single-quoted strings:
90 * Like double-quoted strings, except they're delimited by %x27
93 * - Interpolation, if enabled:
94 * The lexer accepts %[A-Za-z0-9]*, and leaves rejecting invalid
98 * - Input must be encoded in modified UTF-8.
99 * - Decoding and validating is left to the parser.
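
A few concrete inputs, to make the extensions listed above tangible (which of them is ultimately valid remains the parser's decision):

    /*
     * "it\u2019s"    plain RFC 8259 double-quoted string
     * 'it\'s'        single-quoted string using the extra \' escape
     * %d  %s  %x42   lexed as interpolation tokens when enabled;
     *                rejecting nonsensical ones is left to the parser
     */
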
122 QEMU_BUILD_BUG_ON(JSON_ERROR != 0);
125 QEMU_BUILD_BUG_ON(JSON_MAX >= 0x80);
128 #define LOOKAHEAD 0x80
129 #define TERMINAL(state) [0 ... 0xFF] = ((state) | LOOKAHEAD)
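
TERMINAL() fills a whole 256-entry row, so a terminal row first declares "any byte ends the token here" and then overrides the bytes that actually extend the token. An illustration built around the IN_DIGITS entry matched at line 213 below (the '.', 'e', 'E' continuations are assumptions, not matched lines):

    [IN_DIGITS] = {
        TERMINAL(JSON_INTEGER),      /* [0 ... 0xFF] = JSON_INTEGER | LOOKAHEAD */
        ['0' ... '9'] = IN_DIGITS,   /* further digits keep the token going */
        /* assumed: '.', 'e', 'E' continue into the float states */
    },

The LOOKAHEAD bit tells the feed loop that the terminating byte is not part of the token and must be re-examined from the start state (see the loop condition at line 294).
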
138 * control character other than '\t', or impossible UTF-8
141 * may use the others to force the JSON parser into known-good
142 * state; see docs/interop/qmp-spec.rst.
144 [0 ... 0x1F] = IN_START | LOOKAHEAD,
145 [0x20 ... 0xFD] = IN_RECOVERY,
146 [0xFE ... 0xFF] = IN_START | LOOKAHEAD,
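
In other words, once the lexer has reported an error it discards bytes until it sees something promising. A rough picture, assuming the structural-character entries of the recovery row (not among the matches) are IN_START | LOOKAHEAD like the ranges shown:

    /*
     * input after an error:   <garbage bytes> } { "next": 1 }
     *
     * the garbage bytes (0x20..0xFD) keep the lexer in IN_RECOVERY;
     * the first structural character ('}' here) is assumed to map to
     * IN_START | LOOKAHEAD, so it is re-read in the start state and
     * normal tokenization resumes.  Per the comment above, a client
     * can force the same reset deliberately with an ASCII control
     * byte other than '\t', or with 0xFE/0xFF.
     */
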
158 [0x20 ... 0xFD] = IN_DQ_STRING,
161 [0x20 ... 0xFD] = IN_DQ_STRING,
168 [0x20 ... 0xFD] = IN_SQ_STRING,
171 [0x20 ... 0xFD] = IN_SQ_STRING,
179 ['0' ... '9'] = JSON_ERROR,
186 ['0' ... '9'] = IN_EXP_DIGITS,
190 ['0' ... '9'] = IN_EXP_DIGITS,
194 ['-'] = IN_EXP_SIGN,
196 ['0' ... '9'] = IN_EXP_DIGITS,
201 ['0' ... '9'] = IN_MANTISSA_DIGITS,
207 ['0' ... '9'] = IN_MANTISSA_DIGITS,
213 ['0' ... '9'] = IN_DIGITS,
220 ['0'] = IN_ZERO,
221 ['1' ... '9'] = IN_DIGITS,
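
Taken together, these rows form the number automaton. A walk-through of the input -0.5e+3; the state names IN_MANTISSA and IN_EXP and the '.' and 'e' transitions are assumptions here, reconstructed from the grammar quoted earlier rather than from matched lines:

    /*
     * '-'  IN_START            -> IN_SIGN              (line 248)
     * '0'  IN_SIGN             -> IN_ZERO              (line 220)
     * '.'  IN_ZERO             -> IN_MANTISSA          (assumed)
     * '5'  IN_MANTISSA         -> IN_MANTISSA_DIGITS   (line 207)
     * 'e'  IN_MANTISSA_DIGITS  -> IN_EXP               (assumed)
     * '+'  IN_EXP              -> IN_EXP_SIGN          (cf. line 194)
     * '3'  IN_EXP_SIGN         -> IN_EXP_DIGITS        (line 190)
     * next byte: a terminal entry, so a float token covering "-0.5e+3"
     * is emitted and that byte is re-read from the start state.
     */
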
235 ['0' ... '9'] = IN_INTERP,
240 * - IN_START recognizes JSON tokens with our string extensions
241 * - IN_START_INTERP additionally recognizes interpolation.
246 ['0'] = IN_ZERO,
247 ['1' ... '9'] = IN_DIGITS,
248 ['-'] = IN_SIGN,
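
Per the comment at lines 240-241, the two start states should differ only in whether '%' opens an interpolation token; everything else, including the number entries just shown, is shared. A hedged guess at the one distinguishing table entry:

    /* assumed: only the interpolation-enabled start state routes '%'
     * into IN_INTERP; in plain IN_START, '%' falls through to the
     * JSON_ERROR default. */
    [IN_START_INTERP]['%'] = IN_INTERP,
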
269 assert(lexer->state < ARRAY_SIZE(json_lexer)); in next_state()
270 next = json_lexer[lexer->state][(uint8_t)ch]; in next_state()
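
The matches show only the lookup inside next_state(); presumably the rest of the helper just splits the looked-up value into the next state and a "was this byte consumed" flag derived from LOOKAHEAD, which is what the loop condition at line 294 relies on. A sketch under that assumption:

    static inline uint8_t next_state(JSONLexer *lexer, char ch, bool flush,
                                     bool *char_consumed)
    {
        uint8_t next;

        assert(lexer->state < ARRAY_SIZE(json_lexer));
        next = json_lexer[lexer->state][(uint8_t)ch];
        /* a LOOKAHEAD entry means: token complete, ch not consumed;
         * during a flush there is no real input byte to consume */
        *char_consumed = !flush && !(next & LOOKAHEAD);
        return next & ~LOOKAHEAD;
    }
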
277 lexer->start_state = lexer->state = enable_interpolation in json_lexer_init()
279 lexer->token = g_string_sized_new(3); in json_lexer_init()
280 lexer->x = lexer->y = 0; in json_lexer_init()
288 lexer->x++; in json_lexer_feed_char()
290 lexer->x = 0; in json_lexer_feed_char()
291 lexer->y++; in json_lexer_feed_char()
294 while (flush ? lexer->state != lexer->start_state : !char_consumed) { in json_lexer_feed_char()
298 g_string_append_c(lexer->token, ch); in json_lexer_feed_char()
313 json_message_process_token(lexer, lexer->token, new_state, in json_lexer_feed_char()
314 lexer->x, lexer->y); in json_lexer_feed_char()
317 g_string_truncate(lexer->token, 0); in json_lexer_feed_char()
318 new_state = lexer->start_state; in json_lexer_feed_char()
321 json_message_process_token(lexer, lexer->token, JSON_ERROR, in json_lexer_feed_char()
322 lexer->x, lexer->y); in json_lexer_feed_char()
326 g_string_truncate(lexer->token, 0); in json_lexer_feed_char()
331 lexer->state = new_state; in json_lexer_feed_char()
337 if (lexer->token->len > MAX_TOKEN_SIZE) { in json_lexer_feed_char()
338 json_message_process_token(lexer, lexer->token, lexer->state, in json_lexer_feed_char()
339 lexer->x, lexer->y); in json_lexer_feed_char()
340 g_string_truncate(lexer->token, 0); in json_lexer_feed_char()
341 lexer->state = lexer->start_state; in json_lexer_feed_char()
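
Each time the table yields a token value, the bytes collected in lexer->token are handed to json_message_process_token() together with the token type and the current column/line (x, y), and the buffer is then cleared; the check at line 337 hands oversized tokens on early. For a buffer such as {"a": 1}, the resulting calls would be expected to look roughly like this (the token-type names and the silent dropping of whitespace are assumptions):

    json_message_process_token(lexer, lexer->token /* "{"     */, JSON_LCURLY,  x, y);
    json_message_process_token(lexer, lexer->token /* "\"a\"" */, JSON_STRING,  x, y);
    json_message_process_token(lexer, lexer->token /* ":"     */, JSON_COLON,   x, y);
    json_message_process_token(lexer, lexer->token /* "1"     */, JSON_INTEGER, x, y);
    json_message_process_token(lexer, lexer->token /* "}"     */, JSON_RCURLY,  x, y);
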
349 for (i = 0; i < size; i++) { in json_lexer_feed()
356 json_lexer_feed_char(lexer, 0, true); in json_lexer_flush()
357 assert(lexer->state == lexer->start_state); in json_lexer_flush()
358 json_message_process_token(lexer, lexer->token, JSON_END_OF_INPUT, in json_lexer_flush()
359 lexer->x, lexer->y); in json_lexer_flush()
364 g_string_free(lexer->token, true); in json_lexer_destroy()
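
Taken together, the lines above outline the whole life cycle: initialize, feed bytes, flush at end of input, destroy. A hypothetical driver, loosely modeled on how QEMU's JSON message streamer is expected to use the lexer; judging from the calls above, tokens reach json_message_process_token() directly rather than through a registered callback:

    /* lex_one_buffer() is a made-up name for illustration only */
    void lex_one_buffer(JSONLexer *lexer, const char *buf, size_t len)
    {
        json_lexer_init(lexer, false);      /* false: no %-interpolation */
        json_lexer_feed(lexer, buf, len);   /* tokens delivered via
                                             * json_message_process_token() */
        json_lexer_flush(lexer);            /* force out a trailing token,
                                             * then emit JSON_END_OF_INPUT */
        json_lexer_destroy(lexer);          /* frees lexer->token (line 364) */
    }
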