qemu/qobject/json-streamer.c
Paolo Bonzini 95385fe9ac qjson: store tokens in a GQueue
Even though we still have the "streamer" concept, the tokens can now
be deleted as they are read.  While doing so convert from QList to
GQueue, since the next step will make tokens not a QObject and we
will have to do the conversion anyway.

Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
Message-Id: <1448300659-23559-4-git-send-email-pbonzini@redhat.com>
Signed-off-by: Markus Armbruster <armbru@redhat.com>
Reviewed-by: Eric Blake <eblake@redhat.com>
2015-11-26 10:07:07 +01:00
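
For context, the emit callback now receives a GQueue of QDict tokens (or NULL
after an error) and takes ownership of it. A minimal sketch of such a callback
follows; the name handle_tokens and the drain-and-decref pattern are
illustrative assumptions, not part of this commit:

static void handle_tokens(JSONMessageParser *parser, GQueue *tokens)
{
    QDict *token;

    if (!tokens) {
        /* NULL signals a lexer error or a breached size/nesting limit */
        return;
    }
    while ((token = g_queue_pop_head(tokens)) != NULL) {
        /* each token is a QDict with "type", "token", "x" and "y" keys */
        QDECREF(token);
    }
    /* the callback owns the queue, so it must free it too */
    g_queue_free(tokens);
}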

/*
 * JSON streaming support
 *
 * Copyright IBM, Corp. 2009
 *
 * Authors:
 *  Anthony Liguori   <aliguori@us.ibm.com>
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
 * See the COPYING.LIB file in the top-level directory.
 *
 */

#include "qapi/qmp/qlist.h"
#include "qapi/qmp/qstring.h"
#include "qapi/qmp/qint.h"
#include "qapi/qmp/qdict.h"
#include "qemu-common.h"
#include "qapi/qmp/json-lexer.h"
#include "qapi/qmp/json-streamer.h"
#define MAX_TOKEN_SIZE (64ULL << 20)
#define MAX_NESTING (1ULL << 10)
static void json_message_free_tokens(JSONMessageParser *parser)
{
if (parser->tokens) {
g_queue_free(parser->tokens);
parser->tokens = NULL;
}
}
static void json_message_process_token(JSONLexer *lexer, GString *input,
                                       JSONTokenType type, int x, int y)
{
    JSONMessageParser *parser = container_of(lexer, JSONMessageParser, lexer);
    QDict *dict;

    /* track nesting depth so we can tell when a top-level value is complete */
    switch (type) {
    case JSON_LCURLY:
        parser->brace_count++;
        break;
    case JSON_RCURLY:
        parser->brace_count--;
        break;
    case JSON_LSQUARE:
        parser->bracket_count++;
        break;
    case JSON_RSQUARE:
        parser->bracket_count--;
        break;
    default:
        break;
    }

    /* queue the token as a QDict of type, text and source position */
    dict = qdict_new();
    qdict_put(dict, "type", qint_from_int(type));
    qdict_put(dict, "token", qstring_from_str(input->str));
    qdict_put(dict, "x", qint_from_int(x));
    qdict_put(dict, "y", qint_from_int(y));

    parser->token_size += input->len;

    g_queue_push_tail(parser->tokens, dict);

    if (type == JSON_ERROR) {
        goto out_emit_bad;
    } else if (parser->brace_count < 0 ||
               parser->bracket_count < 0 ||
               (parser->brace_count == 0 &&
                parser->bracket_count == 0)) {
        /* either a stray closing brace/bracket (the parser will report it)
         * or a complete top-level value: emit what we have */
        goto out_emit;
    } else if (parser->token_size > MAX_TOKEN_SIZE ||
               parser->bracket_count + parser->brace_count > MAX_NESTING) {
        /* Security consideration: we limit the total memory allocated per
         * object and the maximum recursion depth that a message can force.
         */
        goto out_emit_bad;
    }

    return;

out_emit_bad:
    /*
     * Clear out the token list and tell the parser to emit an error
     * indication by passing it a NULL list
     */
    json_message_free_tokens(parser);
out_emit:
    /* send the current list of tokens to the parser and reset the tokenizer */
    parser->brace_count = 0;
    parser->bracket_count = 0;
    /* parser->emit takes ownership of parser->tokens */
    parser->emit(parser, parser->tokens);
    parser->tokens = g_queue_new();
    parser->token_size = 0;
}

void json_message_parser_init(JSONMessageParser *parser,
                              void (*func)(JSONMessageParser *, GQueue *))
{
    parser->emit = func;
    parser->brace_count = 0;
    parser->bracket_count = 0;
    parser->tokens = g_queue_new();
    parser->token_size = 0;

    json_lexer_init(&parser->lexer, json_message_process_token);
}

int json_message_parser_feed(JSONMessageParser *parser,
                             const char *buffer, size_t size)
{
    return json_lexer_feed(&parser->lexer, buffer, size);
}

int json_message_parser_flush(JSONMessageParser *parser)
{
    return json_lexer_flush(&parser->lexer);
}

void json_message_parser_destroy(JSONMessageParser *parser)
{
    json_lexer_destroy(&parser->lexer);
    json_message_free_tokens(parser);
}