/// <summary>
/// Merges the given message using data from the given tokenizer. In most cases, the next
/// token should be a "start object" token, but wrapper types and nullity can invalidate
/// that assumption. This is implemented as an LL(1) recursive descent parser over the stream
/// of tokens provided by the tokenizer. This token stream is assumed to be valid JSON, with the
/// tokenizer performing that validation - but not every token stream is valid "protobuf JSON".
/// </summary>
private void Merge(IMessage message, JsonTokenizer tokenizer)
{
    if (tokenizer.ObjectDepth > settings.RecursionLimit)
    {
        throw InvalidProtocolBufferException.JsonRecursionLimitExceeded();
    }
    // Well-known types with a registered handler are parsed specially; well-known types
    // without one fall through and are parsed like any other message below.
    if (message.Descriptor.IsWellKnownType)
    {
        Action<JsonParser, IMessage, JsonTokenizer> wellKnownHandler;
        if (WellKnownTypeHandlers.TryGetValue(message.Descriptor.FullName, out wellKnownHandler))
        {
            wellKnownHandler(this, message, tokenizer);
            return;
        }
    }
    var firstToken = tokenizer.Next();
    if (firstToken.Type != JsonToken.TokenType.StartObject)
    {
        throw new InvalidProtocolBufferException("Expected an object");
    }
    var fieldsByJsonName = message.Descriptor.Fields.ByJsonName();
    // Consume name/value pairs until the matching end-object token.
    for (var token = tokenizer.Next();
         token.Type != JsonToken.TokenType.EndObject;
         token = tokenizer.Next())
    {
        if (token.Type != JsonToken.TokenType.Name)
        {
            // The tokenizer guarantees valid JSON, so anything other than a property
            // name here indicates an internal invariant violation, not bad user input.
            throw new InvalidOperationException("Unexpected token type " + token.Type);
        }
        string fieldName = token.StringValue;
        FieldDescriptor field;
        if (!fieldsByJsonName.TryGetValue(fieldName, out field))
        {
            // TODO: Is this what we want to do? If not, we'll need to skip the value,
            // which may be an object or array. (We might want to put code in the tokenizer
            // to do that.)
            throw new InvalidProtocolBufferException("Unknown field: " + fieldName);
        }
        MergeField(message, field, tokenizer);
    }
}