public void AddEntriesFrom_CodedInputStream()
{
    // map will have string key and string value
    var keyTag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited);
    var valueTag = WireFormat.MakeTag(2, WireFormat.WireType.LengthDelimited);

    var memoryStream = new MemoryStream();
    var output = new CodedOutputStream(memoryStream);
    output.WriteLength(20); // total of keyTag + key + valueTag + value
    output.WriteTag(keyTag);
    output.WriteString("the_key");
    output.WriteTag(valueTag);
    output.WriteString("the_value");
    output.Flush();

    var field = new MapField<string, string>();
    var mapCodec = new MapField<string, string>.Codec(FieldCodec.ForString(keyTag, ""), FieldCodec.ForString(valueTag, ""), 10);
    var input = new CodedInputStream(memoryStream.ToArray());

    // test the legacy overload of AddEntriesFrom that takes a CodedInputStream
    field.AddEntriesFrom(input, mapCodec);
    CollectionAssert.AreEquivalent(new[] { "the_key" }, field.Keys);
    CollectionAssert.AreEquivalent(new[] { "the_value" }, field.Values);
    Assert.IsTrue(input.IsAtEnd);
}
public void AddEntriesFrom_CodedInputStream_MissingKey()
{
    // map will have string key and string value
    var keyTag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited);
    var valueTag = WireFormat.MakeTag(2, WireFormat.WireType.LengthDelimited);

    var memoryStream = new MemoryStream();
    var output = new CodedOutputStream(memoryStream);
    output.WriteLength(11); // total of valueTag + value
    output.WriteTag(valueTag);
    output.WriteString("the_value");
    output.Flush();
    Console.WriteLine(BitConverter.ToString(memoryStream.ToArray()));

    var field = new MapField<string, string>();
    var mapCodec = new MapField<string, string>.Codec(FieldCodec.ForString(keyTag, ""), FieldCodec.ForString(valueTag, ""), 10);
    var input = new CodedInputStream(memoryStream.ToArray());

    field.AddEntriesFrom(input, mapCodec);
    CollectionAssert.AreEquivalent(new[] { "" }, field.Keys);
    CollectionAssert.AreEquivalent(new[] { "the_value" }, field.Values);
    Assert.IsTrue(input.IsAtEnd);
}
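// Complementary round-trip sketch (not part of the tests above): the same MapField<string, string>.Codec
// can drive both serialization via MapField.WriteTo and parsing via AddEntriesFrom. The field number 3
// used for the map tag below is an arbitrary assumption for illustration.
var entryKeyTag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited);
var entryValueTag = WireFormat.MakeTag(2, WireFormat.WireType.LengthDelimited);
var mapTag = WireFormat.MakeTag(3, WireFormat.WireType.LengthDelimited);
var codec = new MapField<string, string>.Codec(FieldCodec.ForString(entryKeyTag, ""), FieldCodec.ForString(entryValueTag, ""), mapTag);

var source = new MapField<string, string> { { "the_key", "the_value" } };
var buffer = new MemoryStream();
var writer = new CodedOutputStream(buffer);
source.WriteTo(writer, codec); // writes the map tag, the entry length, then the key and value fields
writer.Flush();

var parsed = new MapField<string, string>();
var reader = new CodedInputStream(buffer.ToArray());
reader.ReadTag();                     // consume the map field's tag before handing off to the codec
parsed.AddEntriesFrom(reader, codec); // reads this entry (and any consecutive entries with the same tag)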
static IEnumerable<TableRow> CreateTableRows(string descriptorFile)
{
    // We need to load the descriptor file with an extension registry containing the cloud_event_type extension.
    var fieldCodec = FieldCodec.ForString(WireFormat.MakeTag(ExtensionField, WireFormat.WireType.LengthDelimited), "");
    var eventTypeExtension = new Extension<MessageOptions, string>(ExtensionField, fieldCodec);
    var extensionRegistry = new ExtensionRegistry { eventTypeExtension };

    var bytes = File.ReadAllBytes(descriptorFile);
    var descriptorSet = FileDescriptorSet.Parser.WithExtensionRegistry(extensionRegistry).ParseFrom(bytes);

    // For every file in the descriptor set, we check for messages with the cloud_event_type extension.
    // We gather all the event types and data message names we find into a single row per package.
    foreach (var protoFile in descriptorSet.File)
    {
        // We currently assume there's only a single proto file per package that has cloud_event_type extensions,
        // conventionally events.proto. If that changes, we'll end up with multiple rows in the event
        // registry for a single package, one per file - at which point we'd need to keep a dictionary of
        // rows based on the package.
        var package = protoFile.Package;

        // Speculatively create a row in which to store any event types and data message names we find.
        var row = new TableRow(package);
        foreach (var message in protoFile.MessageType)
        {
            // We only care about messages that have the cloud_event_type extension.
            var eventType = message.Options?.GetExtension(eventTypeExtension);
            if (string.IsNullOrWhiteSpace(eventType))
            {
                continue;
            }

            // Remember the event type specified in this message.
            row.EventTypes.Add(eventType);

            // Find the "data" field within the message, if there is one.
            var dataFieldType = message.Field.FirstOrDefault(f => f.Name == "data")?.TypeName;
            if (dataFieldType is string)
            {
                // Add the data message to the row.
                // For the sake of making the table simple, we'll remove any package names etc,
                // just leaving whatever comes after the final period.
                row.DataMessages.Add(dataFieldType.Split('.').Last());
            }
        }

        // If this proto contained any messages with event types, yield the row in the iterator.
        // Otherwise, ignore it.
        if (row.EventTypes.Any())
        {
            yield return row;
        }
    }
}
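// The TableRow type referenced above isn't shown in this snippet; a minimal sketch of the shape the
// code appears to assume (constructor taking the package name, plus mutable collections for event
// types and data message names) might look like this. Anything beyond the members used above is a guess.
public class TableRow
{
    public TableRow(string package) => Package = package;

    public string Package { get; }
    public List<string> EventTypes { get; } = new List<string>();
    public List<string> DataMessages { get; } = new List<string>();
}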
/// <summary>
/// Loads the specified descriptor set, and extracts CloudEvent information from it.
/// </summary>
private static List<CloudEventDataInfo> LoadCloudEventDataInfo(string file)
{
    // Note: while it's slightly annoying to hard-code this, it's less annoying than loading the
    // descriptor set without extensions, finding the extension number, and then reloading it.
    const int extensionField = 11716486;
    var fieldCodec = FieldCodec.ForString(WireFormat.MakeTag(extensionField, WireFormat.WireType.LengthDelimited), "");
    var extension = new Extension<MessageOptions, string>(extensionField, fieldCodec);
    var extensionRegistry = new ExtensionRegistry { extension };

    var descriptorSetBytes = File.ReadAllBytes(file);
    var descriptorSet = FileDescriptorSet.Parser.WithExtensionRegistry(extensionRegistry).ParseFrom(descriptorSetBytes);

    var typeToNamespace = (from protoFile in descriptorSet.File
                           from message in protoFile.MessageType
                           select (protoFile, message))
        .ToDictionary(pair => $"{pair.protoFile.Package}.{pair.message.Name}", pair => pair.protoFile.Options.CsharpNamespace);

    // ConcurrentDictionary has a convenient GetOrAdd method.
    ConcurrentDictionary<string, CloudEventDataInfo> infoByFqn = new ConcurrentDictionary<string, CloudEventDataInfo>();

    // Find every message in every file in the descriptor set, and check for the cloud_event_type extension.
    // If it has one, check for a field called "data" and take the type of that, then create a CloudEventDataInfo
    // that knows the C# namespace of the message, the message name, and the CloudEvent type.
    foreach (var protoFile in descriptorSet.File)
    {
        var package = protoFile.Package;
        foreach (var message in protoFile.MessageType)
        {
            var eventType = message.Options?.GetExtension(extension);
            if (string.IsNullOrWhiteSpace(eventType))
            {
                continue;
            }

            // We expect each CloudEvent message to have a data field, which is a message.
            var dataFieldType = message.Field.Single(f => f.Name == "data").TypeName;
            // Convert the type to a fully-qualified name
            var fqn = dataFieldType.StartsWith(".") ? dataFieldType.Substring(1) : $"{package}.{dataFieldType}";
            var messageName = fqn.Split('.').Last();
            CloudEventDataInfo info = infoByFqn.GetOrAdd(fqn, _ => new CloudEventDataInfo(messageName, typeToNamespace[fqn]));
            info.CloudEventTypes.Add(eventType);
        }
    }
    return infoByFqn.Values.ToList();
}
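// CloudEventDataInfo is also not shown; judging by its usage above (constructed from a message name
// and a C# namespace, then accumulating event types, with GetOrAdd keeping one instance per data
// message), a minimal sketch might look like this. Member names beyond those used above are guesses.
public class CloudEventDataInfo
{
    public CloudEventDataInfo(string messageName, string csharpNamespace)
    {
        MessageName = messageName;
        CSharpNamespace = csharpNamespace;
    }

    public string MessageName { get; }
    public string CSharpNamespace { get; }

    // A set rather than a list avoids duplicates if the same event type/data message pairing appears twice.
    public HashSet<string> CloudEventTypes { get; } = new HashSet<string>();
}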
private async Task<FileWeightsReply> GetFileWeightsInternal(GetFileWeightsRequest request)
{
    var reply = new FileWeightsReply();
    if (string.IsNullOrWhiteSpace(request.RootName))
    {
        reply.Error = Error.NoSuchEntity;
        return reply;
    }

    var path = PathSep + request.RootName;
    var folder = await _service.All()
        .Where(k => k.Name == request.RootName && k.Path == path && k.Type == (int)FileType.Folder)
        .FirstOrDefaultAsync();
    if (folder == null)
    {
        reply.Error = Error.NoSuchEntity;
        return reply;
    }

    var pathStart = folder.Path + PathSep;
    var weights = await _service.All()
        .Where(p => p.Type == (int)FileType.Normal && p.Path.StartsWith(pathStart))
        .Select(f => new { f.Name, f.Weight })
        .ToArrayAsync();
    foreach (var w in weights)
    {
        reply.Weights.Add(w.Name.ToString(), w.Weight ?? 0);
    }

    // Codec describing the Weights map entries (string key = field 1, length-delimited; int32 value = field 2, varint).
    // Note: the codec isn't actually used below - reply.WriteTo serializes the whole message directly.
    var codec = new MapField<string, int>.Codec(
        FieldCodec.ForString(WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited)),
        FieldCodec.ForInt32(WireFormat.MakeTag(2, WireFormat.WireType.Varint)),
        0);

    var version = await _contentService.Add(_cachesGroup, fs =>
    {
        using (var s = new CodedOutputStream(fs))
        {
            reply.WriteTo(s);
        }
        return Task.FromResult(true);
    }, "bin");
    await _cache.SetStringAsync(FileWeightsVersionCacheKey(request.RootName), version);
    return reply;
}
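// A minimal read-back sketch, assuming FileWeightsReply is a generated protobuf message (so it exposes
// a static Parser) and that `cachedBytes` holds the payload written by reply.WriteTo above; both the
// variable name and the way the bytes are fetched from the content store are hypothetical.
byte[] cachedBytes = File.ReadAllBytes("cached-file-weights.bin"); // hypothetical source of the stored bytes
var restored = FileWeightsReply.Parser.ParseFrom(cachedBytes);
foreach (var pair in restored.Weights)
{
    Console.WriteLine($"{pair.Key}: {pair.Value}");
}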
public void WriteTo_String()
{
    uint tag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited);
    var field = new RepeatedField<string> { "Foo", "", "Bar" };
    var stream = new MemoryStream();
    var output = new CodedOutputStream(stream);
    field.WriteTo(output, FieldCodec.ForString(tag));
    output.Flush();

    stream.Position = 0;
    var input = new CodedInputStream(stream);

    input.AssertNextTag(tag);
    Assert.AreEqual("Foo", input.ReadString());
    input.AssertNextTag(tag);
    Assert.AreEqual("", input.ReadString());
    input.AssertNextTag(tag);
    Assert.AreEqual("Bar", input.ReadString());
    Assert.IsTrue(input.IsAtEnd);
}
public void AddEntriesFrom_String()
{
    uint tag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited);
    var stream = new MemoryStream();
    var output = new CodedOutputStream(stream);
    output.WriteTag(tag);
    output.WriteString("Foo");
    output.WriteTag(tag);
    output.WriteString("");
    output.WriteTag(tag);
    output.WriteString("Bar");
    output.Flush();

    stream.Position = 0;
    var field = new RepeatedField<string>();
    var input = new CodedInputStream(stream);
    input.AssertNextTag(tag);
    field.AddEntriesFrom(input, FieldCodec.ForString(tag));
    CollectionAssert.AreEqual(new[] { "Foo", "", "Bar" }, field);
    Assert.IsTrue(input.IsAtEnd);
}
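// The two tests above are mirror images; a minimal round-trip sketch combining them (same APIs,
// same FieldCodec.ForString codec, arbitrary field number 10) might look like this.
uint roundTripTag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited);
var stringCodec = FieldCodec.ForString(roundTripTag);

var values = new RepeatedField<string> { "Foo", "", "Bar" };
var ms = new MemoryStream();
var cos = new CodedOutputStream(ms);
values.WriteTo(cos, stringCodec); // one tag plus one length-delimited string per element
cos.Flush();

var roundTripped = new RepeatedField<string>();
var cis = new CodedInputStream(ms.ToArray());
cis.ReadTag();                                 // consume the first tag, as AddEntriesFrom expects
roundTripped.AddEntriesFrom(cis, stringCodec); // reads this value and any consecutive same-tag values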
/// <inheritdoc/>
public override FieldCodec<string> CreateFieldCodec(int fieldNumber)
{
    return FieldCodec.ForString(WireFormat.MakeTag(fieldNumber, WireType));
}
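// Hypothetical usage sketch: whatever type declares the override above (its name isn't shown here),
// the codec it returns plugs straight into RepeatedField serialization. `stringFieldType` is assumed.
var codec10 = stringFieldType.CreateFieldCodec(10);
var names = new RepeatedField<string> { "alpha", "beta" };
var outStream = new MemoryStream();
var codedOut = new CodedOutputStream(outStream);
names.WriteTo(codedOut, codec10);
codedOut.Flush();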
static Location()
{
    // Note: this type is marked as 'beforefieldinit'.
    // Decompiled output; the obfuscated control flow reduces to initializing the three repeated-field
    // codecs of SourceCodeInfo.Location: path (tag 10 = field 1, packed varints), span (tag 18 = field 2,
    // packed varints) and leading_detached_comments (tag 50 = field 6, length-delimited strings).
    SourceCodeInfo.Types.Location._repeated_path_codec = FieldCodec.ForInt32(10u);
    SourceCodeInfo.Types.Location._repeated_span_codec = FieldCodec.ForInt32(18u);
    SourceCodeInfo.Types.Location._repeated_leadingDetachedComments_codec = FieldCodec.ForString(50u);
}