/// <summary>
/// Translates a parsed Keras model description into the runtime layer list.
/// Each handled Keras layer appends one or more <see cref="NNLayerBase"/> entries to
/// <paramref name="Layers"/>; the last entry appended for a given JSON layer inherits
/// that layer's name so that later "Concatenate"/"Add" layers can resolve their
/// second input by name.
/// </summary>
/// <param name="layersJson">Deserialized Keras model JSON (layer list with configs and inbound nodes).</param>
/// <param name="Layers">Destination list that receives the constructed runtime layers, in order.</param>
private static void LoadModel(KerasLayersJson layersJson, List<NNLayerBase> Layers)
{
    foreach (var layer in layersJson.layers)
    {
        // Snapshot the count so we can tell whether this JSON layer produced anything.
        int countBefore = Layers.Count;

        switch (layer.class_name)
        {
            case "InputLayer":
                var input = new InputLayer();
                input.LoadConfig(layer.config);
                Layers.Add(input);
                break;

            case "Activation":
                AppendActivation(Layers, layer.config.activation);
                break;

            case "Conv2D":
                var conv = new Conv2D();
                conv.LoadConfig(layer.config);
                Layers.Add(conv);
                // A fused activation ("relu"/"tanh") is emitted as a separate
                // layer immediately after the convolution.
                AppendActivation(Layers, layer.config.activation);
                break;

            case "LeakyReLU":
                var leaky = new LeakyReLU();
                leaky.LoadConfig(layer.config);
                Layers.Add(leaky);
                break;

            case "BatchNormalization":
                Layers.Add(new BatchNormalization());
                break;

            case "UpSampling2D":
                var upSampling = new UpSampling2D();
                upSampling.LoadConfig(layer.config);
                Layers.Add(upSampling);
                break;

            case "Concatenate":
                var concat = new Concatenate();
                // inbound_nodes[0][1][0] holds the name of the second (skip-connection)
                // input; resolve it to an index in the already-built layer list.
                // NOTE(review): assumes that input was already emitted — FindIndex
                // returns -1 for a forward reference.
                string concatInputName = layer.inbound_nodes[0][1][0] as string;
                concat.AlternativeInputId = Layers.FindIndex(ly => ly.Name == concatInputName);
                Layers.Add(concat);
                break;

            case "Add":
                var add = new Add();
                // Pick whichever inbound node is NOT the immediately preceding layer;
                // that node names the alternative (residual) input.
                int alternativeNode = layer.inbound_nodes[0].FindIndex(
                    node => (node[0] as string) != Layers[Layers.Count - 1].Name);
                string addInputName = layer.inbound_nodes[0][alternativeNode][0] as string;
                add.AlternativeInputId = Layers.FindIndex(ly => ly.Name == addInputName);
                Layers.Add(add);
                break;
        }

        // BUG FIX: the rename used to run unconditionally, so an unhandled
        // class_name (or an "Activation" layer with an unsupported activation)
        // silently renamed the PREVIOUS layer, corrupting the name-based
        // lookups performed by the "Concatenate"/"Add" cases above.
        if (Layers.Count > countBefore)
        {
            Layers[Layers.Count - 1].Name = layer.name;
        }
    }
}

/// <summary>
/// Appends the runtime layer for a Keras activation string, if it is one we support.
/// Unsupported activations (e.g. "linear", "sigmoid") append nothing.
/// </summary>
private static void AppendActivation(List<NNLayerBase> Layers, string activation)
{
    switch (activation)
    {
        case "relu":
            Layers.Add(new ReLU());
            break;
        case "tanh":
            Layers.Add(new Tanh());
            break;
    }
}
/// <summary>
/// Copies the Keras "size" pair (the two upsampling scale factors) from the
/// parsed layer config into the <see cref="UpSampling2D"/> layer.
/// </summary>
/// <param name="layer">Target upsampling layer to configure.</param>
/// <param name="config">Parsed Keras layer config supplying <c>size</c>.</param>
public static void LoadConfig(this UpSampling2D layer, KerasLayerConfigJson config)
{
    int sizeX = config.size[0];
    int sizeY = config.size[1];
    layer.Size = new Vector2Int(sizeX, sizeY);
}