/// <summary>
/// Runs ONNX shape inference over <paramref name="model"/> via the native binding
/// and returns the model reloaded with the inferred shapes.
/// </summary>
/// <param name="model">The model to run shape inference on.</param>
/// <param name="check_type">When true, also validates element types during inference.</param>
/// <param name="strict_mode">When true, inference errors are raised instead of skipped.</param>
/// <returns>A new <see cref="ModelProto"/> containing the inferred shapes.</returns>
public static ModelProto infer_shapes(ModelProto model, bool check_type = false, bool strict_mode = false)
{
    var serialized = model.ToByteArray();
    // The native layer works on serialized protobuf bytes, not managed objects.
    var inferredBytes = C.infer_shapes(serialized, check_type, strict_mode);
    return onnx.LoadModelFromString(inferredBytes);
}
/// <summary>
/// Converts <paramref name="model"/> to the given opset version via the native
/// version converter and returns the converted model.
/// </summary>
/// <param name="model">The model to convert.</param>
/// <param name="target_version">The target opset version.</param>
/// <returns>A new <see cref="ModelProto"/> at the requested opset version.</returns>
public static ModelProto ConvertVersion(ModelProto model, int target_version)
{
    var serialized = model.ToByteArray();
    // Round-trip through bytes: the native converter consumes and produces serialized protobufs.
    var convertedBytes = C.convert_version(serialized, target_version);
    return onnx.LoadModelFromString(convertedBytes);
}
/// <summary>
/// Asserts that the serialized bytes of <paramref name="model"/> exactly match the
/// embedded resource named <paramref name="expectedName"/>.
/// </summary>
/// <param name="model">The model whose serialization is under test.</param>
/// <param name="expectedName">Name of the embedded resource holding the expected bytes.</param>
static void AssertModelBytesEqualToEmbeddedExpected(ModelProto model, string expectedName)
{
    byte[] actualBytes = model.ToByteArray();
    // To regenerate the embedded expected resource, temporarily uncomment:
    //model.WriteToFile(expectedName);
    byte[] expectedBytes = AssemblyResourceLoader.GetBytes(expectedName);
    CollectionAssert.AreEqual(expectedBytes, actualBytes);
}
/// <summary>
/// Validates the consistency of <paramref name="model"/> via the native checker.
/// When <paramref name="full_check"/> is true, additionally runs type-checked shape
/// inference over the model.
/// </summary>
/// <param name="model">The model to validate.</param>
/// <param name="full_check">When true, also runs shape inference with type checking.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the serialized model exceeds the 2GB protobuf limit; use the
/// model-path overload of check_model instead.
/// </exception>
public static void CheckModel(ModelProto model, bool full_check = false)
{
    // Protobuf cannot serialize messages larger than 2GB, so remind users to
    // use the model path to check oversized models instead.
    var protobufBytes = model.ToByteArray();
    if (protobufBytes.Length > MAXIMUM_PROTOBUF)
    {
        // Was a bare `new Exception(...)`; InvalidOperationException is more specific
        // and remains catchable by existing `catch (Exception)` callers.
        throw new InvalidOperationException("This protobuf of onnx model is too large (>2GB). Call check_model with model path instead.");
    }
    C.check_model(protobufBytes);
    if (full_check)
    {
        ShapeInference.infer_shapes(model, check_type: true);
    }
}
/// <summary>Serializes <paramref name="proto"/> to its protobuf wire-format bytes.</summary>
/// <param name="proto">The model to serialize.</param>
/// <returns>The serialized protobuf bytes.</returns>
private static byte[] _serialize(ModelProto proto) => proto.ToByteArray();