unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        // Validate that every dimension array agrees with Strides BEFORE any
        // native allocation, so a failed precondition cannot leak the
        // __Native struct or already-marshaled tensor descriptions.
        var dimensionCount = Strides.Length;

        if (WindowSize.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("WindowSize must have the same length as Strides.");
        }
        if (StartPadding.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("StartPadding must have the same length as Strides.");
        }
        if (EndPadding.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("EndPadding must have the same length as Strides.");
        }

        __Native * @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->InputTensor    = InputTensor.__MarshalAlloc();
        @ref->OutputTensor   = OutputTensor.__MarshalAlloc();
        @ref->DimensionCount = dimensionCount;

        // Copy the dimension arrays into native memory; released in __MarshalFree.
        @ref->Strides        = new(UnsafeUtilities.AllocWithData(Strides));
        @ref->WindowSize     = new(UnsafeUtilities.AllocWithData(WindowSize));
        @ref->StartPadding   = new(UnsafeUtilities.AllocWithData(StartPadding));
        @ref->EndPadding     = new(UnsafeUtilities.AllocWithData(EndPadding));
        @ref->IncludePadding = IncludePadding;

        return new(@ref);
    }
    unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        // Validate that both offset arrays agree with Scales BEFORE any native
        // allocation, so a failed precondition cannot leak the __Native struct
        // or the already-marshaled tensor descriptions.
        var dimensionCount = Scales.Length;

        if (InputPixelOffsets.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("InputPixelOffsets must have the same length as Scales.");
        }
        if (OutputPixelOffsets.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("OutputPixelOffsets must have the same length as Scales.");
        }

        __Native * @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->InputGradientTensor  = InputGradientTensor.__MarshalAlloc();
        @ref->OutputGradientTensor = OutputGradientTensor.__MarshalAlloc();
        @ref->InterpolationMode    = InterpolationMode;
        @ref->DimensionCount       = dimensionCount;

        // Copy the per-dimension arrays into native memory; released in __MarshalFree.
        @ref->Scales             = new(UnsafeUtilities.AllocWithData(Scales));
        @ref->InputPixelOffsets  = new(UnsafeUtilities.AllocWithData(InputPixelOffsets));
        @ref->OutputPixelOffsets = new(UnsafeUtilities.AllocWithData(OutputPixelOffsets));

        return new(@ref);
    }
    unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        __Native * @ref = UnsafeUtilities.Alloc <__Native>();

        // Required tensors.
        @ref->InputTensor      = InputTensor.__MarshalAlloc();
        @ref->WeightTensor     = WeightTensor.__MarshalAlloc();
        @ref->RecurrenceTensor = RecurrenceTensor.__MarshalAlloc();

        // Optional tensors marshal to a null pointer when absent.
        @ref->BiasTensor              = BiasTensor.HasValue ? BiasTensor.Value.__MarshalAlloc() : IntPtr.Zero;
        @ref->HiddenInitializerTensor = HiddenInitializerTensor.HasValue ? HiddenInitializerTensor.Value.__MarshalAlloc() : IntPtr.Zero;
        @ref->SequenceLengthsTensor   = SequenceLengthsTensor.HasValue ? SequenceLengthsTensor.Value.__MarshalAlloc() : IntPtr.Zero;
        @ref->OutputSequenceTensor    = OutputSequenceTensor.HasValue ? OutputSequenceTensor.Value.__MarshalAlloc() : IntPtr.Zero;
        @ref->OutputSingleTensor      = OutputSingleTensor.HasValue ? OutputSingleTensor.Value.__MarshalAlloc() : IntPtr.Zero;

        // Marshal every activation description into one contiguous native array.
        @ref->ActivationDescCount = Activations.Length;
        @ref->Activations         = IntPtr.Zero;
        if (Activations.Length > 0)
        {
            var nativeActivations = UnsafeUtilities.Alloc <OperatorDescription.__Native>(Activations.Length);
            for (int index = 0; index < Activations.Length; index++)
            {
                Activations[index].__MarshalTo(ref nativeActivations[index]);
            }
            @ref->Activations = new(nativeActivations);
        }

        @ref->Direction = Direction;

        return new IntPtr(@ref);
    }
Exemple #4
0
        /// <summary>
        /// Attempts to copy the contents of this span into another.  The
        /// destination must be at least as big as the source, and may be bigger.
        /// </summary>
        /// <param name="destination">The span to copy items into.</param>
        /// <returns><c>true</c> when the copy succeeded; <c>false</c> when the destination is too short.</returns>
        public bool TryCopyTo(Span <T> destination)
        {
            // Working from a local copy helps the JIT.
            // See https://github.com/dotnet/coreclr/issues/5556
            var source = this;

            if (source.Length > destination.Length)
            {
                return false;
            }

            if (default(T) != null && MemoryUtils.IsPrimitiveValueType <T>())
            {
                // Primitive value types can be moved as a single raw block.
                UnsafeUtilities.CopyBlock(source.Object, source.Offset, destination.Object, destination.Offset,
                                          source.Length * UnsafeUtilities.SizeOf <T>());
            }
            else
            {
                // Element-by-element copy; bounds were already verified above.
                for (int index = 0; index < source.Length; index++)
                {
                    T item = UnsafeUtilities.Get <T>(source.Object, source.Offset, (UIntPtr)index);
                    UnsafeUtilities.Set(destination.Object, destination.Offset, (UIntPtr)index, item);
                }
            }

            return true;
        }
    /// <summary>
    /// Releases the native memory produced by the matching __MarshalAlloc:
    /// each tensor description (optional ones only when present), the four
    /// per-dimension arrays, and finally the __Native struct itself.
    /// </summary>
    /// <param name="pDesc">Pointer to the native description being freed.</param>
    unsafe void IOperatorDescriptionMarshal.__MarshalFree(ref IntPtr pDesc)
    {
        var @ref = (__Native *)pDesc;

        InputTensor.__MarshalFree(ref @ref->InputTensor);

        // Zero-point tensors are optional; only free them when they were allocated.
        if (InputZeroPointTensor != null)
        {
            InputZeroPointTensor.Value.__MarshalFree(ref @ref->InputZeroPointTensor);
        }

        FilterTensor.__MarshalFree(ref @ref->FilterTensor);

        if (FilterZeroPointTensor != null)
        {
            FilterZeroPointTensor.Value.__MarshalFree(ref @ref->FilterZeroPointTensor);
        }

        OutputTensor.__MarshalFree(ref @ref->OutputTensor);
        // These arrays were copied to native memory with AllocWithData in the alloc path.
        UnsafeUtilities.Free(@ref->Strides);
        UnsafeUtilities.Free(@ref->Dilations);
        UnsafeUtilities.Free(@ref->StartPadding);
        UnsafeUtilities.Free(@ref->EndPadding);

        // Free the wrapper struct last, after everything it owns.
        UnsafeUtilities.Free(@ref);
    }
        /// <summary>
        /// Returns whether <paramref name="value"/> occurs in the buffer.
        /// NOTE(review): this method also disposes the instance, making it a
        /// terminal, call-once operation — presumably intentional for this
        /// builder-style type; confirm against the owning type's contract.
        /// </summary>
        public bool Contains(T value)
        {
            bool result = UnsafeUtilities.Contains(_buffer, 0, _length, value);

            // Consuming operation: release the buffer before returning.
            Dispose();
            return(result);
        }
Exemple #7
0
        /// <summary>
        /// Marshals this stream-output description to its native layout,
        /// allocating native arrays for the declaration entries and the buffer
        /// strides. The corresponding free routine owns releasing them.
        /// </summary>
        internal unsafe void __MarshalTo(ref __Native @ref)
        {
            @ref.NumEntries = Elements?.Length ?? 0;
            if (@ref.NumEntries > 0)
            {
                // Marshal each element into a freshly allocated native array.
                var nativeElements = (StreamOutputElement.__Native *)UnsafeUtilities.Alloc <StreamOutputElement.__Native>(@ref.NumEntries);
                for (int i = 0; i < @ref.NumEntries; i++)
                {
                    Elements[i].__MarshalTo(ref nativeElements[i]);
                }

                @ref.pSODeclaration = nativeElements;
            }

            @ref.NumStrides = Strides?.Length ?? 0;
            if (@ref.NumStrides > 0)
            {
                // Strides are blittable ints, so a raw memory copy suffices.
                var nativeStrides = UnsafeUtilities.Alloc <int>(@ref.NumStrides);
                fixed(int *src = &Strides[0])
                {
                    MemoryHelpers.CopyMemory(nativeStrides, (IntPtr)src, @ref.NumStrides * sizeof(int));
                }

                @ref.pBufferStrides = nativeStrides;
            }

            @ref.RasterizedStream = RasterizedStream;
        }
Exemple #8
0
        /// <summary>
        /// Determines whether two spans are structurally (byte-wise) equal by
        /// comparing their raw contents.
        /// </summary>
        /// <param name="first">A span, of type T to compare to second.</param>
        /// <param name="second">A span, of type U to compare to first.</param>
        public static bool BlockEquals <[Primitive] T, [Primitive] U>(this ReadOnlySpan <T> first, ReadOnlySpan <U> second)
            where T : struct
            where U : struct
        {
            int totalBytes = first.Length * UnsafeUtilities.SizeOf <T>();

            if (totalBytes != second.Length * UnsafeUtilities.SizeOf <U>())
            {
                return false;
            }

            // Comparing wider primitives means fewer loop iterations, so pick
            // the widest element size that divides the byte count evenly.
            if (totalBytes % sizeof(long) == 0)
            {
                return SequenceEqual(Cast <T, long>(first), Cast <U, long>(second));
            }
            if (totalBytes % sizeof(int) == 0)
            {
                return SequenceEqual(Cast <T, int>(first), Cast <U, int>(second));
            }
            if (totalBytes % sizeof(short) == 0)
            {
                return SequenceEqual(Cast <T, short>(first), Cast <U, short>(second));
            }

            return SequenceEqual(Cast <T, byte>(first), Cast <U, byte>(second));
        }
Exemple #9
0
        /// <summary>
        /// Reinterprets a ReadOnlySpan&lt;T&gt; as a ReadOnlySpan&lt;U&gt; over the same
        /// underlying memory, recomputing the element count for the new element size.
        /// </summary>
        /// <param name="slice">The span to reinterpret.</param>
        public static ReadOnlySpan <U> Cast <[Primitive] T, [Primitive] U>(this ReadOnlySpan <T> slice)
            where T : struct
            where U : struct
        {
            int countOfU;

            // This comparison is a jittime constant
            if (UnsafeUtilities.SizeOf <T>() > UnsafeUtilities.SizeOf <U>())
            {
                // T is wider than U, so the U-count can exceed the T-count;
                // compute it in native-int space and verify it fits in an int.
                IntPtr count = UnsafeUtilities.CountOfU <T, U>((uint)slice.Length);
                unsafe
                {
                    // We can't compare IntPtrs, so have to resort to pointer comparison
                    bool fits = (byte *)count <= (byte *)int.MaxValue;
                    Contract.Requires(fits);
                    countOfU = (int)count.ToPointer();
                }
            }
            else
            {
                // T is no wider than U: the result count shrinks (or stays equal),
                // so plain int arithmetic suffices here.
                countOfU = slice.Length * UnsafeUtilities.SizeOf <T>() / UnsafeUtilities.SizeOf <U>();
            }

            object  obj    = slice.Object;
            UIntPtr offset = slice.Offset;

            // An empty result span carries no object/offset reference at all.
            if (countOfU == 0)
            {
                obj    = null;
                offset = (UIntPtr)0;
            }

            return(new ReadOnlySpan <U>(obj, offset, countOfU));
        }
    /// <summary>
    /// Releases the native description: every tensor description that was
    /// marshaled (optional ones only when present), then the __Native struct.
    /// </summary>
    /// <param name="pDesc">Pointer to the native description being freed.</param>
    unsafe void IOperatorDescriptionMarshal.__MarshalFree(ref IntPtr pDesc)
    {
        var @ref = (__Native *)pDesc;

        // Optional tensor: only freed when it was allocated in the alloc path.
        if (InputTensor != null)
        {
            InputTensor.Value.__MarshalFree(ref @ref->InputTensor);
        }

        // Required tensors are always freed.
        InputGradientTensor.__MarshalFree(ref @ref->InputGradientTensor);
        RoiTensor.__MarshalFree(ref @ref->RoiTensor);
        BatchIndicesTensor.__MarshalFree(ref @ref->BatchIndicesTensor);

        if (OutputGradientTensor != null)
        {
            OutputGradientTensor.Value.__MarshalFree(ref @ref->OutputGradientTensor);
        }

        if (OutputROIGradientTensor != null)
        {
            OutputROIGradientTensor.Value.__MarshalFree(ref @ref->OutputROIGradientTensor);
        }

        // Free the wrapper struct last, after everything it owns.
        UnsafeUtilities.Free(@ref);
    }
Exemple #11
0
    /// <summary>
    /// Releases the native convolution description: each tensor description
    /// (optional ones only when present), the five per-dimension arrays, the
    /// optional fused activation, then the __Native struct itself.
    /// </summary>
    /// <param name="pDesc">Pointer to the native description being freed.</param>
    unsafe void IOperatorDescriptionMarshal.__MarshalFree(ref IntPtr pDesc)
    {
        var @ref = (__Native *)pDesc;

        InputTensor.__MarshalFree(ref @ref->InputTensor);
        FilterTensor.__MarshalFree(ref @ref->FilterTensor);

        // Bias is optional; only free it when it was allocated.
        if (BiasTensor != null)
        {
            BiasTensor.Value.__MarshalFree(ref @ref->BiasTensor);
        }

        OutputTensor.__MarshalFree(ref @ref->OutputTensor);
        // These arrays were copied to native memory with AllocWithData in the alloc path.
        UnsafeUtilities.Free(@ref->Strides);
        UnsafeUtilities.Free(@ref->Dilations);
        UnsafeUtilities.Free(@ref->StartPadding);
        UnsafeUtilities.Free(@ref->EndPadding);
        UnsafeUtilities.Free(@ref->OutputPadding);

        if (FusedActivation != null)
        {
            FusedActivation.Value.__MarshalFree(ref @ref->FusedActivation);
        }

        // Free the wrapper struct last, after everything it owns.
        UnsafeUtilities.Free(@ref);
    }
    unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        // Validate that every dimension array agrees with Strides BEFORE any
        // native allocation, so a failed precondition cannot leak the
        // __Native struct or already-marshaled tensor descriptions.
        var dimensionCount = Strides.Length;

        if (Dilations.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("Dilations must have the same length as Strides.");
        }
        if (StartPadding.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("StartPadding must have the same length as Strides.");
        }
        if (EndPadding.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("EndPadding must have the same length as Strides.");
        }

        __Native * @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->InputTensor           = InputTensor.__MarshalAlloc();
        // Zero-point tensors are optional; absent ones marshal as null pointers.
        @ref->InputZeroPointTensor  = (InputZeroPointTensor != null) ? InputZeroPointTensor.Value.__MarshalAlloc() : IntPtr.Zero;
        @ref->FilterTensor          = FilterTensor.__MarshalAlloc();
        @ref->FilterZeroPointTensor = (FilterZeroPointTensor != null) ? FilterZeroPointTensor.Value.__MarshalAlloc() : IntPtr.Zero;
        @ref->OutputTensor          = OutputTensor.__MarshalAlloc();
        @ref->DimensionCount        = dimensionCount;

        // Copy the dimension arrays into native memory; released in __MarshalFree.
        @ref->Strides      = new(UnsafeUtilities.AllocWithData(Strides));
        @ref->Dilations    = new(UnsafeUtilities.AllocWithData(Dilations));
        @ref->StartPadding = new(UnsafeUtilities.AllocWithData(StartPadding));
        @ref->EndPadding   = new(UnsafeUtilities.AllocWithData(EndPadding));
        @ref->GroupCount   = GroupCount;

        return new(@ref);
    }
        /// <summary>
        /// Marshals the filter lists to their native layout. Null or empty
        /// arrays marshal as a zero count with a null pointer.
        /// </summary>
        internal unsafe void __MarshalTo(ref __Native @ref)
        {
            // Start from the empty representation; overwrite when entries exist.
            @ref.NumCategories = 0;
            @ref.PCategoryList = IntPtr.Zero;
            @ref.NumSeverities = 0;
            @ref.PSeverityList = IntPtr.Zero;
            @ref.NumIDs        = 0;
            @ref.PIDList       = IntPtr.Zero;

            if (Categories is { Length: > 0 })
            {
                @ref.NumCategories = Categories.Length;
                @ref.PCategoryList = UnsafeUtilities.AllocToPointer(Categories);
            }

            if (Severities is { Length: > 0 })
            {
                @ref.NumSeverities = Severities.Length;
                @ref.PSeverityList = UnsafeUtilities.AllocToPointer(Severities);
            }

            if (Ids is { Length: > 0 })
            {
                @ref.NumIDs  = Ids.Length;
                @ref.PIDList = UnsafeUtilities.AllocToPointer(Ids);
            }
        }
Exemple #14
0
    /// <summary>
    /// Releases the native quantized-matmul description: every tensor
    /// description (zero-point tensors only when present), then the
    /// __Native struct itself.
    /// </summary>
    /// <param name="pDesc">Pointer to the native description being freed.</param>
    unsafe void IOperatorDescriptionMarshal.__MarshalFree(ref IntPtr pDesc)
    {
        var @ref = (__Native *)pDesc;

        ATensor.__MarshalFree(ref @ref->ATensor);
        AScaleTensor.__MarshalFree(ref @ref->AScaleTensor);

        // Zero-point tensors are optional; only free them when they were allocated.
        if (AZeroPointTensor != null)
        {
            AZeroPointTensor.Value.__MarshalFree(ref @ref->AZeroPointTensor);
        }

        BTensor.__MarshalFree(ref @ref->BTensor);
        BScaleTensor.__MarshalFree(ref @ref->BScaleTensor);

        if (BZeroPointTensor != null)
        {
            BZeroPointTensor.Value.__MarshalFree(ref @ref->BZeroPointTensor);
        }

        OutputScaleTensor.__MarshalFree(ref @ref->OutputScaleTensor);

        if (OutputZeroPointTensor != null)
        {
            OutputZeroPointTensor.Value.__MarshalFree(ref @ref->OutputZeroPointTensor);
        }

        OutputTensor.__MarshalFree(ref @ref->OutputTensor);

        // Free the wrapper struct last, after everything it owns.
        UnsafeUtilities.Free(@ref);
    }
Exemple #15
0
    unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        // Validate window-array agreement BEFORE any native allocation, so a
        // failed precondition cannot leak the __Native struct or the
        // already-marshaled tensor descriptions.
        var dimensionCount = InputWindowOffsets.Length;

        if (InputWindowSizes.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("InputWindowSizes must have the same length as InputWindowOffsets.");
        }
        if (InputWindowStrides.Length != dimensionCount)
        {
            throw new IndexOutOfRangeException("InputWindowStrides must have the same length as InputWindowOffsets.");
        }

        __Native * @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->InputTensor    = InputTensor.__MarshalAlloc();
        @ref->OutputTensor   = OutputTensor.__MarshalAlloc();
        @ref->DimensionCount = dimensionCount;

        // Copy the window arrays into native memory; released in __MarshalFree.
        @ref->InputWindowOffsets = new(UnsafeUtilities.AllocWithData(InputWindowOffsets));
        @ref->InputWindowSizes   = new(UnsafeUtilities.AllocWithData(InputWindowSizes));
        @ref->InputWindowStrides = new(UnsafeUtilities.AllocWithData(InputWindowStrides));

        return new(@ref);
    }
Exemple #16
0
    /// <summary>
    /// Releases the native description: the tensor descriptions (scale, bias
    /// and fused activation only when present), then the __Native struct.
    /// </summary>
    /// <param name="pDesc">Pointer to the native description being freed.</param>
    unsafe void IOperatorDescriptionMarshal.__MarshalFree(ref IntPtr pDesc)
    {
        var @ref = (__Native *)pDesc;

        InputTensor.__MarshalFree(ref @ref->InputTensor);

        // Optional tensors: only free what was allocated in the alloc path.
        if (ScaleTensor != null)
        {
            ScaleTensor.Value.__MarshalFree(ref @ref->ScaleTensor);
        }

        if (BiasTensor != null)
        {
            BiasTensor.Value.__MarshalFree(ref @ref->BiasTensor);
        }

        OutputTensor.__MarshalFree(ref @ref->OutputTensor);

        if (FusedActivation != null)
        {
            FusedActivation.Value.__MarshalFree(ref @ref->FusedActivation);
        }

        // Free the wrapper struct last, after everything it owns.
        UnsafeUtilities.Free(@ref);
    }
    /// <summary>
    /// Marshals this glyph run to its native layout. Indices, Advances and
    /// Offsets are each optional, but any that are present must share the same
    /// length; GlyphCount becomes that shared length (0 when all are null).
    /// Arrays are copied to unmanaged memory via Marshal.AllocHGlobal and must
    /// be released by the matching free routine.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the supplied arrays have mismatched lengths.
    /// </exception>
    internal unsafe void __MarshalTo(ref __Native @ref)
    {
        @ref.FontFace   = FontFace == null ? IntPtr.Zero : FontFace.NativePointer;
        @ref.FontEmSize = FontEmSize;
        // -1 marks "no array seen yet": the first non-null array sets the
        // count, and later arrays are validated against it.
        @ref.GlyphCount    = -1;
        @ref.GlyphIndices  = IntPtr.Zero;
        @ref.GlyphAdvances = IntPtr.Zero;
        @ref.GlyphOffsets  = IntPtr.Zero;

        if (Indices != null)
        {
            @ref.GlyphCount   = Indices.Length;
            @ref.GlyphIndices = Marshal.AllocHGlobal(Indices.Length * sizeof(ushort));
            if (Indices.Length > 0)
            {
                UnsafeUtilities.Write(@ref.GlyphIndices, Indices, 0, Indices.Length);
            }
        }

        if (Advances != null)
        {
            if (@ref.GlyphCount >= 0 && @ref.GlyphCount != Advances.Length)
            {
                throw new InvalidOperationException(
                          $"Invalid length for array Advances [{Advances.Length}] and Indices [{@ref.GlyphCount}]. Indices, Advances and Offsets array must have same size - or may be null"
                          );
            }

            @ref.GlyphCount    = Advances.Length;
            @ref.GlyphAdvances = Marshal.AllocHGlobal(Advances.Length * sizeof(float));
            if (Advances.Length > 0)
            {
                UnsafeUtilities.Write(@ref.GlyphAdvances, Advances, 0, Advances.Length);
            }
        }

        if (Offsets != null)
        {
            if (@ref.GlyphCount >= 0 && @ref.GlyphCount != Offsets.Length)
            {
                // Message format aligned with the Advances check above; the
                // original text had unbalanced brackets/parentheses.
                throw new InvalidOperationException(
                          $"Invalid length for array Offsets [{Offsets.Length}] and Indices/Advances [{@ref.GlyphCount}]. Indices, Advances and Offsets array must have same size - or may be null"
                          );
            }

            @ref.GlyphCount   = Offsets.Length;
            @ref.GlyphOffsets = Marshal.AllocHGlobal(Offsets.Length * sizeof(GlyphOffset));
            if (Offsets.Length > 0)
            {
                UnsafeUtilities.Write(@ref.GlyphOffsets, Offsets, 0, Offsets.Length);
            }
        }

        // No array was supplied: report zero glyphs instead of the sentinel.
        if (@ref.GlyphCount < 0)
        {
            @ref.GlyphCount = 0;
        }

        @ref.IsSideways = IsSideways;
        @ref.BidiLevel  = BidiLevel;
    }
Exemple #18
0
 /// <summary>
 /// Marshals the root-signature description to its native layout. Null arrays
 /// produce a zero count; AllocToPointer is handed the array as-is, matching
 /// the original behavior.
 /// </summary>
 internal unsafe void __MarshalTo(ref __Native @ref)
 {
     var parameters = Parameters;
     var samplers   = StaticSamplers;

     @ref.NumParameters     = parameters == null ? 0 : parameters.Length;
     @ref.PParameters       = UnsafeUtilities.AllocToPointer(parameters);
     @ref.NumStaticSamplers = samplers == null ? 0 : samplers.Length;
     @ref.PStaticSamplers   = UnsafeUtilities.AllocToPointer(samplers);
     @ref.Flags             = Flags;
 }
Exemple #19
0
 internal unsafe void __MarshalTo(ref __Native @ref)
 {
     @ref.ByteStride       = ByteStride;
     @ref.NumArgumentDescs = IndirectArguments?.Length ?? 0;
     if (@ref.NumArgumentDescs > 0)
     {
         @ref.pArgumentDescs = UnsafeUtilities.Alloc <IndirectArgumentDescription>(@ref.NumArgumentDescs);
         fixed(void *indirectArgumentsPtr = &IndirectArguments ![0])
 /// <summary>
 /// Copies the native bytecode blob back into a fresh managed byte array.
 /// </summary>
 internal unsafe void __MarshalFrom(ref __Native @ref)
 {
     var length = @ref.Length;
     Data = new byte[length];
     if (length > 0)
     {
         UnsafeUtilities.Read(@ref.Bytecode, Data);
     }
 }
Exemple #21
0
    /// <summary>
    /// Releases the native wrapper: the nested description first, then the
    /// __Native struct itself.
    /// </summary>
    /// <param name="pDesc">Pointer to the native wrapper being freed.</param>
    internal unsafe void __MarshalFree(ref IntPtr pDesc)
    {
        __Native * @ref = (__Native *)pDesc;

        ((ITensorDescriptionMarshal)Description).__MarshalFree(ref @ref->Description);

        UnsafeUtilities.Free(@ref);
    }
    /// <summary>
    /// Allocates the native wrapper struct and marshals the nested operator
    /// description into it. The matching free routine releases both.
    /// </summary>
    internal unsafe IntPtr __MarshalAlloc()
    {
        var @ref = UnsafeUtilities.Alloc <__Native>();
        @ref->Description = ((IOperatorDescriptionMarshal)Description).__MarshalAlloc();

        return new IntPtr(@ref);
    }
    /// <summary>
    /// Releases the native description: the output tensor description first,
    /// then the __Native struct itself.
    /// </summary>
    /// <param name="pDesc">Pointer to the native description being freed.</param>
    unsafe void IOperatorDescriptionMarshal.__MarshalFree(ref IntPtr pDesc)
    {
        var @ref = (__Native *)pDesc;

        OutputTensor.__MarshalFree(ref @ref->OutputTensor);

        UnsafeUtilities.Free(@ref);
    }
    /// <summary>
    /// Marshals the description to native memory. Both tensors are optional;
    /// an absent tensor marshals as a null pointer.
    /// </summary>
    unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        var @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->InputTensor  = InputTensor.HasValue ? InputTensor.Value.__MarshalAlloc() : IntPtr.Zero;
        @ref->OutputTensor = OutputTensor.HasValue ? OutputTensor.Value.__MarshalAlloc() : IntPtr.Zero;

        return new IntPtr(@ref);
    }
Exemple #25
0
    /// <summary>
    /// Marshals the two required tensor descriptions into a fresh native struct.
    /// </summary>
    unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        var @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->InputTensor  = InputTensor.__MarshalAlloc();
        @ref->OutputTensor = OutputTensor.__MarshalAlloc();

        return new IntPtr(@ref);
    }
Exemple #26
0
    /// <summary>
    /// Marshals the graph-node description: the operator's raw COM pointer
    /// plus an optional ANSI copy of the node name.
    /// </summary>
    unsafe IntPtr IGraphNodeDescriptionMarshal.__MarshalAlloc()
    {
        var @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->Operator = Operator.NativePointer;

        // A null/empty name marshals as a null pointer; otherwise copy it to
        // unmanaged ANSI memory (released by the matching free routine).
        @ref->Name = IntPtr.Zero;
        if (!string.IsNullOrEmpty(Name))
        {
            @ref->Name = Marshal.StringToHGlobalAnsi(Name);
        }

        return new IntPtr(@ref);
    }
 /// <summary>
 /// Marshals the filter lists to their native layout. Null arrays produce a
 /// zero count; AllocToPointer is handed the array as-is, matching the
 /// original behavior.
 /// </summary>
 internal unsafe void __MarshalTo(ref __Native @ref)
 {
     @ref.NumCategories = Categories is null ? 0 : Categories.Length;
     @ref.PCategoryList = UnsafeUtilities.AllocToPointer(Categories);
     @ref.NumSeverities = Severities is null ? 0 : Severities.Length;
     @ref.PSeverityList = UnsafeUtilities.AllocToPointer(Severities);
     @ref.NumIDs        = Ids is null ? 0 : Ids.Length;
     @ref.PIDList       = UnsafeUtilities.AllocToPointer(Ids);
 }
    /// <summary>
    /// Writes an array of values to the current stream and advances the
    /// position by the number of bytes written.
    /// </summary>
    /// <remarks>
    /// In order to provide faster read/write, this operation doesn't check stream bound.
    /// A client must carefully not read/write above the size of this datastream.
    /// </remarks>
    /// <typeparam name = "T">The type of the values to be written to the stream.</typeparam>
    /// <param name = "data">An array of values to be written to the stream.</param>
    /// <param name = "offset">The zero-based offset in data at which to begin copying values to the current stream.</param>
    /// <param name = "count">
    /// The number of values to be written to the current stream. If this is zero, all of the contents <paramref name="data" /> will be written.
    /// </param>
    /// <exception cref="NotSupportedException">This stream does not support writing.</exception>
    public void WriteRange <T>(T[] data, int offset, int count) where T : unmanaged
    {
        if (!CanWrite)
        {
            throw new NotSupportedException();
        }

        // Write returns the pointer just past the last byte written; convert
        // it back to a stream-relative position.
        var destination = (IntPtr)(_buffer + _position);
        _position = (byte *)UnsafeUtilities.Write(destination, data, offset, count) - _buffer;
    }
Exemple #29
0
    /// <summary>
    /// Marshals the buffer binding: the buffer's raw COM pointer plus the
    /// bound byte range.
    /// </summary>
    unsafe IntPtr IBindingDescriptionMarshal.__MarshalAlloc()
    {
        var @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->Buffer      = Buffer.NativePointer;
        @ref->Offset      = Offset;
        @ref->SizeInBytes = SizeInBytes;

        return new IntPtr(@ref);
    }
Exemple #30
0
    /// <summary>
    /// Marshals the description: both tensors plus an optional scale/bias
    /// struct copied to native memory when present.
    /// </summary>
    unsafe IntPtr IOperatorDescriptionMarshal.__MarshalAlloc()
    {
        var @ref = UnsafeUtilities.Alloc <__Native>();

        @ref->InputTensor  = InputTensor.__MarshalAlloc();
        @ref->OutputTensor = OutputTensor.__MarshalAlloc();

        // ScaleBias is optional; absent marshals as a null pointer.
        @ref->ScaleBias = IntPtr.Zero;
        if (ScaleBias.HasValue)
        {
            @ref->ScaleBias = new(UnsafeUtilities.AllocWithData(ScaleBias.Value));
        }

        return new IntPtr(@ref);
    }