/// <summary>
/// Initializes the decoder. Always call this before trying to use this class.
/// </summary>
/// <param name="framebuffer">Framebuffer to be created. Decoded frames will be written here.</param>
/// <param name="demux">Demux to read the video data from.</param>
/// <param name="loadOptions">Optional load options; not used by this decoder.</param>
/// <exception cref="ArgumentException">
/// Thrown when <paramref name="demux"/> is null, the stream info is missing/invalid,
/// or the pixel format is not one of RGB555, RGB24 or ARGB32.
/// </exception>
public override void Init(out Texture2D framebuffer, Demux demux, LoadOptions loadOptions = null)
{
    // Can we decode this stream? Validate everything up front so a misconfigured
    // stream fails loudly here rather than producing garbage frames later.
    // (Exception type normalized to the unqualified ArgumentException used elsewhere
    // in this block; it is the same System.ArgumentException type.)
    if (demux == null) {
        throw new ArgumentException("Missing Demux to get video frames from");
    }
    if (info == null || info.width <= 0 || info.height <= 0 || info.bitsPerPixel <= 0) {
        throw new ArgumentException("Can't initialize stream decoder without proper VideoStreamInfo");
    }
    if (info.bitsPerPixel != 16 && info.bitsPerPixel != 24 && info.bitsPerPixel != 32) {
        throw new ArgumentException("Only RGB555, RGB24 and ARGB32 pixel formats are supported");
    }

    // Create the framebuffer. Only 32bpp sources need an alpha channel; both 16bpp
    // (RGB555) and 24bpp sources are decoded into an RGB24 texture.
    this.framebuffer = new Texture2D(info.width, info.height,
        info.bitsPerPixel == 32 ? TextureFormat.ARGB32 : TextureFormat.RGB24, false);
    framebuffer = this.framebuffer;
    rgbBuffer = new Color32[info.width * info.height];

    this.demux = demux;

    // Reset per-frame and cumulative decode statistics.
    this._lastFrameDecodeTime = 0;
    this._lastFrameSizeBytes = 0;
    this._totalDecodeTime = 0;
    this._totalSizeBytes = 0;
    this.watch = new System.Diagnostics.Stopwatch();
}
/// <summary>
/// Initializes the decoder for playing back the given video stream. Outputs a
/// framebuffer texture that is updated with decoded frame pixel data.
/// </summary>
/// <param name="framebuffer">Receives the framebuffer texture.</param>
/// <param name="demux">Demux supplying the encoded video frames.</param>
/// <param name="loadOptions">Optional load options; not used here.</param>
public override void Init(out Texture2D framebuffer, Demux demux, LoadOptions loadOptions = null)
{
    // Without a demux there is nothing to decode from.
    if (demux == null) {
        throw new System.ArgumentException("Missing Demux to get video frames from");
    }

    // The initial texture size and format are placeholders only; they are
    // replaced as soon as the first frame is decoded.
    var fb = new Texture2D(4, 4, TextureFormat.RGB24, false);
    this.framebuffer = fb;
    framebuffer = fb;

    this.demux = demux;

    // Reset decode-time statistics and the timing stopwatch.
    this._lastFrameDecodeTime = 0;
    this._totalDecodeTime = 0;
    this.watch = new System.Diagnostics.Stopwatch();
}
/// <summary>
/// Decodes every frame of an animated WebP file into textures with their timestamps.
/// Returns null when a decoder cannot be created for the given bytes.
/// </summary>
/// <param name="bytes">Raw WebP file contents.</param>
/// <returns>List of (texture, timestamp) pairs, or null on decoder-creation failure.</returns>
public static async Task<List<(Texture2D, int)>> Decode(byte[] bytes)
{
    // Bail out early if the bytes are not a decodable WebP animation.
    if (!CreateDecoder(bytes, out var animDecoder, out var animInfo)) {
        return null;
    }

    Debug.Log($"[WebPDecoderWrapper] Loaded animation: {animInfo.frame_count}, {animInfo.canvas_width}/{animInfo.canvas_height}");

    // Decode the raw frame bytes off the main thread, then turn them into textures.
    var decodedBytes = await WebPDecodeJob.StartJob(animDecoder, animInfo.frame_count);
    Debug.Log($"[WebPDecoderWrapper] Raw bytes decode complete");
    var textures = CreateTexturesFromBytes(decodedBytes, animInfo.canvas_width, animInfo.canvas_height);

    // Release the native decoder now that all frames have been extracted.
    Demux.WebPAnimDecoderReset(animDecoder);
    Demux.WebPAnimDecoderDelete(animDecoder);

    return textures;
}
/// <summary>
/// Initializes the decoder for playing back an audio stream. It returns an audio clip
/// which is either streaming or preloaded; Unity invokes the read/seek callbacks to
/// pull the actual audio data.
/// </summary>
/// <param name="audioClip">Receives the created audio clip.</param>
/// <param name="demux">Demux supplying the encoded audio samples.</param>
/// <param name="loadOptions">Load options; defaults are used when null.</param>
public override void Init(out AudioClip audioClip, Demux demux, LoadOptions loadOptions = null)
{
    if (demux == null) {
        throw new ArgumentException("Missing Demux to get audio samples for decoding");
    }
    if (loadOptions == null) {
        loadOptions = LoadOptions.Default;
    }

    this.demux = demux;
    this._totalDecodeTime = 0;
    this.watch = new System.Diagnostics.Stopwatch();

    // It'd be safer to create the clip inside a lock, but Unity tends to crash on that.
#if UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6
    this.audioClip = AudioClip.Create("_movie_audio_", streamInfo.sampleCount, streamInfo.channels,
        streamInfo.sampleRate, loadOptions._3DSound, !loadOptions.preloadAudio, OnAudioRead, OnAudioSeek);
#else
    // From Unity 5 on, the 3D audio flag is deprecated; spatialization moved to
    // AudioSource.spatialBlend, so the overload without it is used.
    this.audioClip = AudioClip.Create("_movie_audio_", streamInfo.sampleCount, streamInfo.channels,
        streamInfo.sampleRate, !loadOptions.preloadAudio, OnAudioRead, OnAudioSeek);
#endif
    audioClip = this.audioClip;
}
/// <summary>
/// Remuxes the input file into a new AVI that keeps only every second video frame
/// (and all audio), halving the video framerate so total duration stays the same.
/// </summary>
void DropHalfTheFramesRemux()
{
    // In this example we're going one level deeper in the API and work directly
    // with the Demux class. We could use MoviePlayerUtil.Load too, but for remuxing
    // we don't need Decoders to be instantiated, because we're just copying encoded
    // frame bytes around. Since no Unity API is referenced, the work can run in a
    // separate thread.
    RunInBackgroundOrNot(delegate() {
        // Instantiate a demux for the input stream based on stream type.
        Stream instream = File.OpenRead(infile);
        Demux demux = Demux.forSource(instream);
        demux.Init(instream);

        // FIX: use File.Create instead of File.OpenWrite. OpenWrite does not
        // truncate an existing file, so remuxing over a longer old file would
        // leave stale trailing bytes and produce a corrupt AVI.
        Stream outstream = File.Create(outfile);

        // Explicitly instantiate the remux we want (AviRemux). We clone the video
        // stream info because we're dropping every other frame and therefore need
        // to halve the video framerate; audio stream info is reused as-is.
        Remux remux = new AviRemux();
        var remuxVideoStreamInfo = new VideoStreamInfo(demux.videoStreamInfo);
        remuxVideoStreamInfo.framerate /= 2;
        remux.Init(outstream, remuxVideoStreamInfo, demux.audioStreamInfo);

        // Buffers and counters reused across iterations.
        byte[] videoBuffer, audioBuffer;
        int videoBytesRead, audioBytesRead;

        try {
            // Sequential access through all video frames. (Random access would also
            // work, but only for demuxes that can seek — no network/webcam streams.)
            do {
                videoBytesRead = demux.ReadVideoFrame(out videoBuffer);
                if (videoBytesRead > 0) {
                    // Read exactly the number of audio samples played during one video frame.
                    int samplesPerVideoFrame = (int)(demux.audioStreamInfo.sampleRate / demux.videoStreamInfo.framerate);
                    audioBytesRead = demux.ReadAudioSamples(out audioBuffer, samplesPerVideoFrame);

                    // Write every second video frame, but all audio samples. Stream
                    // lengths still match because the remuxed framerate was halved.
                    if (demux.VideoPosition % 2 == 1) {
                        remux.WriteNextVideoFrame(videoBuffer, videoBytesRead);
                    }
                    remux.WriteNextAudioSamples(audioBuffer, audioBytesRead);
                }
            } while (videoBytesRead > 0);
        } finally {
            // Close the remux and demux even if reading/writing throws. The remux
            // MUST be closed for the output to be playable: AviRemux writes the
            // remaining index chunks and updates the avi header on Shutdown.
            remux.Shutdown();
            demux.Shutdown();

            // FIX: dispose the file streams we opened (previously leaked).
            // Stream.Dispose is idempotent, so this is safe even if Shutdown
            // already closed the underlying stream.
            outstream.Dispose();
            instream.Dispose();
        }
    });
}
/// <summary>
/// Initializes the decoder. Always call this before trying to use this class.
/// </summary>
/// <param name="framebuffer">Framebuffer to be created. Decoded frames will be written here.</param>
/// <param name="demux">Demux to read the video data from.</param>
/// <param name="loadOptions">Optional load options; not used here.</param>
public override void Init (out Texture2D framebuffer, Demux demux, LoadOptions loadOptions = null)
{
    // Validate the inputs before touching any state.
    if (demux == null) {
        throw new System.ArgumentException ("Missing Demux to get video frames from");
    }
    if (info == null || info.width <= 0 || info.height <= 0 || info.bitsPerPixel <= 0) {
        throw new ArgumentException ("Can't initialize stream decoder without proper VideoStreamInfo");
    }
    bool supportedDepth = info.bitsPerPixel == 16 || info.bitsPerPixel == 24 || info.bitsPerPixel == 32;
    if (!supportedDepth) {
        throw new ArgumentException ("Only RGB555, RGB24 and ARGB32 pixel formats are supported");
    }

    // Allocate the framebuffer texture and the intermediate pixel buffer.
    // 32bpp sources keep their alpha channel; 16bpp and 24bpp become RGB24.
    TextureFormat format = info.bitsPerPixel == 32 ? TextureFormat.ARGB32 : TextureFormat.RGB24;
    this.framebuffer = new Texture2D (info.width, info.height, format, false);
    framebuffer = this.framebuffer;
    rgbBuffer = new Color32[info.width * info.height];

    this.demux = demux;

    // Zero out all decode statistics and create a fresh stopwatch.
    this._lastFrameDecodeTime = 0;
    this._lastFrameSizeBytes = 0;
    this._totalDecodeTime = 0;
    this._totalSizeBytes = 0;
    this.watch = new System.Diagnostics.Stopwatch ();
}
/// <summary>
/// Initializes the decoder for playing back the given video stream. The returned
/// framebuffer is updated with decoded frame pixel data as frames arrive.
/// </summary>
/// <param name="framebuffer">Framebuffer.</param>
/// <param name="demux">Demux.</param>
/// <param name="loadOptions">Load options (unused).</param>
public override void Init (out Texture2D framebuffer, Demux demux, LoadOptions loadOptions = null)
{
    // A demux is mandatory — it is the only source of encoded frames.
    if (demux == null) {
        throw new System.ArgumentException ("Missing Demux to get video frames from");
    }
    this.demux = demux;

    // Texture size and format here are irrelevant placeholders: both are
    // overwritten when the first frame is decoded.
    this.framebuffer = new Texture2D (4, 4, TextureFormat.RGB24, false);
    framebuffer = this.framebuffer;

    // Start decode-time bookkeeping from a clean slate.
    this._lastFrameDecodeTime = 0;
    this._totalDecodeTime = 0;
    this.watch = new System.Diagnostics.Stopwatch ();
}
/// <summary>
/// Exercises every gate implementation via its TestGate self-check and prints the
/// results, plus round-trip checks for WireSet value and 2's-complement encoding.
/// </summary>
static void Main(string[] args)
{
    // Wire sets used for the value / 2's-complement round-trip checks below.
    WireSet ws = new WireSet(9);
    WireSet ws2 = new WireSet(9);
    WireSet ws3 = new WireSet(9);

    OrGate or = new OrGate();
    XorGate xor = new XorGate();

    // Multi-bit AND/OR gates for input widths 3..8, built in ascending order.
    MultiBitAndGate[] multiAnd = new MultiBitAndGate[6];
    for (int w = 3; w <= 8; w++) {
        multiAnd[w - 3] = new MultiBitAndGate(w);
    }
    MultiBitOrGate[] multiOr = new MultiBitOrGate[6];
    for (int w = 3; w <= 8; w++) {
        multiOr[w - 3] = new MultiBitOrGate(w);
    }

    MuxGate mux = new MuxGate();
    Demux demuxGate = new Demux();

    // Bitwise gates at the same widths as the original: OR for 0..7,
    // AND/NOT/DEMUX/MUX for 2..4.
    BitwiseOrGate[] bitwiseOr = new BitwiseOrGate[8];
    for (int w = 0; w <= 7; w++) {
        bitwiseOr[w] = new BitwiseOrGate(w);
    }
    BitwiseAndGate[] bitwiseAnd = new BitwiseAndGate[3];
    for (int w = 2; w <= 4; w++) {
        bitwiseAnd[w - 2] = new BitwiseAndGate(w);
    }
    BitwiseNotGate[] bitwiseNot = new BitwiseNotGate[3];
    for (int w = 2; w <= 4; w++) {
        bitwiseNot[w - 2] = new BitwiseNotGate(w);
    }
    BitwiseDemux[] bitwiseDemux = new BitwiseDemux[3];
    for (int w = 2; w <= 4; w++) {
        bitwiseDemux[w - 2] = new BitwiseDemux(w);
    }
    BitwiseMux[] bitwiseMux = new BitwiseMux[3];
    for (int w = 2; w <= 4; w++) {
        bitwiseMux[w - 2] = new BitwiseMux(w);
    }

    BitwiseMultiwayMux multiwayMux = new BitwiseMultiwayMux(3, 3);
    BitwiseMultiwayDemux multiwayDemux = new BitwiseMultiwayDemux(3, 3);
    HalfAdder halfAdder = new HalfAdder();
    FullAdder fullAdder = new FullAdder();
    MultiBitAdder adder = new MultiBitAdder(4);
    ALU alu = new ALU(4);

    // Run and print every self-test in the same order as before:
    // or, xor, multi-bit ANDs, multi-bit ORs, mux, demux, bitwise ANDs, bitwise ORs.
    System.Console.WriteLine(or.TestGate().ToString());
    System.Console.WriteLine(xor.TestGate().ToString());
    foreach (MultiBitAndGate g in multiAnd) {
        System.Console.WriteLine(g.TestGate().ToString());
    }
    foreach (MultiBitOrGate g in multiOr) {
        System.Console.WriteLine(g.TestGate().ToString());
    }
    System.Console.WriteLine(mux.TestGate().ToString());
    System.Console.WriteLine(demuxGate.TestGate().ToString());
    foreach (BitwiseAndGate g in bitwiseAnd) {
        System.Console.WriteLine(g.TestGate().ToString());
    }
    foreach (BitwiseOrGate g in bitwiseOr) {
        System.Console.WriteLine(g.TestGate().ToString());
    }

    // 2's-complement encode/decode of a single negative value.
    ws.Set2sComplement(-5);
    System.Console.WriteLine(ws.Get2sComplement().ToString());

    // Round-trip checks: a non-zero flag (10) marks any failed round trip.
    int test = 0;
    int test2 = 0;
    for (int i = 1; i < 50; i++) {
        ws2.SetValue(i);
        if (ws2.GetValue() != i) {
            test = 10;
        }
    }
    for (int i = -34; i < 50; i++) {
        ws3.Set2sComplement(i);
        if (ws3.Get2sComplement() != i) {
            test2 = 10;
        }
    }
    System.Console.WriteLine(test);
    System.Console.WriteLine(test2);

    // Remaining gate self-tests, in the original order.
    foreach (BitwiseNotGate g in bitwiseNot) {
        System.Console.WriteLine(g.TestGate().ToString());
    }
    foreach (BitwiseDemux g in bitwiseDemux) {
        System.Console.WriteLine(g.TestGate().ToString());
    }
    foreach (BitwiseMux g in bitwiseMux) {
        System.Console.WriteLine(g.TestGate().ToString());
    }
    System.Console.WriteLine(multiwayMux.TestGate().ToString());
    System.Console.WriteLine(multiwayDemux.TestGate().ToString());
    System.Console.WriteLine(halfAdder.TestGate().ToString());
    System.Console.WriteLine(fullAdder.TestGate().ToString());
    System.Console.WriteLine(adder.TestGate().ToString());
    System.Console.WriteLine(alu.TestGate().ToString());
}
/// <summary>
/// Loads an animated WebP from Resources and decodes every frame into an RGBA32
/// texture paired with its presentation timestamp (milliseconds, per libwebp).
/// Frames are flipped vertically in managed code because libwebp's anim decoder
/// emits rows top-down while Unity textures are bottom-up.
/// </summary>
/// <param name="loadPath">Resources path of the TextAsset containing the WebP bytes.</param>
/// <returns>List of (texture, timestamp) pairs, one per decoded frame.</returns>
unsafe List<(Texture2D, int)> LoadAnimation(string loadPath)
{
    List<ValueTuple<Texture2D, int>> ret = new List<ValueTuple<Texture2D, int>>();
    TextAsset textasset = Resources.Load<TextAsset>(loadPath);
    byte[] bytes = textasset.bytes;

    WebPAnimDecoderOptions option = new WebPAnimDecoderOptions {
        use_threads = 1,
        color_mode = WEBP_CSP_MODE.MODE_RGBA,
    };
    Demux.WebPAnimDecoderOptionsInit(ref option);

    // Pin the managed byte[] so native libwebp can read it directly.
    fixed (byte* p = bytes) {
        IntPtr ptr = (IntPtr)p;
        WebPData webpdata = new WebPData {
            bytes = ptr,
            size = new UIntPtr((uint)bytes.Length),
        };
        IntPtr dec = Demux.WebPAnimDecoderNew(ref webpdata, ref option);

        WebPAnimInfo anim_info = new WebPAnimInfo();
        Demux.WebPAnimDecoderGetInfo(dec, ref anim_info);
        Debug.LogWarning($"{anim_info.frame_count} {anim_info.canvas_width}/{anim_info.canvas_height}");

        // Each RGBA frame is canvas_width * canvas_height * 4 bytes.
        int size = anim_info.canvas_width * 4 * anim_info.canvas_height;

        IntPtr unmanagedPointer = new IntPtr();
        int timestamp = 0;
        for (int i = 0; i < anim_info.frame_count; ++i) {
            // FIX: the return value of WebPAnimDecoderGetNext was previously
            // ignored; on failure the stale buffer would be loaded as a frame.
            // Check it (consistent with the other LoadAnimation variant) and stop.
            int result = Demux.WebPAnimDecoderGetNext(dec, ref unmanagedPointer, ref timestamp);
            if (result != 1) {
                Debug.LogError($"WebPAnimDecoderGetNext failed at frame {i}");
                break;
            }

            int lWidth = anim_info.canvas_width;
            int lHeight = anim_info.canvas_height;
            bool lMipmaps = false;
            bool lLinear = false;
            Texture2D texture = new Texture2D(lWidth, lHeight, TextureFormat.RGBA32, lMipmaps, lLinear);
            texture.LoadRawTextureData(unmanagedPointer, size);

            {// Flip updown.
                // ref: https://github.com/netpyoung/unity.webp/issues/18
                // ref: https://github.com/webmproject/libwebp/blob/master/src/demux/anim_decode.c#L309
                Color[] pixels = texture.GetPixels();
                Color[] pixelsFlipped = new Color[pixels.Length];
                for (int y = 0; y < anim_info.canvas_height; y++) {
                    Array.Copy(pixels, y * anim_info.canvas_width,
                        pixelsFlipped, (anim_info.canvas_height - y - 1) * anim_info.canvas_width,
                        anim_info.canvas_width);
                }
                texture.SetPixels(pixelsFlipped);
            }
            texture.Apply();
            ret.Add((texture, timestamp));
        }

        // Release the native decoder.
        Demux.WebPAnimDecoderReset(dec);
        Demux.WebPAnimDecoderDelete(dec);
    }
    return ret;
}
/// <summary>
/// Loads an animated WebP from Resources and decodes every frame into an RGBA32
/// texture paired with its timestamp. Unlike the GetPixels-based variant, this one
/// asks libwebp itself to flip the rows by patching the native decoder's config
/// (flip = 1) through a Marshal round-trip.
/// </summary>
/// <param name="loadPath">Resources path of the TextAsset containing the WebP bytes.</param>
/// <returns>List of (texture, timestamp) pairs, one per decoded frame.</returns>
unsafe List<(Texture2D, int)> LoadAnimation(string loadPath)
{
    List<ValueTuple<Texture2D, int>> ret = new List<ValueTuple<Texture2D, int>>();
    TextAsset textasset = Resources.Load<TextAsset>(loadPath);
    byte[] bytes = textasset.bytes;

    WebPAnimDecoderOptions option = new WebPAnimDecoderOptions {
        use_threads = 1,
        color_mode = WEBP_CSP_MODE.MODE_RGBA
    };
    Demux.WebPAnimDecoderOptionsInit(ref option);

    // Pin the managed byte[] so native libwebp can read it without a copy.
    fixed (byte* p = bytes)
    {
        IntPtr ptr = (IntPtr)p;
        WebPData webpdata = new WebPData {
            bytes = ptr,
            size = new UIntPtr((uint)bytes.Length)
        };
        IntPtr dec = Demux.WebPAnimDecoderNew(ref webpdata, ref option);

        WebPAnimInfo anim_info = new WebPAnimInfo();
        Demux.WebPAnimDecoderGetInfo(dec, ref anim_info);
        Debug.LogWarning($"{anim_info.frame_count} {anim_info.canvas_width}/{anim_info.canvas_height}");

        // Bytes per RGBA frame: width * 4 channels * height.
        int size = anim_info.canvas_width * 4 * anim_info.canvas_height;

        // Copy the native decoder struct into managed memory, flip two option
        // flags, and write it back. NOTE(review): this assumes WebPAnimDecoder's
        // managed layout exactly matches the native struct for this libwebp
        // build — fragile across library versions; confirm before upgrading.
        WebPAnimDecoder decoder = (WebPAnimDecoder)Marshal.PtrToStructure(dec, typeof(WebPAnimDecoder));
        decoder.config_.options.flip = 1;               // let libwebp emit rows bottom-up for Unity
        decoder.config_.options.no_fancy_upsampling = 1;
        Marshal.StructureToPtr(decoder, dec, true);

        IntPtr unmanagedPointer = new IntPtr();
        int timestamp = 0;
        for (int i = 0; i < anim_info.frame_count; ++i)
        {
            // GetNext returns 1 on success; the frame buffer pointer and
            // timestamp are written into the ref parameters.
            int result = Demux.WebPAnimDecoderGetNext(dec, ref unmanagedPointer, ref timestamp);
            if (result != 1)
            {
                Debug.LogError("WTF");
            }
            int lWidth = anim_info.canvas_width;
            int lHeight = anim_info.canvas_height;
            bool lMipmaps = false;
            bool lLinear = false;
            Texture2D texture = new Texture2D(lWidth, lHeight, TextureFormat.RGBA32, lMipmaps, lLinear);
            // Copy the decoded RGBA bytes straight from native memory into the texture.
            texture.LoadRawTextureData(unmanagedPointer, size);
            // makeNoLongerReadable frees the CPU-side copy; the texture cannot be read back after this.
            texture.Apply(updateMipmaps: false, makeNoLongerReadable: true);
            ret.Add((texture, timestamp));
        }

        // Release the native decoder.
        Demux.WebPAnimDecoderReset(dec);
        Demux.WebPAnimDecoderDelete(dec);
    }
    return (ret);
}
/// <summary>
/// Loads an animated WebP from Resources using the demux + per-frame WebPDecode
/// path and returns RGBA32 textures with cumulative timestamps (ms).
/// </summary>
/// <param name="loadPath">Resources path of the TextAsset containing the WebP bytes.</param>
/// <returns>List of (texture, timestamp) pairs; empty when no frame could be read.</returns>
/// <exception cref="Exception">On decoder-config init or feature-parse failure.</exception>
private unsafe List<(Texture2D, int)> LoadAnimation2(string loadPath)
{
    List<ValueTuple<Texture2D, int>> ret = new List<ValueTuple<Texture2D, int>>();
    TextAsset textasset = Resources.Load<TextAsset>(loadPath);
    byte[] bytes = textasset.bytes;

    var config = new WebPDecoderConfig();
    if (Decode.WebPInitDecoderConfig(ref config) == 0)
    {
        throw new Exception("WebPInitDecoderConfig failed. Wrong version?");
    }

    var iter = new WebPIterator();
    IntPtr webpDataPtr = Marshal.AllocHGlobal(sizeof(WebPData));
    IntPtr configPtr = Marshal.AllocHGlobal(Marshal.SizeOf(config));
    IntPtr iterPtr = Marshal.AllocHGlobal(Marshal.SizeOf(iter));
    try
    {
        // Pin the managed bytes for the lifetime of the native demuxer.
        fixed (byte* p = bytes)
        {
            IntPtr ptr = (IntPtr)p;
            WebPData webpdata = new WebPData {
                bytes = ptr,
                size = new UIntPtr((uint)bytes.Length),
            };
            Marshal.StructureToPtr(webpdata, webpDataPtr, false);
            Marshal.StructureToPtr(config, configPtr, false);
            Marshal.StructureToPtr(iter, iterPtr, false);

            IntPtr webPDemuxer = Demux.WebPDemuxInternal(webpDataPtr, 0, (IntPtr)0, Demux.WEBP_DEMUX_ABI_VERSION);
            try
            {
                VP8StatusCode result = Decode.WebPGetFeatures(webpdata.bytes, webpdata.size, ref config.input);
                if (result != VP8StatusCode.VP8_STATUS_OK)
                {
                    throw new Exception(string.Format("Failed WebPGetFeatures with error {0}.", result.ToString()));
                }

                // FIX: width was copy-pasted from height (`config.input.height`),
                // which corrupted every non-square animation.
                var height = config.input.height;
                var width = config.input.width;

                config.options.bypass_filtering = 0;
                config.options.use_threads = 1;
                config.options.no_fancy_upsampling = 0;
                config.options.use_cropping = 0;
                // Scaling to the source size is a no-op dimensionally, but forces
                // libwebp down the scaling path; flip=1 makes rows bottom-up for Unity.
                config.options.use_scaling = 1;
                config.options.scaled_width = width;
                config.options.scaled_height = height;
                config.options.flip = 1;
                config.options.dithering_strength = 0;
                config.output.colorspace = WEBP_CSP_MODE.MODE_RGBA;
                config.output.width = width;
                config.output.height = height;

                // FIX: the early `return ret` on GetFrame failure previously leaked
                // the demuxer; cleanup now always runs below.
                int success = Demux.WebPDemuxGetFrame(webPDemuxer, 1, ref iter);
                if (success == 1)
                {
                    int timestamp = 0;
                    int size = width * height * 4; // bytes per RGBA frame
                    do
                    {
                        WebPData frame = iter.fragment;
                        VP8StatusCode status = Decode.WebPDecode(frame.bytes, frame.size, ref config);
                        if (status != VP8StatusCode.VP8_STATUS_OK)
                        {
                            Debug.LogError(status);
                            break;
                        }
                        var texture = new Texture2D(width, height, TextureFormat.RGBA32, mipChain: false, linear: false);
                        texture.LoadRawTextureData(config.output.u.RGBA.rgba, size);
                        texture.Apply(updateMipmaps: false, makeNoLongerReadable: true);
                        // Timestamps are cumulative frame durations.
                        timestamp += iter.duration;
                        ret.Add((texture, timestamp));
                    } while (Demux.WebPDemuxNextFrame(ref iter) == 1);
                }
            }
            finally
            {
                // FIX: release the iterator BEFORE deleting the demuxer that owns
                // the frames it points into (was the other way around).
                Demux.WebPDemuxReleaseIterator(ref iter);
                Demux.WebPDemuxDelete(webPDemuxer);
            }
        }
    }
    finally
    {
        Marshal.FreeHGlobal(webpDataPtr);
        Marshal.FreeHGlobal(configPtr);
        Marshal.FreeHGlobal(iterPtr);
    }
    return ret;
}
/// <summary>
/// Loads an animated WebP from Resources via the anim-decoder API, but replaces the
/// decoder's entire native decode config through a Marshal round-trip (to set
/// flip/use_threads/colorspace) before pulling frames. Returns RGBA32 textures with
/// their timestamps.
/// </summary>
/// <param name="loadPath">Resources path of the TextAsset containing the WebP bytes.</param>
/// <returns>List of (texture, timestamp) pairs, one per decoded frame.</returns>
/// <exception cref="Exception">When WebPInitDecoderConfig fails (ABI/version mismatch).</exception>
unsafe List<(Texture2D, int)> LoadAnimation3(string loadPath)
{
    List<ValueTuple<Texture2D, int>> ret = new List<ValueTuple<Texture2D, int>>();
    TextAsset textasset = Resources.Load<TextAsset>(loadPath);
    byte[] bytes = textasset.bytes;

    // NOTE(review): `option` is initialized but never passed to the decoder below
    // (`opt` is used instead) — looks like leftover experimentation; confirm and remove.
    WebPAnimDecoderOptions option = new WebPAnimDecoderOptions {
        use_threads = 1,
        color_mode = WEBP_CSP_MODE.MODE_RGBA
    };
    var config = new WebPDecoderConfig();
    if (Decode.WebPInitDecoderConfig(ref config) == 0)
    {
        throw new Exception("WebPInitDecoderConfig failed. Wrong version?");
    }
    Demux.WebPAnimDecoderOptionsInit(ref option);

    // Pin the managed byte[] so native libwebp can read it directly.
    fixed (byte* p = bytes)
    {
        IntPtr ptr = (IntPtr)p;
        var webpdata = new WebPData {
            bytes = ptr,
            size = new UIntPtr((uint)bytes.Length)
        };
        // Default-initialized options actually used to create the decoder.
        WebPAnimDecoderOptions opt = new WebPAnimDecoderOptions();
        Demux.WebPAnimDecoderOptionsInit(ref opt);
        IntPtr webPAnimDecoderPtr = Demux.WebPAnimDecoderNewInternal(ref webpdata, ref opt, Demux.WEBP_DEMUX_ABI_VERSION);
        Debug.Log($"webPAnimDecoderPtr = {webPAnimDecoderPtr}");

        // Copy the native decoder struct to managed memory so its embedded decode
        // config can be replaced wholesale. NOTE(review): assumes the managed
        // WebPAnimDecoder layout matches this libwebp build exactly — fragile.
        WebPAnimDecoder decoder = (WebPAnimDecoder)Marshal.PtrToStructure(webPAnimDecoderPtr, typeof(WebPAnimDecoder));
        //int width = 400;
        //int height = 400;
        {
            //config.input.has_alpha = 1;
            //config.options.bypass_filtering = 1;
            //config.options.no_fancy_upsampling = 1;
            config.options.use_threads = 1;
            //config.options.no_fancy_upsampling = 0;
            //config.options.use_cropping = 0;
            //config.options.use_scaling = 1;
            //config.options.scaled_width = width;
            //config.options.scaled_height = height;
            config.options.flip = 1;  // emit rows bottom-up so Unity textures aren't upside down
            //config.options.dithering_strength = 100;
            config.output.colorspace = WEBP_CSP_MODE.MODE_RGBA;
            //config.output.is_external_memory = 1;
            //config.output.width = width;
            //config.output.height = height;
        }
        // Write the patched config back into the native decoder.
        decoder.config_ = config;
        Marshal.StructureToPtr(decoder, webPAnimDecoderPtr, true);
        IntPtr dec = webPAnimDecoderPtr;

        WebPAnimInfo anim_info = new WebPAnimInfo();
        Demux.WebPAnimDecoderGetInfo(dec, ref anim_info);
        Debug.LogWarning($"{anim_info.frame_count} {anim_info.canvas_width}/{anim_info.canvas_height}");

        // Bytes per RGBA frame: width * 4 channels * height.
        int size = anim_info.canvas_width * 4 * anim_info.canvas_height;
        IntPtr unmanagedPointer = new IntPtr();
        int timestamp = 0;
        for (int i = 0; i < anim_info.frame_count; ++i)
        {
            // NOTE(review): result is unchecked here — on failure the previous
            // buffer is re-uploaded; consider checking result != 1 as elsewhere.
            int result = Demux.WebPAnimDecoderGetNext(dec, ref unmanagedPointer, ref timestamp);
            int lWidth = anim_info.canvas_width;
            int lHeight = anim_info.canvas_height;
            bool lMipmaps = false;
            bool lLinear = false;
            Texture2D texture = new Texture2D(lWidth, lHeight, TextureFormat.RGBA32, lMipmaps, lLinear);
            // Copy the decoded RGBA bytes straight from native memory.
            texture.LoadRawTextureData(unmanagedPointer, size);
            // makeNoLongerReadable frees the CPU-side copy after upload.
            texture.Apply(updateMipmaps: false, makeNoLongerReadable: true);
            ret.Add((texture, timestamp));
        }

        // Release the native decoder.
        Demux.WebPAnimDecoderReset(dec);
        Demux.WebPAnimDecoderDelete(dec);
    }
    return (ret);
}
/// <summary>
/// Initializes the decoder for playing back an audio stream. Produces an audio clip
/// (streaming or preloaded); Unity pulls the actual sample data through the
/// OnAudioRead/OnAudioSeek callbacks.
/// </summary>
/// <param name="audioClip">Receives the created audio clip.</param>
/// <param name="demux">Demux supplying encoded audio samples.</param>
/// <param name="loadOptions">Load options; LoadOptions.Default when null.</param>
public override void Init (out AudioClip audioClip, Demux demux, LoadOptions loadOptions = null)
{
    // Fall back to default options, then validate the demux.
    loadOptions = loadOptions != null ? loadOptions : LoadOptions.Default;
    if (demux == null) {
        throw new ArgumentException ("Missing Demux to get audio samples for decoding");
    }

    this.demux = demux;
    this._totalDecodeTime = 0;
    this.watch = new System.Diagnostics.Stopwatch ();

    // It'd be safer to do this inside a lock, but Unity tends to crash on that.
#if UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6
    this.audioClip = AudioClip.Create ("_movie_audio_", streamInfo.sampleCount, streamInfo.channels,
        streamInfo.sampleRate, loadOptions._3DSound, !loadOptions.preloadAudio, OnAudioRead, OnAudioSeek);
#else
    // Unity 5+ deprecated the 3D flag (now AudioSource.spatialBlend), so the
    // shorter overload is used.
    this.audioClip = AudioClip.Create ("_movie_audio_", streamInfo.sampleCount, streamInfo.channels,
        streamInfo.sampleRate, !loadOptions.preloadAudio, OnAudioRead, OnAudioSeek);
#endif
    audioClip = this.audioClip;
}