public GifFrame( Stream inputStream, LogicalScreenDescriptor lsd, ColourTable gct, GraphicControlExtension gce, GifFrame previousFrame, GifFrame previousFrameBut1 ) : base( inputStream, lsd, gct, gce, previousFrame, previousFrameBut1 ) {}
/// <summary> /// Initializes a new instance of the <see cref="T:DesktopSprites.Forms.GifControl"/> class to display the given frame. /// </summary> /// <param name="frame">The frame to be displayed.</param> /// <param name="info">The information string to be displayed.</param> /// <exception cref="T:System.ArgumentNullException"><paramref name="frame"/> is null.</exception> public GifControl(GifFrame<BitmapFrame> frame, string info) { Argument.EnsureNotNull(frame, "frame"); InitializeComponent(); this.frame = frame; FrameInfo.Text = info; Width = Math.Max(frame.Image.Width + 8, FrameInfo.Width) + Padding.Horizontal; Height = frame.Image.Height + FrameInfo.Height + Padding.Vertical + 4; Disposed += (sender, e) => frame.Image.Dispose(); }
/// <summary>
/// Adds a frame to this animation.
/// </summary>
/// <param name="Image">The image to add. The image is disposed once it has been encoded.</param>
/// <param name="frameDelay">The display duration of this frame, or null to use <c>DefaultFrameDelay</c>.</param>
/// <param name="XOffset">The positioning x offset this image should be displayed at.</param>
/// <param name="YOffset">The positioning y offset this image should be displayed at.</param>
public void AddFrame(Image Image, double? frameDelay = null, int XOffset = 0, int YOffset = 0)
{
    using (var gifStream = new MemoryStream())
    {
        GifFrame Frame = new GifFrame(Image, frameDelay ?? DefaultFrameDelay, XOffset, YOffset);
        Frame.Image.Save(gifStream, ImageFormat.Gif);

        // Steal the global color table info from the frame's own GIF encoding
        if (countOfFrame == 0)
        {
            InitHeader(gifStream, Writer, Frame.Image.Width, Frame.Image.Height);
        }

        WriteGraphicControlBlock(gifStream, Writer, Frame.Delay);
        WriteImageBlock(gifStream, Writer, countOfFrame != 0, Frame.XOffset, Frame.YOffset, Frame.Image.Width, Frame.Image.Height);
        countOfFrame++;
    }
    Image.Dispose();
}
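// A minimal usage sketch for the AddFrame method above, assuming it lives on a writer class
// (called GifWriter here purely for illustration) constructed over an output stream and
// finalized when disposed; the class name and constructor are assumptions, only AddFrame's
// signature comes from the snippet.
using System.Drawing;
using System.IO;

using (var output = File.Create("animation.gif"))
using (var writer = new GifWriter(output)) // hypothetical constructor
{
    foreach (string path in new[] { "frame0.png", "frame1.png", "frame2.png" })
    {
        // AddFrame encodes the image and then disposes it, so no further cleanup is needed here.
        writer.AddFrame(Image.FromFile(path), frameDelay: 100, XOffset: 0, YOffset: 0);
    }
}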
/// <summary> /// Reads next frame image /// </summary> protected void ReadImage() { Ix = ReadShort(); // (sub)image position & size Iy = ReadShort(); Iw = ReadShort(); Ih = ReadShort(); int packed = Inp.ReadByte(); LctFlag = (packed & 0x80) != 0; // 1 - local color table flag Interlace = (packed & 0x40) != 0; // 2 - interlace flag // 3 - sort flag // 4-5 - reserved LctSize = 2 << (packed & 7); // 6-8 - local color table size MBpc = NewBpc(MGbpc); if (LctFlag) { MCurrTable = ReadColorTable((packed & 7) + 1); // read table MBpc = NewBpc((packed & 7) + 1); } else { MCurrTable = MGlobalTable; } if (Transparency && TransIndex >= MCurrTable.Length / 3) { Transparency = false; } if (Transparency && MBpc == 1) { // Acrobat 5.05 doesn't like this combination byte[] tp = new byte[12]; Array.Copy(MCurrTable, 0, tp, 0, 6); MCurrTable = tp; MBpc = 2; } bool skipZero = DecodeImageData(); // decode pixel data if (!skipZero) { Skip(); } Image img = null; img = new ImgRaw(Iw, Ih, 1, MBpc, MOut); PdfArray colorspace = new PdfArray(); colorspace.Add(PdfName.Indexed); colorspace.Add(PdfName.Devicergb); int len = MCurrTable.Length; colorspace.Add(new PdfNumber(len / 3 - 1)); colorspace.Add(new PdfString(MCurrTable)); PdfDictionary ad = new PdfDictionary(); ad.Put(PdfName.Colorspace, colorspace); img.Additional = ad; if (Transparency) { img.Transparency = new[] { TransIndex, TransIndex }; } img.OriginalType = Image.ORIGINAL_GIF; img.OriginalData = FromData; img.Url = FromUrl; GifFrame gf = new GifFrame(); gf.Image = img; gf.Ix = Ix; gf.Iy = Iy; Frames.Add(gf); // add image to frame list //ResetFrame(); }
/// <summary> /// Gets the [x,y] position of the frame in reference to the /// logical screen. /// </summary> /// <param name="frame">the frame</param> /// <returns>the [x,y] position of the frame</returns> public int[] GetFramePosition(int frame) { GifFrame gf = (GifFrame)Frames[frame - 1]; return(new[] { gf.Ix, gf.Iy }); }
private void ParseGif(byte[] gifData) { frameList = new List <GifFrame>(); currentParseGifFrame = new GifFrame(); ParseGifDataStream(gifData, 0); }
void Update() { // process UnityEvents invoked from the worker threads while (unityEventQueue.Count > 0) { unityEventQueue.Dequeue().Invoke(); } passedTime += Time.unscaledDeltaTime; if (passedTime >= timePerFrame) { if (State != CamcorderSate.Stopped && !attachedCamera.enabled) { attachedCamera.Render(); } if (IsSaving && gifFramesToGrab > 0) { if (rawFrames.Peek() != null) { asyncRequests.Enqueue(AsyncGPUReadback.Request(rawFrames.Peek())); } } } if (asyncRequests.Count > 0 && asyncRequests.Peek().done) { var req = asyncRequests.Dequeue(); if (!req.hasError) { GifFrame frame = new GifFrame() { Width = width, Height = height, Data = req.GetData <Color32>().ToArray(), ID = frameID++ }; // Sanity check the frame if (frame.Width * frame.Height == frame.Data.Length) { lock (grabbedGifFrames) { grabbedGifFrames.Add(frame); } gifFramesToGrab--; } else { Debug.Log("Discarding bad frame."); } } #if UNITY_EDITOR else { failedAsyncGPUReadbackRequest++; } #endif } if (encodingJobs.Count > 0 && !IsSaving) { IsSaving = true; encodingJobs[0].Start(); encodingJobs.RemoveAt(0); } }
protected void SetPixels()
{
    byte[] dest = new byte[bitmap.Width * bitmap.Height];

    // fill in starting image contents based on last image's dispose code
    if (lastDispose > 0)
    {
        if (lastDispose == 3)
        {
            // use image before last
            int n = frameCount - 2;
            if (n > 0)
            {
                lastImage = frames[n - 1];
            }
            else
            {
                lastImage = null;
            }
        }
        if ((lastImage != null) && (lastImage.pixels.Length == dest.Length))
        {
            // this frame is based on the previous frame
            Array.Copy(lastImage.pixels, dest, lastImage.pixels.Length);
        }
    }

    // copy each source line to the appropriate place in the destination
    int pass = 1;
    int inc = 8;
    int iline = 0;
    for (int i = 0; i < ih; i++)
    {
        int line = i;
        if (interlace)
        {
            if (iline >= ih)
            {
                pass++;
                switch (pass)
                {
                    case 2:
                        iline = 4;
                        break;
                    case 3:
                        iline = 2;
                        inc = 4;
                        break;
                    case 4:
                        iline = 1;
                        inc = 2;
                        break;
                }
            }
            line = iline;
            iline += inc;
        }
        line += iy;
        if (line < height)
        {
            int k = line * width;
            int dx = k + ix;    // start of line in dest
            int dlim = dx + iw; // end of dest line
            if ((k + width) < dlim)
            {
                dlim = k + width; // past dest edge
            }
            int sx = i * iw; // start of line in source
            while (dx < dlim)
            {
                dest[dx] = pixels[sx++];
                dx++;
            }
        }
    }

    Rectangle rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
    BitmapData data = bitmap.LockBits(rect, ImageLockMode.WriteOnly, PixelFormat.Format8bppIndexed);
    IntPtr ptr = data.Scan0;
    for (int i = 0; i < bitmap.Height; i++)
    {
        System.Runtime.InteropServices.Marshal.Copy(dest, i * bitmap.Width, ptr, bitmap.Width);
        // Advance by the stride using 64-bit arithmetic so this also works in 64-bit processes.
        ptr = new IntPtr(ptr.ToInt64() + data.Stride);
    }
    bitmap.UnlockBits(data);
    pixels = dest;
}
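// The pass/inc/iline bookkeeping above implements GIF interlacing. As a standalone reference
// (not part of the original class), this hedged sketch lists the row order the four passes
// produce: pass 1 starts at row 0 stepping by 8, pass 2 at row 4 by 8, pass 3 at row 2 by 4,
// and pass 4 at row 1 by 2.
using System.Collections.Generic;

static class GifInterlace
{
    public static IEnumerable<int> RowOrder(int height)
    {
        int[] starts = { 0, 4, 2, 1 };
        int[] steps = { 8, 8, 4, 2 };
        for (int pass = 0; pass < 4; pass++)
        {
            for (int row = starts[pass]; row < height; row += steps[pass])
            {
                yield return row;
            }
        }
    }
}
// For a 10-row image this yields 0, 8, 4, 2, 6, 1, 3, 5, 7, 9.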
public LedFrame(GifFrame frame) : base(frame.Image as Bitmap) { // *10 converts the GIF delay from hundredths of a second to milliseconds. this.Delay = frame.Delay * 10; }
/// <summary>
/// Writes a Graphic Control Extension to the supplied output stream.
/// </summary>
/// <param name="frame">
/// The GifFrame to which this graphic control extension relates.
/// </param>
/// <param name="transparentColourIndex">
/// The index within the active colour table of the transparent colour.
/// </param>
/// <param name="outputStream">
/// The stream to write to.
/// </param>
private void WriteGraphicCtrlExt( GifFrame frame, int transparentColourIndex, Stream outputStream )
{
    outputStream.WriteByte( GifComponent.CodeExtensionIntroducer );
    outputStream.WriteByte( GifComponent.CodeGraphicControlLabel );

    // The frame doesn't have a graphic control extension yet, so we
    // need to work out what it would contain.
    DisposalMethod disposalMethod;
    bool hasTransparentColour;
    if( _transparent == Color.Empty ) // TODO: remove reference to _transparent - parameterise?
    {
        hasTransparentColour = false;
        disposalMethod = DisposalMethod.NotSpecified; // dispose = no action
    }
    else
    {
        // TESTME: WriteGraphicCtrlExt - _transparent != Color.Empty
        hasTransparentColour = true;
        disposalMethod = DisposalMethod.RestoreToBackgroundColour; // force clear if using transparent color
    }

    int blockSize = 4;
    GraphicControlExtension gce = new GraphicControlExtension( blockSize,
                                                               disposalMethod,
                                                               frame.ExpectsUserInput,
                                                               hasTransparentColour,
                                                               frame.Delay,
                                                               transparentColourIndex );
    gce.WriteToStream( outputStream );
}
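// For reference, the bytes that WriteGraphicCtrlExt ultimately emits are fixed by the GIF89a
// spec. This standalone sketch (independent of the GifComponents types, purely illustrative)
// writes the same eight-byte block by hand: packed-byte bits 2-4 hold the disposal method,
// bit 1 the user-input flag, bit 0 the transparency flag; the delay is a little-endian count
// of hundredths of a second.
using System.IO;

static class RawGce
{
    public static void Write(Stream output, int disposalMethod, bool expectsUserInput,
                             bool hasTransparency, int delayHundredths, int transparentIndex)
    {
        output.WriteByte(0x21);                                  // extension introducer
        output.WriteByte(0xF9);                                  // graphic control label
        output.WriteByte(0x04);                                  // block size
        int packed = (disposalMethod & 0x07) << 2;
        if (expectsUserInput) packed |= 0x02;
        if (hasTransparency) packed |= 0x01;
        output.WriteByte((byte)packed);
        output.WriteByte((byte)(delayHundredths & 0xFF));        // delay, low byte
        output.WriteByte((byte)((delayHundredths >> 8) & 0xFF)); // delay, high byte
        output.WriteByte((byte)transparentIndex);
        output.WriteByte(0x00);                                  // block terminator
    }
}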
/// <summary> /// Adds a frame to the animation. /// </summary> /// <param name="frame"> /// The frame to add to the animation. /// </param> public void AddFrame( GifFrame frame ) { _frames.Add( frame ); }
/**
 * Reads next frame image
 */
protected void readBitmap()
{
    if (currentFrame == null) currentFrame = new GifFrame();

    currentFrame.ix = readShort(); // (sub)image position & size
    currentFrame.iy = readShort();
    currentFrame.iw = readShort();
    currentFrame.ih = readShort();

    int packed = read();
    lctFlag = (packed & 0x80) != 0;                // 1   - local color table flag
    currentFrame.interlace = (packed & 0x40) != 0; // 2   - interlace flag
                                                   // 3   - sort flag
                                                   // 4-5 - reserved
    lctSize = 2 << (packed & 7);                   // 6-8 - local color table size

    if (lctFlag)
    {
        currentFrame.lct = readColorTable(lctSize); // read table
    }
    else
    {
        currentFrame.lct = null; // no local color table
    }

    currentFrame.bufferFrameStart = rawData.Position(); // save this as the decoding position pointer

    decodeBitmapData(null, mainPixels); // false decode pixel data to advance buffer
    skip();
    if (err())
    {
        return;
    }

    frameCount++;
    frames.Add(currentFrame); // add image to frame
}
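// The packed byte read above follows the GIF image-descriptor layout. This standalone helper
// (not part of the decoder, just a reference sketch) spells out the same bit fields:
static class ImageDescriptorFields
{
    public static (bool hasLocalColorTable, bool interlaced, bool sorted, int tableSize) Decode(int packed)
    {
        bool lct = (packed & 0x80) != 0;        // bit 7:    local color table flag
        bool interlaced = (packed & 0x40) != 0; // bit 6:    interlace flag
        bool sorted = (packed & 0x20) != 0;     // bit 5:    sort flag
        int size = 2 << (packed & 0x07);        // bits 0-2: table holds 2^(n+1) entries
        return (lct, interlaced, sorted, size);
    }
}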
/** * Main file parser. Reads GIF content blocks. */ protected void readContents() { // read GIF file content blocks bool done = false; while (!(done || err())) { int code = read(); switch (code) { case 0x2C: // image separator readBitmap(); break; case 0x21: // extension code = read(); switch (code) { case 0xf9: // graphics control extension //Start a new frame currentFrame = new GifFrame(); readGraphicControlExt(); break; case 0xff: // application extension readBlock(); String app = ""; for (int i = 0; i < 11; i++) { app += (char)block[i]; } if (app.Equals("NETSCAPE2.0")) { readNetscapeExt(); } else { skip(); // don't care } break; case 0xfe:// comment extension skip(); break; case 0x01:// plain text extension skip(); break; default: // uninteresting extension skip(); break; } break; case 0x3b: // terminator done = true; break; case 0x00: // bad byte, but keep going and see what happens break; default: status = STATUS_FORMAT_ERROR; break; } } }
/** * Decodes LZW image data into pixel array. Adapted from John Cristy's BitmapMagick. */ void decodeBitmapData(GifFrame frame, byte[] dstPixels) { long startTime = DateTime.Now.Millisecond; long stepOne, stepTwo, stepThree; if (frame != null) { //Jump to the frame start position rawData.Position(frame.bufferFrameStart); } int nullCode = -1; int npix = (frame == null) ? width * height : frame.iw * frame.ih; int available, clear, code_mask, code_size, end_of_information, in_code, old_code, bits, code, count, i, datum, data_size, first, top, bi, pi; if (dstPixels == null || dstPixels.Length < npix) { dstPixels = new byte[npix]; // allocate new pixel array } if (prefix == null) { prefix = new short[MAX_STACK_SIZE]; } if (suffix == null) { suffix = new byte[MAX_STACK_SIZE]; } if (pixelStack == null) { pixelStack = new byte[MAX_STACK_SIZE + 1]; } // Initialize GIF data stream decoder. data_size = read(); clear = 1 << data_size; end_of_information = clear + 1; available = clear + 2; old_code = nullCode; code_size = data_size + 1; code_mask = (1 << code_size) - 1; for (code = 0; code < clear; code++) { prefix[code] = 0; // XXX ArrayIndexOutOfBoundsException suffix[code] = (byte)code; } // Decode GIF pixel stream. datum = bits = count = first = top = pi = bi = 0; for (i = 0; i < npix;) { if (top == 0) { if (bits < code_size) { // Load bytes until there are enough bits for a code. if (count == 0) { // Read a new data block. count = readBlock(); if (count <= 0) { break; } bi = 0; } datum += (((int)block[bi]) & 0xff) << bits; bits += 8; bi++; count--; continue; } // Get the next code. code = datum & code_mask; datum >>= code_size; bits -= code_size; // Interpret the code if ((code > available) || (code == end_of_information)) { break; } if (code == clear) { // Reset decoder. code_size = data_size + 1; code_mask = (1 << code_size) - 1; available = clear + 2; old_code = nullCode; continue; } if (old_code == nullCode) { pixelStack[top++] = suffix[code]; old_code = code; first = code; continue; } in_code = code; if (code == available) { pixelStack[top++] = (byte)first; code = old_code; } while (code > clear) { pixelStack[top++] = suffix[code]; code = prefix[code]; } first = ((int)suffix[code]) & 0xff; // Add a new string to the string table, if (available >= MAX_STACK_SIZE) { break; } pixelStack[top++] = (byte)first; prefix[available] = (short)old_code; suffix[available] = (byte)first; available++; if (((available & code_mask) == 0) && (available < MAX_STACK_SIZE)) { code_size++; code_mask += available; } old_code = in_code; } // Pop a pixel off the pixel stack. top--; dstPixels[pi++] = pixelStack[top]; i++; } for (i = pi; i < npix; i++) { dstPixels[i] = 0; // clear missing pixels } }
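// Hedged reference for the decoder bootstrap above: the clear code, end-of-information code,
// first free dictionary slot and initial code width are all derived from the minimum code size
// byte that precedes the LZW data (the derivations are fixed by the GIF spec; the helper itself
// is only a sketch).
static (int clear, int endOfInformation, int firstAvailable, int initialCodeSize) LzwBootstrap(int minimumCodeSize)
{
    int clear = 1 << minimumCodeSize;
    return (clear, clear + 1, clear + 2, minimumCodeSize + 1);
}
// For 8-bit image data: clear = 256, end-of-information = 257, first free slot = 258,
// and codes start out 9 bits wide.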
public ActionResult Timer(long?timestamp) { string title = null; try { byte[] output = null; using (MemoryStream stream = new MemoryStream()) { int repeatCount = 0; //repeat forever DateTime end = Epoch.AddSeconds(timestamp ?? 0); title = end.ToString(); TimeSpan remaining = end - DateTime.UtcNow; if (remaining.TotalSeconds < 0) { remaining = TimeSpan.FromSeconds(0); } if (remaining.TotalSeconds <= 60) { repeatCount = -1; //don't repeat } using (Image background = Image.FromFile(Server.MapPath(BackgroundPath))) { using (Font font = new Font(FontName, FontSize, FontBold ? FontStyle.Bold : FontStyle.Regular, GraphicsUnit.Pixel)) { using (var disposer = new Disposer()) { var target = new SynchronizeInvokeStub(); AnimatedGifEncoder encoder = new AnimatedGifEncoder(target); encoder.RepeatCount = repeatCount; encoder.OutputStream = stream; encoder.QuantizerType = GifComponents.Quantizing.QuantizerType.Octree; { int count = 0; while (remaining.TotalSeconds >= 0 && count < 60) { Bitmap bitmap = disposer.Track(new Bitmap(background)); using (Graphics g = Graphics.FromImage(bitmap)) { StringFormat format = new StringFormat(); format.Alignment = StringAlignment.Center; format.LineAlignment = StringAlignment.Center; g.TextRenderingHint = System.Drawing.Text.TextRenderingHint.AntiAliasGridFit; string days, hours, minutes, seconds; if (remaining.Days > 99) { days = "--"; hours = "--"; minutes = "--"; seconds = "--"; count = 99; //causes the loop to end after this one } else { days = remaining.Days.ToString("00"); hours = remaining.Hours.ToString("00"); minutes = remaining.Minutes.ToString("00"); seconds = remaining.Seconds.ToString("00"); } g.DrawString(days, font, Brushes.White, new RectangleF(ImageX, ImageY, ImageWidth / 4, ImageHeight), format); g.DrawString(hours, font, Brushes.White, new RectangleF(ImageX + (ImageWidth / 4), ImageY, ImageWidth / 4, ImageHeight), format); g.DrawString(minutes, font, Brushes.White, new RectangleF(ImageX + (2 * ImageWidth / 4), ImageY, ImageWidth / 4, ImageHeight), format); g.DrawString(seconds, font, Brushes.White, new RectangleF(ImageX + (3 * ImageWidth / 4), ImageY, ImageWidth / 4, ImageHeight), format); g.DrawString(":", font, Brushes.White, new RectangleF(ImageX + (ImageWidth / 4) - ImageWidth / 8, ImageY, ImageWidth / 4, ImageHeight), format); g.DrawString(":", font, Brushes.White, new RectangleF(ImageX + (2 * ImageWidth / 4) - ImageWidth / 8, ImageY, ImageWidth / 4, ImageHeight), format); g.DrawString(":", font, Brushes.White, new RectangleF(ImageX + (3 * ImageWidth / 4) - ImageWidth / 8, ImageY, ImageWidth / 4, ImageHeight), format); } var frame = new GifFrame(bitmap); frame.Delay = 100; encoder.AddFrame(frame); count++; remaining = remaining.Subtract(OneSecond); } } encoder.Start(); encoder.WaitUntilDone(); } } } output = stream.ToArray(); } return(new FileContentResult(output, "image/gif")); } catch { return(new FilePathResult(Server.MapPath(BackgroundPath), "image/gif")); } finally { Clicky.TrackRequest(Request, ActionType.PageView, "Timer: " + title); } }
// GIF exporting coroutine: preprocess the image data then send it to native code (mobile) or a worker thread (other platforms) to export the GIF file.
static IEnumerator CRExportGif(AnimatedClip clip, string filename, int loop, int quality, System.Threading.ThreadPriority threadPriority, Action<AnimatedClip, float> exportProgressCallback, Action<AnimatedClip, string> exportCompletedCallback)
{
    // The encoder doesn't want loop to be < -1
    if (loop < -1)
    {
        loop = -1;
    }

    // Compute the NeuQuant sample factor from the inverse of the quality value.
    // Note that NeuQuant prefers values in range [1,30] so we'll also scale the factor to that range.
    int sampleFac = Mathf.RoundToInt(Mathf.Lerp(30, 1, (float)(Mathf.Clamp(quality, 1, 100)) / 100));

    // Construct filepath
    string folder;
#if UNITY_EDITOR
    folder = Application.dataPath; // Assets folder
#else
    folder = Application.persistentDataPath;
#endif
    string filepath = System.IO.Path.Combine(folder, filename + ".gif");

    // Construct a new export task
    var exportTask = new GifExportTask();
    exportTask.taskId = curExportId++; // assign this task a unique id
    exportTask.clip = clip;
    exportTask.imageData = null;
    exportTask.filepath = filepath;
    exportTask.loop = loop;
    exportTask.sampleFac = sampleFac;
    exportTask.exportProgressCallback = exportProgressCallback;
    exportTask.exportCompletedCallback = exportCompletedCallback;
    exportTask.workerPriority = threadPriority;
    exportTask.isExporting = true;
    exportTask.isDone = false;
    exportTask.progress = 0;

    // Add task to the list with its unique id key
    gifExportTasks.Add(exportTask.taskId, exportTask);

    yield return null;

    // Create a temporary texture to read RenderTexture data
    Texture2D temp = new Texture2D(clip.Width, clip.Height, TextureFormat.RGB24, false);
    temp.hideFlags = HideFlags.HideAndDontSave;
    temp.wrapMode = TextureWrapMode.Clamp;
    temp.filterMode = FilterMode.Bilinear;
    temp.anisoLevel = 0;

    // On iOS and Android, the GIF encoding is done in native code.
    // In the Unity editor (and other platforms), we use the Moments encoder for testing purposes.
#if UNITY_EDITOR || (!UNITY_IOS && !UNITY_ANDROID)
    // Convert to GIF frames
    List<GifFrame> frames = new List<GifFrame>(clip.Frames.Length);
    for (int i = 0; i < clip.Frames.Length; i++)
    {
        if (clip.Frames[i] is RenderTexture)
        {
            RenderTexture source = clip.Frames[i] as RenderTexture;
            RenderTexture.active = source;
            temp.ReadPixels(new Rect(0, 0, source.width, source.height), 0, 0);
            temp.Apply();
            RenderTexture.active = null;
        }
        else if (clip.Frames[i] is Texture2D)
        {
            temp = clip.Frames[i] as Texture2D;
        }
        else
        {
            Debug.LogError("AnimatedClip contains an unrecognized texture. Aborting...");
            yield break;
        }

        GifFrame frame = new GifFrame() { Width = temp.width, Height = temp.height, Data = temp.GetPixels32() };
        frames.Add(frame);

        OnGifPreProcessing(exportTask.taskId, (float)i / clip.Frames.Length);
        yield return null;
    }

    // Setup a worker thread and let it do its magic
    GifEncoder encoder = new GifEncoder(loop, sampleFac);
    encoder.SetDelay(Mathf.RoundToInt(1000f / clip.FramePerSecond));
    Worker worker = new Worker(
        exportTask.taskId,
        threadPriority,
        frames,
        encoder,
        filepath,
        OnGifExportProgress,
        OnGifExportCompleted);
    worker.Start();
#else
    // Allocate an array to hold the serialized image data
    exportTask.imageData = new Color32[clip.Frames.Length][];

    // Construct the serialized image data, note that texture data is layered down-top, so flip it
    for (int i = 0; i < clip.Frames.Length; i++)
    {
        if (clip.Frames[i] is RenderTexture)
        {
            RenderTexture source = clip.Frames[i] as RenderTexture;
            RenderTexture.active = source;
            temp.ReadPixels(new Rect(0, 0, source.width, source.height), 0, 0);
            temp.Apply();
            RenderTexture.active = null;
        }
        else if (clip.Frames[i] is Texture2D)
        {
            temp = clip.Frames[i] as Texture2D;
        }
        else
        {
            Debug.LogError("AnimatedClip contains an unrecognized texture. Aborting...");
            yield break;
        }

        // Get the frame's pixel data
        exportTask.imageData[i] = temp.GetPixels32();

        // Call the preprocessing handler directly
        float progress = (float)i / clip.Frames.Length;
        OnGifPreProcessing(exportTask.taskId, progress);
        yield return null;
    }

#if UNITY_IOS
    iOSNativeGif.ExportGif(exportTask);
#elif UNITY_ANDROID
    AndroidNativeGif.ExportGif(exportTask);
#endif
#endif // UNITY_EDITOR || (!UNITY_IOS && !UNITY_ANDROID)

    // Dispose the temporary texture
    Destroy(temp);
}
private async Task RenderFrameAsync(int frameIndex, CancellationToken cancellationToken) { if (frameIndex < 0) return; var frame = _metadata.Frames[frameIndex]; var desc = frame.Descriptor; using (var indexStream = await GetIndexStreamAsync(frame, cancellationToken)) { #if WPF _bitmap.Lock(); try { #endif if (frameIndex < _previousFrameIndex) ClearArea(_metadata.Header.LogicalScreenDescriptor); else DisposePreviousFrame(frame); int bufferLength = 4 * desc.Width; byte[] indexBuffer = new byte[desc.Width]; byte[] lineBuffer = new byte[bufferLength]; var palette = _palettes[frameIndex]; int transparencyIndex = palette.TransparencyIndex ?? -1; var rows = frame.Descriptor.Interlace ? InterlacedRows(frame.Descriptor.Height) : NormalRows(frame.Descriptor.Height); foreach (int y in rows) { int read = indexStream.Read(indexBuffer, 0, desc.Width); if (read != desc.Width) throw new EndOfStreamException(); int offset = (desc.Top + y) * _stride + desc.Left * 4; if (transparencyIndex >= 0) { CopyFromBitmap(lineBuffer, _bitmap, offset, bufferLength); } for (int x = 0; x < desc.Width; x++) { byte index = indexBuffer[x]; int i = 4 * x; if (index != transparencyIndex) { WriteColor(lineBuffer, palette[index], i); } } CopyToBitmap(lineBuffer, _bitmap, offset, bufferLength); } #if WPF var rect = new Int32Rect(desc.Left, desc.Top, desc.Width, desc.Height); _bitmap.AddDirtyRect(rect); } finally { _bitmap.Unlock(); } #elif WINRT _bitmap.Invalidate(); #endif _previousFrame = frame; _previousFrameIndex = frameIndex; } }
// Pre-processing coroutine to extract frame data and send everything to a separate worker thread IEnumerator PreProcess() { List <GifFrame> frames = new List <GifFrame>(m_Frames.Count); // Get a temporary texture to read RenderTexture data Texture2D temp = new Texture2D(m_Width, m_Height, TextureFormat.RGB24, false); temp.hideFlags = HideFlags.HideAndDontSave; temp.wrapMode = TextureWrapMode.Clamp; temp.filterMode = FilterMode.Bilinear; temp.anisoLevel = 0; // Process the frame queue while (m_Frames.Count > 0) { GifFrame frame = ToGifFrame(m_Frames.Dequeue(), temp); frames.Add(frame); yield return(null); } // Dispose the temporary texture Flush(temp); // Switch the state to pause, let the user choose to keep recording or not State = RecorderState.Paused; // Callback if (OnPreProcessingDone != null) { OnPreProcessingDone(); } // Setup a worker thread and let it do its magic GifEncoder encoder = new GifEncoder(m_Repeat, m_Quality); encoder.SetDelay(Mathf.RoundToInt(m_TimePerFrame * 1000f)); /* * Worker worker = new Worker(WorkerPriority) * { * m_Encoder = encoder, * m_Frames = frames, * m_FilePath = filepath, * m_OnFileSaved = OnFileSaved, * m_OnFileSaveProgress = OnFileSaveProgress * }; * worker.Start(); */ encoder.Start(); System.Diagnostics.Stopwatch stopwatch = new System.Diagnostics.Stopwatch(); stopwatch.Start(); for (int i = 0; i < frames.Count; i++) { GifFrame frame = frames[i]; encoder.AddFrame(frame); if (OnFileSaveProgress != null) { float percent = (float)i / (float)frames.Count; OnFileSaveProgress(0, percent); } if (stopwatch.ElapsedMilliseconds > 40) { stopwatch.Stop(); yield return(0); stopwatch.Start(); } } encoder.Finish(); if (OnFileSaved != null) { OnFileSaved(0, encoder.GetBytes()); } }
public void NextFrame(object sender, EventArgs e) { frameTimer.Stop(); if (_numberOfFrames == 0) { return; } if (_frameList[_frameCounter].disposalMethod == 2) { // dispose = background, tricky code to make transparent last frames region for (int fc = 0; fc < _frameCounter; fc++) { if (_frameList[fc].Visibility == Visibility.Visible) { GifFrame gf = _frameList[_frameCounter]; RectangleGeometry rg2 = new RectangleGeometry(new Rect(gf.left, gf.top, gf.width, gf.height)); totalTransparentGeometry = new CombinedGeometry(GeometryCombineMode.Union, totalTransparentGeometry, rg2); GifFrame gfBack = _frameList[fc]; RectangleGeometry rgBack = new RectangleGeometry(new Rect(gfBack.left, gfBack.top, gfBack.width, gfBack.height)); CombinedGeometry cg = new CombinedGeometry(GeometryCombineMode.Exclude, rgBack, totalTransparentGeometry); GeometryDrawing gd = new GeometryDrawing(Brushes.Black, new Pen(Brushes.Black, 0), cg); DrawingBrush db = new DrawingBrush(gd); _frameList[fc].OpacityMask = db; } } _frameList[_frameCounter].Visibility = Visibility.Hidden; } if (_frameList[_frameCounter].disposalMethod >= 3) { _frameList[_frameCounter].Visibility = Visibility.Hidden; } _frameCounter++; if (_frameCounter < _numberOfFrames) { _frameList[_frameCounter].Visibility = Visibility.Visible; frameTimer.Interval = new TimeSpan(0, 0, 0, 0, _frameList[_frameCounter].delayTime * 10); frameTimer.Start(); } else { if (_numberOfLoops != 0) { _currentLoop++; } if (_currentLoop < _numberOfLoops || _numberOfLoops == 0) { for (int f = 0; f < _frameList.Count; f++) { _frameList[f].Visibility = Visibility.Hidden; _frameList[f].OpacityMask = null; } totalTransparentGeometry = null; _frameCounter = 0; _frameList[_frameCounter].Visibility = Visibility.Visible; frameTimer.Interval = new TimeSpan(0, 0, 0, 0, _frameList[_frameCounter].delayTime * 10); frameTimer.Start(); } } }
private async Task RenderFrameAsync(int frameIndex, CancellationToken cancellationToken) { if (frameIndex < 0) { return; } var frame = _metadata.Frames[frameIndex]; var desc = frame.Descriptor; var rect = GetFixedUpFrameRect(desc); using var indexStream = await GetIndexStreamAsync(frame, cancellationToken); using (_bitmap.LockInScope()) { if (frameIndex < _previousFrameIndex) { ClearArea(_metadata.Header.LogicalScreenDescriptor); } else { DisposePreviousFrame(frame); } int bufferLength = 4 * rect.Width; byte[] indexBuffer = new byte[desc.Width]; byte[] lineBuffer = new byte[bufferLength]; var palette = _palettes[frameIndex]; int transparencyIndex = palette.TransparencyIndex ?? -1; var rows = desc.Interlace ? InterlacedRows(rect.Height) : NormalRows(rect.Height); foreach (int y in rows) { indexStream.ReadAll(indexBuffer, 0, desc.Width); int offset = (desc.Top + y) * _stride + desc.Left * 4; if (transparencyIndex >= 0) { CopyFromBitmap(lineBuffer, _bitmap, offset, bufferLength); } for (int x = 0; x < rect.Width; x++) { byte index = indexBuffer[x]; int i = 4 * x; if (index != transparencyIndex) { WriteColor(lineBuffer, palette[index], i); } } CopyToBitmap(lineBuffer, _bitmap, offset, bufferLength); } _bitmap.AddDirtyRect(rect); } _previousFrame = frame; _previousFrameIndex = frameIndex; }
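// GetFixedUpFrameRect is not shown in this snippet. A plausible implementation (an assumption,
// not necessarily the library's actual code, written here with plain ints instead of the
// library's descriptor type) clamps the frame descriptor against the logical screen so a
// malformed GIF cannot write past the bitmap's edges:
using System;
using System.Windows;

static Int32Rect GetFixedUpFrameRect(int left, int top, int width, int height,
                                     int screenWidth, int screenHeight)
{
    int w = Math.Min(width, screenWidth - left);
    int h = Math.Min(height, screenHeight - top);
    return new Int32Rect(left, top, w, h);
}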
/** Gets the image from a frame. The first frame is 1. * @param frame the frame to get the image from * @return the image */ virtual public Image GetImage(int frame) { GifFrame gf = frames[frame - 1]; return(gf.image); }
public static Texture2D GifToTexture(Stream stream, int frameIndex) { Loader loader = new Loader(); if (!loader.Load(stream)) { return(null); } int width = loader._logical_screen_desc.image_width; int height = loader._logical_screen_desc.image_height; int num3 = width * height; UnityEngine.Color[] colors = new UnityEngine.Color[num3]; Gif.Color[] colorArray2 = loader._global_color_table; int index = loader._logical_screen_desc.background_color; bool flag = loader._frames[0]._GCE_data.transparent_color_flag; UnityEngine.Color clear = UnityEngine.Color.clear; if ((!flag && (colorArray2 != null)) && (index < colorArray2.Length)) { clear.r = ((float)colorArray2[index].r) / 255f; clear.g = ((float)colorArray2[index].g) / 255f; clear.b = ((float)colorArray2[index].b) / 255f; clear.a = 1f; } for (int i = 0; i < num3; i++) { colors[i] = clear; } GifFrame frame = loader._frames[frameIndex]; int num6 = frame._image.desc.image_left; int num7 = frame._image.desc.image_top; int num8 = frame._image.desc.image_width; int num9 = frame._image.desc.image_height; if (frame._image.desc.local_color_table_flag) { colorArray2 = frame._image.desc.local_color_table; } int length = frame._image.data.Length; bool flag2 = frame._GCE_data.transparent_color_flag; int num11 = frame._GCE_data.transparent_color; for (int j = 0; j < length; j++) { int num13 = frame._image.data[j]; if (!flag2 || (num11 != num13)) { Gif.Color color2 = colorArray2[num13]; colors[j].r = ((float)color2.r) / 255f; colors[j].g = ((float)color2.g) / 255f; colors[j].b = ((float)color2.b) / 255f; colors[j].a = 1f; } } Texture2D textured = new Texture2D(width, height, TextureFormat.RGBA32, false); textured.SetPixels(colors); textured.Apply(); return(textured); }
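// A hedged usage sketch for the GifToTexture helper above: decode one frame of a GIF from disk
// and show it on a RawImage. The containing class name (GifHelper), the file path and the
// RawImage target are illustrative assumptions; only the (Stream, int frameIndex) signature
// comes from the snippet.
using System.IO;
using UnityEngine;
using UnityEngine.UI;

public class GifFramePreview : MonoBehaviour
{
    public string gifPath = "sample.gif"; // illustrative path
    public RawImage target;

    void Start()
    {
        using (FileStream stream = File.OpenRead(gifPath))
        {
            // Frame indices are zero-based (loader._frames[frameIndex] above).
            Texture2D tex = GifHelper.GifToTexture(stream, 0);
            if (tex != null && target != null)
            {
                target.texture = tex;
            }
        }
    }
}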
/** Gets the [x,y] position of the frame in reference to the * logical screen. * @param frame the frame * @return the [x,y] position of the frame */ virtual public int[] GetFramePosition(int frame) { GifFrame gf = frames[frame - 1]; return(new int[] { gf.ix, gf.iy }); }
/// <summary> /// Gets the image from a frame. The first frame is 1. /// </summary> /// <param name="frame">the frame to get the image from</param> /// <returns>the image</returns> public Image GetImage(int frame) { GifFrame gf = (GifFrame)Frames[frame - 1]; return(gf.Image); }
/** * Reads next frame image */ virtual protected void ReadImage() { ix = ReadShort(); // (sub)image position & size iy = ReadShort(); iw = ReadShort(); ih = ReadShort(); int packed = inp.ReadByte(); lctFlag = (packed & 0x80) != 0; // 1 - local color table flag interlace = (packed & 0x40) != 0; // 2 - interlace flag // 3 - sort flag // 4-5 - reserved lctSize = 2 << (packed & 7); // 6-8 - local color table size m_bpc = NewBpc(m_gbpc); if (lctFlag) { m_curr_table = ReadColorTable((packed & 7) + 1); // read table m_bpc = NewBpc((packed & 7) + 1); } else { m_curr_table = m_global_table; } if (transparency && transIndex >= m_curr_table.Length / 3) { transparency = false; } if (transparency && m_bpc == 1) // Acrobat 5.05 doesn't like this combination { byte[] tp = new byte[12]; Array.Copy(m_curr_table, 0, tp, 0, 6); m_curr_table = tp; m_bpc = 2; } bool skipZero = DecodeImageData(); // decode pixel data if (!skipZero) { Skip(); } Image img = null; img = new ImgRaw(iw, ih, 1, m_bpc, m_out); PdfArray colorspace = new PdfArray(); colorspace.Add(PdfName.INDEXED); colorspace.Add(PdfName.DEVICERGB); int len = m_curr_table.Length; colorspace.Add(new PdfNumber(len / 3 - 1)); colorspace.Add(new PdfString(m_curr_table)); PdfDictionary ad = new PdfDictionary(); ad.Put(PdfName.COLORSPACE, colorspace); img.Additional = ad; if (transparency) { img.Transparency = new int[] { transIndex, transIndex }; } img.OriginalType = Image.ORIGINAL_GIF; img.OriginalData = fromData; img.Url = fromUrl; GifFrame gf = new GifFrame(); gf.image = img; gf.ix = ix; gf.iy = iy; frames.Add(gf); // add image to frame list //ResetFrame(); }
/** * Creates new frame image from current data (and previous frames as specified by their disposition codes). */ protected void setPixels(int frameIndex) { GifFrame currentFrame = frames[frameIndex]; GifFrame previousFrame = null; int previousIndex = frameIndex - 1; if (previousIndex >= 0) { previousFrame = frames[previousIndex]; } // location of blended pixels int[] dest = mainScratch; // fill in starting image contents based on last image's dispose code if (previousFrame != null && previousFrame.dispose > DISPOSAL_UNSPECIFIED) { if (previousFrame.dispose == DISPOSAL_NONE && currentImage != null) { // Start with the current image currentImage.GetPixels(dest, 0, width, 0, 0, width, height); } if (previousFrame.dispose == DISPOSAL_BACKGROUND) { // Start with a canvas filled with the background color int c = 0; if (!currentFrame.transparency) { c = bgColor; } for (int i = 0; i < previousFrame.ih; i++) { int n1 = (previousFrame.iy + i) * width + previousFrame.ix; int n2 = n1 + previousFrame.iw; for (int k = n1; k < n2; k++) { dest[k] = c; } } } if (previousFrame.dispose == DISPOSAL_PREVIOUS && previousImage != null) { // Start with the previous frame previousImage.GetPixels(dest, 0, width, 0, 0, width, height); } } //Decode pixels for this frame into the global pixels[] scratch decodeBitmapData(currentFrame, mainPixels); // decode pixel data // copy each source line to the appropriate place in the destination int pass = 1; int inc = 8; int iline = 0; for (int i = 0; i < currentFrame.ih; i++) { int line = i; if (currentFrame.interlace) { if (iline >= currentFrame.ih) { pass++; switch (pass) { case 2: iline = 4; break; case 3: iline = 2; inc = 4; break; case 4: iline = 1; inc = 2; break; default: break; } } line = iline; iline += inc; } line += currentFrame.iy; if (line < height) { int k = line * width; int dx = k + currentFrame.ix; // start of line in dest int dlim = dx + currentFrame.iw; // end of dest line if ((k + width) < dlim) { dlim = k + width; // past dest edge } int sx = i * currentFrame.iw; // start of line in source while (dx < dlim) { // map color and insert in destination int index = ((int)mainPixels[sx++]) & 0xff; int c = act[index]; if (c != 0) { dest[dx] = c; } dx++; } } } //Copy pixels into previous image currentImage.GetPixels(copyScratch, 0, width, 0, 0, width, height); previousImage.SetPixels(copyScratch, 0, width, 0, 0, width, height); //Set pixels for current image currentImage.SetPixels(dest, 0, width, 0, 0, width, height); }
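// The dispose codes compared above are the GIF graphic-control-extension disposal methods.
// The numeric values are fixed by the GIF89a spec; the constant names simply mirror the
// identifiers used in the snippet:
const int DISPOSAL_UNSPECIFIED = 0; // decoder may choose
const int DISPOSAL_NONE = 1;        // leave the previous frame in place and composite over it
const int DISPOSAL_BACKGROUND = 2;  // clear the frame's region to the background color
const int DISPOSAL_PREVIOUS = 3;    // restore the pixels that were there before this frame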
/// <summary> /// Reads a frame from the input stream and adds it to the collection /// of frames. /// </summary> /// <param name="inputStream"> /// Input stream from which the frame is to be read. /// </param> /// <param name="lastGce"> /// The graphic control extension most recently read from the input /// stream. /// </param> private void AddFrame( Stream inputStream, GraphicControlExtension lastGce ) { GifFrame previousFrame; GifFrame previousFrameBut1; if( _frames.Count > 0 ) { previousFrame = _frames[_frames.Count - 1]; } else { previousFrame = null; } if( _frames.Count > 1 ) { previousFrameBut1 = _frames[_frames.Count - 2]; } else { previousFrameBut1 = null; } GifFrame frame = new GifFrame( inputStream, _lsd, _gct, lastGce, previousFrame, previousFrameBut1, XmlDebugging ); MyProgressCounters[_readStreamCounterText].Value = (int) inputStream.Position; _frames.Add( frame ); WriteDebugXmlNode( frame.DebugXmlReader ); }
private int ParseBlock(byte[] gifData, int offset) { switch (gifData[offset]) { case 0x21: if (gifData[offset + 1] == 0xF9) { return ParseGraphicControlExtension(gifData, offset); } else { return ParseExtensionBlock(gifData, offset); } case 0x2C: offset = ParseGraphicBlock(gifData, offset); frameList.Add(currentParseGifFrame); currentParseGifFrame = new GifFrame(); return offset; case 0x3B: return -1; default: throw new Exception("GIF format incorrect: missing graphic block or special-purpose block. "); } }
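// The byte values switched on above are the GIF block introducers. A small reference sketch
// (constant names are illustrative; the values come from the GIF89a spec):
const byte ExtensionIntroducer = 0x21;       // followed by a label byte
const byte GraphicControlLabel = 0xF9;       // 0x21 0xF9 - graphic control extension
const byte ApplicationExtensionLabel = 0xFF; // 0x21 0xFF - application extension (e.g. NETSCAPE2.0 looping)
const byte CommentLabel = 0xFE;              // 0x21 0xFE - comment extension
const byte PlainTextLabel = 0x01;            // 0x21 0x01 - plain text extension
const byte ImageSeparator = 0x2C;            // image descriptor follows
const byte Trailer = 0x3B;                   // end of the GIF data stream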
/** * Resets frame state for reading next image. */ protected void ResetFrame() { lastDispose = dispose; lastRect = new Rectangle(ix, iy, iw, ih); lastImage = frames[frames.Count - 1]; lastBgColor = bgColor; // int dispose = 0; transparency = false; delay = 0; lct = null; }
/** * Reads next frame image */ void _readImage() { this._ix = this._readShort(); // (sub)image position & size this._iy = this._readShort(); this._iw = this._readShort(); this._ih = this._readShort(); int packed = this._read(); this._lctFlag = (packed & 0x80) != 0; // 1 - local color table flag this._interlace = (packed & 0x40) != 0; // 2 - interlace flag // 3 - sort flag // 4-5 - reserved this._lctSize = 2 << (packed & 7); // 6-8 - local color table size if (this._lctFlag) { this._lct = this._readColorTable(this._lctSize); // read table this._act = this._lct; // make local table active } else { this._act = this._gct; // make global table active if (this._bgIndex == this._transIndex) { this._bgColor = 0; } } int save = 0; if (this._transparency) { save = this._act[this._transIndex]; this._act[this._transIndex] = 0; // set transparent color if specified } if (this._act == null) { this._status = STATUS_FORMAT_ERROR; // no color table defined } if (this._error()) { return; } this._decodeImageData(); // decode pixel data this._skip(); if (this._error()) { return; } // create new image to receive frame data // image = // new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB_PRE); this._image = this._image ?? new int[this._width * this._height]; this._setPixels(); // transfer pixel data to image var bytes = new byte[this._width * this._height * sizeof(int)]; Buffer.BlockCopy(this._image, 0, bytes, 0, bytes.Length); this._currentFrame = new GifFrame { bytes = bytes, delay = this._delay }; this._frameCount++; if (this._transparency) { this._act[this._transIndex] = save; } this._resetFrame(); }