/// <summary>
///    Builds the three-entry sample collection used by the tests.
/// </summary>
/// <returns>
///    A <see cref="ByteVectorCollection" /> containing "ABC",
///    "DEF", and "GHI", in that order.
/// </returns>
private static ByteVectorCollection BuildList()
{
    ByteVectorCollection collection = new ByteVectorCollection();
    collection.Add("ABC");
    collection.Add("DEF");
    collection.Add("GHI");
    return collection;
}
/// <summary>
///    Builds the standard three-entry sample collection.
/// </summary>
/// <returns>
///    A <see cref="ByteVectorCollection" /> containing "ABC",
///    "DEF", and "GHI", in that order.
/// </returns>
private static ByteVectorCollection BuildList()
{
    string[] values = { "ABC", "DEF", "GHI" };
    ByteVectorCollection result = new ByteVectorCollection();
    foreach (string value in values)
        result.Add(value);
    return result;
}
/// <summary>
///    Adds the next page to the current instance.
/// </summary>
/// <param name="page">
///    The next <see cref="Page" /> object found in the stream.
/// </param>
/// <exception cref="ArgumentNullException">
///    <paramref name="page" /> is <see langword="null" />.
/// </exception>
public void AddPage(Page page)
{
    // Guard added for consistency with the Page(File,long)
    // constructor's argument validation.
    if (page == null) {
        throw new ArgumentNullException("page");
    }

    pages_read++;

    // Remember the first page's header so repagination can later
    // reuse its stream serial number and sequence information.
    if (first_page_header == null) {
        first_page_header = page.Header;
    }

    if (page.Packets.Length == 0) {
        return;
    }

    ByteVector[] page_packets = page.Packets;

    for (int i = 0; i < page_packets.Length; i++) {
        // If this page continues a packet started on the previous
        // page, append its first packet to the last stored packet
        // instead of starting a new one.
        if ((page.Header.Flags & PageFlags.FirstPacketContinued) != 0 &&
            i == 0 && packets.Count > 0) {
            packets[packets.Count - 1].Add(page_packets[0]);
        } else {
            packets.Add(page_packets[i]);
        }
    }
}
/// <summary>
///    Constructs and initializes a new instance of <see
///    cref="Page" /> by reading a page from a file at a
///    specified position.
/// </summary>
/// <param name="file">
///    A <see cref="File" /> object to read the page from.
/// </param>
/// <param name="position">
///    A <see cref="long" /> value specifying the position in the
///    file at which the page begins.
/// </param>
/// <exception cref="ArgumentNullException">
///    <paramref name="file" /> is <see langword="null" />.
/// </exception>
public Page(File file, long position) : this(new PageHeader(file, position))
{
    // Null check added for consistency with the other variant of
    // this constructor, which validates its file argument.
    if (file == null) {
        throw new ArgumentNullException("file");
    }

    // Skip past the header and read each packet's raw data, using
    // the sizes the header recorded.
    file.Seek(position + header.Size);

    foreach (int packet_size in header.PacketSizes) {
        packets.Add(file.ReadBlock(packet_size));
    }
}
/// <summary>
///    Sets the text for a specified box type from a single
///    string.
/// </summary>
/// <param name="type">
///    A <see cref="ByteVector" /> object containing the type to
///    store the text under.
/// </param>
/// <param name="text">
///    A <see cref="string" /> containing the text to store.
/// </param>
public void SetText(ByteVector type, string text)
{
    // An empty or null value means the entry is removed entirely.
    if (string.IsNullOrEmpty(text)) {
        ilst_box.RemoveChild(FixId(type));
        return;
    }

    ByteVectorCollection data = new ByteVectorCollection();
    data.Add(ByteVector.FromString(text, StringType.UTF8));
    SetData(type, data, (uint)AppleDataBox.FlagType.ContainsText);
}
/// <summary>
///    Constructs and initializes a new instance of <see
///    cref="Page" /> by reading a page from a file at a
///    specified position.
/// </summary>
/// <param name="file">
///    A <see cref="File" /> object to read the page from.
/// </param>
/// <param name="position">
///    A <see cref="long" /> value specifying the position at
///    which the page begins.
/// </param>
/// <exception cref="ArgumentNullException">
///    <paramref name="file" /> is <see langword="null" />.
/// </exception>
public Page(File file, long position) : this(new PageHeader(file, position))
{
    if (file == null) {
        throw new ArgumentNullException("file");
    }

    // Position the stream just past the header, then pull in each
    // packet's raw bytes using the sizes the header describes.
    file.Seek(position + header.Size);

    foreach (int size in header.PacketSizes)
        packets.Add(file.ReadBlock(size));
}
/// <summary>
///    Sets the text for a specified box type from an array of
///    strings.
/// </summary>
/// <param name="type">
///    A <see cref="ByteVector" /> object containing the type to
///    add to the new instance.
/// </param>
/// <param name="text">
///    A <see cref="string[]" /> containing text to store.
/// </param>
public void SetText(ByteVector type, string [] text)
{
    // A null array means the entry should be removed entirely.
    if (text == null) {
        ilst_box.RemoveChild(FixId(type));
        return;
    }

    // Convert each string to a UTF-8 byte vector and hand the
    // resulting collection to SetData.
    ByteVectorCollection data = new ByteVectorCollection();
    foreach (string value in text)
        data.Add(ByteVector.FromString(value, StringType.UTF8));

    SetData(type, data, (uint)AppleDataBox.FlagType.ContainsText);
}
/// <summary>
///    Repaginates the pages passed into the current instance to
///    handle changes made to the Xiph comment.
/// </summary>
/// <param name="change">
///    A <see cref="int" /> value reference containing the
///    difference between the number of pages returned and the
///    number of pages that were added to the class.
/// </param>
/// <returns>
///    A <see cref="Page[]" /> containing the new page
///    collection.
/// </returns>
public Page[] Paginate(out int change)
{
    // Ogg pagination is intricate; see the framing rules at
    // http://xiph.org/ogg/doc/framing.html
    // TODO: Document this method in more detail.

    // Nothing was read, so there is nothing to repaginate.
    if (pages_read == 0) {
        change = 0;
        return new Page[0];
    }

    int count = pages_read;
    // Work on a copy so the instance's packet list is untouched.
    ByteVectorCollection packets = new ByteVectorCollection(
        this.packets);
    PageHeader first_header = (PageHeader)first_page_header;
    List<Page> pages = new List<Page>();
    uint index = 0;

    // A page sequence number of zero marks the beginning-of-stream
    // page; its first packet is emitted on a page of its own.
    bool bos = first_header.PageSequenceNumber == 0;
    if (bos) {
        pages.Add(new Page(new ByteVectorCollection(packets[0]),
            first_header));
        index++;
        packets.RemoveAt(0);
        count--;
    }

    // Budget lacing values per page: spread the total evenly over
    // the original page count, capped at 0xfc.
    int lacing_per_page = 0xfc;
    if (count > 0) {
        int total_lacing_bytes = 0;
        for (int i = 0; i < packets.Count; i++)
            total_lacing_bytes += GetLacingValueLength(
                packets, i);
        lacing_per_page = Math.Min(total_lacing_bytes / count + 1,
            lacing_per_page);
    }

    int lacing_bytes_used = 0;
    ByteVectorCollection page_packets = new ByteVectorCollection();
    bool first_packet_continued = false;

    while (packets.Count > 0) {
        int packet_bytes = GetLacingValueLength(packets, 0);
        int remaining = lacing_per_page - lacing_bytes_used;
        bool whole_packet = packet_bytes <= remaining;

        if (whole_packet) {
            // The packet fits on the current page in full.
            page_packets.Add(packets[0]);
            lacing_bytes_used += packet_bytes;
            packets.RemoveAt(0);
        } else {
            // Split the packet: each lacing value covers up to 0xff
            // bytes, so 'remaining' lacing values hold at most
            // remaining * 0xff bytes; the rest carries over.
            page_packets.Add(packets[0].Mid(0, remaining * 0xff));
            packets[0] = packets[0].Mid(remaining * 0xff);
            lacing_bytes_used += remaining;
        }

        if (lacing_bytes_used == lacing_per_page) {
            // Page is full; flush it. If the last packet was split,
            // the next page starts with a continued packet.
            pages.Add(new Page(page_packets,
                new PageHeader(first_header, index,
                    first_packet_continued ?
                        PageFlags.FirstPacketContinued :
                        PageFlags.None)));
            page_packets = new ByteVectorCollection();
            lacing_bytes_used = 0;
            index++;
            count--;
            first_packet_continued = !whole_packet;
        }
    }

    // Flush any remaining packets onto a final page.
    if (page_packets.Count > 0) {
        pages.Add(new Page(page_packets, new PageHeader(
            first_header.StreamSerialNumber, index,
            first_packet_continued ?
                PageFlags.FirstPacketContinued :
                PageFlags.None)));
        index++;
        count--;
    }

    // Pages produced minus pages originally read.
    change = -count;
    return pages.ToArray();
}
/// <summary>
///    Repaginates the pages passed into the current instance to
///    handle changes made to the Xiph comment.
/// </summary>
/// <param name="change">
///    A <see cref="int" /> value reference containing the
///    difference between the number of pages returned and the
///    number of pages that were added to the class.
/// </param>
/// <returns>
///    A <see cref="T:Page[]" /> containing the new page
///    collection.
/// </returns>
public Page [] Paginate(out int change)
{
    // Ogg pagination is intricate; see the framing rules at
    // http://xiph.org/ogg/doc/framing.html
    // TODO: Document this method in more detail.

    // Nothing was read, so there is nothing to repaginate.
    if (pages_read == 0) {
        change = 0;
        return (new Page [0]);
    }

    int count = pages_read;
    // Work on a copy so the instance's packet list is untouched.
    ByteVectorCollection packets = new ByteVectorCollection(
        this.packets);
    PageHeader first_header = (PageHeader)first_page_header;
    List<Page> pages = new List<Page>();
    uint index = 0;

    // A page sequence number of zero marks the beginning-of-stream
    // page; its first packet is emitted on a page of its own.
    bool bos = first_header.PageSequenceNumber == 0;
    if (bos) {
        pages.Add(new Page(new ByteVectorCollection(packets [0]),
            first_header));
        index++;
        packets.RemoveAt(0);
        count--;
    }

    // Budget lacing values per page: spread the total evenly over
    // the original page count, capped at 0xfc.
    int lacing_per_page = 0xfc;
    if (count > 0) {
        int total_lacing_bytes = 0;
        for (int i = 0; i < packets.Count; i++) {
            total_lacing_bytes += GetLacingValueLength(
                packets, i);
        }
        lacing_per_page = Math.Min(total_lacing_bytes / count + 1,
            lacing_per_page);
    }

    int lacing_bytes_used = 0;
    ByteVectorCollection page_packets = new ByteVectorCollection();
    bool first_packet_continued = false;

    while (packets.Count > 0) {
        int packet_bytes = GetLacingValueLength(packets, 0);
        int remaining = lacing_per_page - lacing_bytes_used;
        bool whole_packet = packet_bytes <= remaining;

        if (whole_packet) {
            // The packet fits on the current page in full.
            page_packets.Add(packets [0]);
            lacing_bytes_used += packet_bytes;
            packets.RemoveAt(0);
        } else {
            // Split the packet: each lacing value covers up to 0xff
            // bytes, so 'remaining' lacing values hold at most
            // remaining * 0xff bytes; the rest carries over.
            page_packets.Add(packets [0].Mid(0, remaining * 0xff));
            packets [0] = packets [0].Mid(remaining * 0xff);
            lacing_bytes_used += remaining;
        }

        if (lacing_bytes_used == lacing_per_page) {
            // Page is full; flush it. If the last packet was split,
            // the next page starts with a continued packet.
            pages.Add(new Page(page_packets,
                new PageHeader(first_header, index,
                    first_packet_continued ?
                        PageFlags.FirstPacketContinued :
                        PageFlags.None)));
            page_packets = new ByteVectorCollection();
            lacing_bytes_used = 0;
            index++;
            count--;
            first_packet_continued = !whole_packet;
        }
    }

    // Flush any remaining packets onto a final page.
    if (page_packets.Count > 0) {
        pages.Add(new Page(page_packets, new PageHeader(
            first_header.StreamSerialNumber, index,
            first_packet_continued ?
                PageFlags.FirstPacketContinued :
                PageFlags.None)));
        index++;
        count--;
    }

    // Pages produced minus pages originally read.
    change = -count;
    return (pages.ToArray());
}
/// <summary>
///    Repaginates the pages passed into the current instance to
///    handle changes made to the Xiph comment.
/// </summary>
/// <param name="change">
///    A <see cref="int" /> value reference receiving the
///    difference between the number of pages returned and the
///    number of pages that were added to the class.
/// </param>
/// <returns>
///    A <see cref="Page[]" /> containing the new page
///    collection.
/// </returns>
public Page[] Paginate(out int change)
{
    // Nothing was read, so there is nothing to repaginate.
    if (pages_read == 0) {
        change = 0;
        return new Page[0];
    }

    int remaining_count = pages_read;
    ByteVectorCollection pending = new ByteVectorCollection(this.packets);
    PageHeader first_header = (PageHeader)first_page_header;
    List<Page> output = new List<Page>();
    uint sequence = 0;

    // A sequence number of zero marks the beginning-of-stream page,
    // which keeps its first packet on a page of its own.
    if (first_header.PageSequenceNumber == 0) {
        output.Add(new Page(new ByteVectorCollection(pending[0]),
            first_header));
        sequence++;
        pending.RemoveAt(0);
        remaining_count--;
    }

    // Spread the lacing values evenly across the original page
    // count, capping each page at 0xfc lacing values.
    int lacing_per_page = 0xfc;
    if (remaining_count > 0) {
        int total_lacing = 0;
        for (int i = 0; i < pending.Count; i++) {
            total_lacing += GetLacingValueLength(pending, i);
        }
        lacing_per_page = Math.Min(
            total_lacing / remaining_count + 1, lacing_per_page);
    }

    int lacing_used = 0;
    ByteVectorCollection current = new ByteVectorCollection();
    bool continued = false;

    while (pending.Count > 0) {
        int needed = GetLacingValueLength(pending, 0);
        int available = lacing_per_page - lacing_used;
        bool fits = needed <= available;

        if (fits) {
            current.Add(pending[0]);
            lacing_used += needed;
            pending.RemoveAt(0);
        } else {
            // Split the packet; each lacing value covers up to
            // 0xff bytes of packet data.
            current.Add(pending[0].Mid(0, available * 0xff));
            pending[0] = pending[0].Mid(available * 0xff);
            lacing_used += available;
        }

        if (lacing_used == lacing_per_page) {
            PageFlags flags = continued ?
                PageFlags.FirstPacketContinued : PageFlags.None;
            output.Add(new Page(current,
                new PageHeader(first_header, sequence, flags)));
            current = new ByteVectorCollection();
            lacing_used = 0;
            sequence++;
            remaining_count--;
            continued = !fits;
        }
    }

    // Emit a final page for any leftover packets.
    if (current.Count > 0) {
        PageFlags flags = continued ?
            PageFlags.FirstPacketContinued : PageFlags.None;
        output.Add(new Page(current, new PageHeader(
            first_header.StreamSerialNumber, sequence, flags)));
        sequence++;
        remaining_count--;
    }

    change = -remaining_count;
    return output.ToArray();
}
/// <summary>
///    Renders a group of pages, repaginating their packets, and
///    writes the result back to the file in place of the
///    originals.
/// </summary>
/// <param name="page_group">
///    An <see cref="IntCollection" /> containing the indices of
///    the pages in the group to rewrite.
/// </param>
private void WritePageGroup(IntCollection page_group)
{
    if (page_group.IsEmpty)
        return;

    ByteVectorCollection packets = new ByteVectorCollection();

    // If the first page of the group isn't dirty, append its
    // partial content here.
    if (!dirtyPages.Contains(((OggPage)this.pages[page_group[0]]).FirstPacketIndex))
        packets.Add(((OggPage)this.pages[page_group[0]]).Packets[0]);

    int previous_packet = -1;
    int original_size = 0;

    // Collect every packet covered by the group exactly once,
    // tracking the combined byte size of the original pages.
    for (int i = 0; i < page_group.Count; i++) {
        int page = page_group[i];
        uint first_packet = (uint)((OggPage)this.pages[page]).FirstPacketIndex;
        uint last_packet = first_packet + ((OggPage)this.pages[page]).PacketCount - 1;

        for (uint j = first_packet; j <= last_packet; j++) {
            // The final packet of the final page, when not dirty, is
            // taken directly from that page rather than re-fetched.
            if (i == page_group.Count - 1 && j == last_packet && !dirtyPages.Contains((int)j))
                packets.Add(((OggPage)this.pages[page]).Packets[((OggPage)this.pages[page]).Packets.Count - 1]);
            else if ((int)j != previous_packet) {
                // Skip packets already added via a previous page
                // (packets can span page boundaries).
                previous_packet = (int)j;
                packets.Add(GetPacket(j));
            }
        }

        original_size += ((OggPage)this.pages[page]).Size;
    }

    // Preserve the continuation/completion flags from the first
    // and last pages of the group.
    bool continued = ((OggPage)this.pages[page_group[0]]).Header.FirstPacketContinued;
    bool completed = ((OggPage)this.pages[page_group[page_group.Count - 1]]).Header.LastPacketCompleted;

    // TODO: This pagination method isn't accurate for what's being
    // done here. This should account for real possibilities like
    // non-aligned packets and such.
    OggPage[] pages = OggPage.Paginate(packets, PaginationStrategy.SinglePagePerGroup, streamSerialNumber, page_group[0], continued, completed);

    ByteVector data = new ByteVector();
    foreach (OggPage p in pages)
        data.Add(p.Render());

    // The insertion algorithms could also be improved to queue and
    // prioritize data on the way out. Currently it requires
    // rewriting the file for every page group rather than just
    // once; however, for tagging applications there will generally
    // only be one page group, so it's not worth the time for the
    // optimization at the moment.
    Insert(data, ((OggPage)this.pages[page_group[0]]).FileOffset, original_size);

    // Update the page index to include the pages we just created
    // and to delete the old pages.
    foreach (OggPage p in pages) {
        int index = p.Header.PageSequenceNumber;
        this.pages[index] = p;
    }
}
/// <summary>
///    Repaginates the pages read into the current instance and
///    reports the change in page count.
/// </summary>
/// <param name="change">
///    A <see cref="int" /> value reference receiving the
///    difference between the number of pages returned and the
///    number of pages that were read.
/// </param>
/// <returns>
///    A <see cref="Page[]" /> containing the new page
///    collection.
/// </returns>
public Page[] Paginate(out int change)
{
    // Nothing was read, so there is nothing to repaginate.
    if (this.pages_read == 0) {
        change = 0;
        return new Page[0];
    }

    int count = this.pages_read;
    ByteVectorCollection remaining = new ByteVectorCollection(this.packets);
    PageHeader first_header = this.first_page_header.Value;
    List<Page> pages = new List<Page>();
    uint index = 0;

    // A sequence number of zero marks the beginning-of-stream
    // page; its first packet goes on a page of its own.
    if (first_header.PageSequenceNumber == 0) {
        pages.Add(new Page(
            new ByteVectorCollection(new ByteVector[] { remaining[0] }),
            first_header));
        index++;
        remaining.RemoveAt(0);
        count--;
    }

    // Spread the lacing values evenly over the original page
    // count, capped at 0xfc per page.
    int lacing_per_page = 0xfc;
    if (count > 0) {
        int total = 0;
        for (int i = 0; i < remaining.Count; i++) {
            total += GetLacingValueLength(remaining, i);
        }
        lacing_per_page = Math.Min((total / count) + 1, lacing_per_page);
    }

    int used = 0;
    ByteVectorCollection page_packets = new ByteVectorCollection();
    bool continued = false;

    while (remaining.Count > 0) {
        int packet_length = GetLacingValueLength(remaining, 0);
        int available = lacing_per_page - used;
        bool whole = packet_length <= available;

        if (whole) {
            page_packets.Add(remaining[0]);
            used += packet_length;
            remaining.RemoveAt(0);
        } else {
            // Split the packet; each lacing value covers up to
            // 0xff bytes of packet data.
            page_packets.Add(remaining[0].Mid(0, available * 0xff));
            remaining[0] = remaining[0].Mid(available * 0xff);
            used += available;
        }

        if (used == lacing_per_page) {
            pages.Add(new Page(page_packets, new PageHeader(
                first_header, index,
                continued ? PageFlags.FirstPacketContinued : PageFlags.None)));
            page_packets = new ByteVectorCollection();
            used = 0;
            index++;
            count--;
            continued = !whole;
        }
    }

    // Emit a final page for any leftover packets.
    if (page_packets.Count > 0) {
        pages.Add(new Page(page_packets, new PageHeader(
            first_header.StreamSerialNumber, index,
            continued ? PageFlags.FirstPacketContinued : PageFlags.None)));
        index++;
        count--;
    }

    change = -count;
    return pages.ToArray();
}